From 61dc82ada05002d2e00b1a426776b54388951bdc Mon Sep 17 00:00:00 2001
From: Catherine Lee <55311782+c298lee@users.noreply.github.com>
Date: Wed, 2 Oct 2024 14:29:19 -0400
Subject: [PATCH 001/139] chore(replay): Remove object inspector (#78236)
- Switch from ObjectInspector to StructuredEventData in the console tab
- Removes ObjectInspector and react-inspector!!
Closes https://github.com/getsentry/sentry/issues/70042
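
For reference, a minimal sketch of the call-site swap this patch makes in the console rows. It is illustrative only: the `ExampleConsoleValue` wrapper is hypothetical, and the `initialExpandedPaths`/`onToggleExpand` prop names are assumptions about `StructuredEventData`, not code copied from this diff. The callback adapter mirrors the one added to `format.tsx` below, converting the component's list of expanded paths back into the `Record<string, boolean>` shape the virtualized replay lists already store.

```tsx
import StructuredEventData from 'sentry/components/structuredEventData';
import type {OnExpandCallback} from 'sentry/views/replays/detail/useVirtualizedInspector';

interface ExampleProps {
  data: unknown;
  // Same callback shape the replay virtualization code keeps per row, so
  // expanded nodes can be restored after a row unmounts and remounts.
  onExpand: OnExpandCallback;
  expandPaths?: string[];
}

// Hypothetical wrapper showing how an <ObjectInspector> call site translates
// to <StructuredEventData>. Prop names are assumptions, not from this patch.
function ExampleConsoleValue({data, expandPaths, onExpand}: ExampleProps) {
  // StructuredEventData reports the full list of expanded paths; convert it
  // into the Record<path, true> map expected by OnExpandCallback.
  const handleToggleExpand = (expandedPaths: string[], path: string) => {
    onExpand(path, Object.fromEntries(expandedPaths.map(item => [item, true])));
  };

  return (
    <StructuredEventData
      data={data}
      initialExpandedPaths={expandPaths ?? []}
      onToggleExpand={handleToggleExpand}
    />
  );
}

export default ExampleConsoleValue;
```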
---
package.json | 1 -
static/app/components/objectInspector.tsx | 101 ------------------
.../replays/breadcrumbs/breadcrumbItem.tsx | 5 +-
.../replayPreferenceDropdown.stories.tsx | 4 +-
.../breadcrumbs/useBreadcrumbFilters.tsx | 2 +-
.../replays/detail/console/consoleLogRow.tsx | 9 +-
.../views/replays/detail/console/format.tsx | 44 +++++---
.../views/replays/detail/console/index.tsx | 2 +-
.../detail/console/messageFormatter.spec.tsx | 50 ++++-----
.../detail/console/messageFormatter.tsx | 13 ++-
.../detail/console/useConsoleFilters.tsx | 2 +-
.../detail/useVirtualizedInspector.tsx | 8 +-
.../replays/detail/useVirtualizedList.tsx | 2 +-
yarn.lock | 5 -
14 files changed, 86 insertions(+), 162 deletions(-)
delete mode 100644 static/app/components/objectInspector.tsx
diff --git a/package.json b/package.json
index e8cb6ce69cfed4..7879824ff1b507 100644
--- a/package.json
+++ b/package.json
@@ -54,7 +54,6 @@
"@react-types/shared": "^3.24.1",
"@rsdoctor/webpack-plugin": "0.4.4",
"@sentry-internal/global-search": "^1.0.0",
- "@sentry-internal/react-inspector": "6.0.1-4",
"@sentry-internal/rrweb": "2.26.0",
"@sentry-internal/rrweb-player": "2.26.0",
"@sentry-internal/rrweb-snapshot": "2.26.0",
diff --git a/static/app/components/objectInspector.tsx b/static/app/components/objectInspector.tsx
deleted file mode 100644
index 39a041465cc6ad..00000000000000
--- a/static/app/components/objectInspector.tsx
+++ /dev/null
@@ -1,101 +0,0 @@
-import type {ComponentProps, MouseEvent} from 'react';
-import {useMemo} from 'react';
-import {useTheme} from '@emotion/react';
-import styled from '@emotion/styled';
-import {
- chromeDark,
- chromeLight,
- ObjectInspector as OrigObjectInspector,
-} from '@sentry-internal/react-inspector';
-
-import {CopyToClipboardButton} from 'sentry/components/copyToClipboardButton';
-import ConfigStore from 'sentry/stores/configStore';
-import {useLegacyStore} from 'sentry/stores/useLegacyStore';
-import {space} from 'sentry/styles/space';
-
-type Props = Omit<ComponentProps<typeof OrigObjectInspector>, 'theme'> & {
- onCopy?: (copiedCode: string) => void;
- showCopyButton?: boolean;
- theme?: Record<string, any>;
-};
-
-/**
- * @deprecated use `StructuredEventData` or `StructuredData` instead.
- */
-function ObjectInspector({data, onCopy, showCopyButton, theme, ...props}: Props) {
- const config = useLegacyStore(ConfigStore);
- const emotionTheme = useTheme();
- const isDark = config.theme === 'dark';
-
- const INSPECTOR_THEME = useMemo(
- () => ({
- ...(isDark ? chromeDark : chromeLight),
-
- // Reset some theme values
- BASE_COLOR: 'inherit',
- ERROR_COLOR: emotionTheme.red400,
- TREENODE_FONT_FAMILY: emotionTheme.text.familyMono,
- TREENODE_FONT_SIZE: 'inherit',
- TREENODE_LINE_HEIGHT: 'inherit',
- BASE_BACKGROUND_COLOR: 'none',
- ARROW_FONT_SIZE: '10px',
-
- OBJECT_PREVIEW_OBJECT_MAX_PROPERTIES: 1,
- ...theme,
- }),
- [isDark, theme, emotionTheme.red400, emotionTheme.text]
- );
-
- const inspector = (
-   <OrigObjectInspector data={data} theme={INSPECTOR_THEME} {...props} />
- );
- if (showCopyButton) {
-   return (
-     <Wrapper>
-       <StyledCopyButton onCopy={onCopy} text={JSON.stringify(data, null, '\t')} />
-       <InspectorWrapper>{inspector}</InspectorWrapper>
-     </Wrapper>
-   );
- }
-
- return inspector;
-}
-
-const InspectorWrapper = styled('div')`
- margin-right: ${space(4)};
-`;
-
-const Wrapper = styled('div')`
- position: relative;
-
- /*
- We need some minimum vertical height so the copy button has room.
- But don't try to use min-height because then whitespace would be inconsistent.
- */
- padding-bottom: ${space(1.5)};
-`;
-
-const StyledCopyButton = styled(CopyToClipboardButton)`
- position: absolute;
- top: 0;
- right: ${space(0.5)};
-`;
-
-export type OnExpandCallback = (
- path: string,
- expandedState: Record<string, boolean>,
- event: MouseEvent
-) => void;
-
-export default ObjectInspector;
diff --git a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx
index a289528762da1d..ae80f6b365c4dd 100644
--- a/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx
+++ b/static/app/components/replays/breadcrumbs/breadcrumbItem.tsx
@@ -44,6 +44,7 @@ import useOrganization from 'sentry/utils/useOrganization';
import useProjectFromSlug from 'sentry/utils/useProjectFromSlug';
import IconWrapper from 'sentry/views/replays/detail/iconWrapper';
import TimestampButton from 'sentry/views/replays/detail/timestampButton';
+import type {OnExpandCallback} from 'sentry/views/replays/detail/useVirtualizedInspector';
type MouseCallback = (frame: ReplayFrame, nodeId?: number) => void;
@@ -52,7 +53,7 @@ const FRAMES_WITH_BUTTONS = ['replay.hydrate-error'];
interface Props {
frame: ReplayFrame;
onClick: null | MouseCallback;
- onInspectorExpanded: (path: string, expandedState: Record<string, boolean>) => void;
+ onInspectorExpanded: OnExpandCallback;
onMouseEnter: MouseCallback;
onMouseLeave: MouseCallback;
startTimestampMs: number;
@@ -236,7 +237,7 @@ function WebVitalData({
}: {
expandPaths: string[] | undefined;
frame: WebVitalFrame;
- onInspectorExpanded: (path: string, expandedState: Record<string, boolean>) => void;
+ onInspectorExpanded: OnExpandCallback;
onMouseEnter: MouseCallback;
onMouseLeave: MouseCallback;
selectors: Map<number, string> | undefined;
diff --git a/static/app/components/replays/preferences/replayPreferenceDropdown.stories.tsx b/static/app/components/replays/preferences/replayPreferenceDropdown.stories.tsx
index 2ce28b8155ccaa..f180102919ed2f 100644
--- a/static/app/components/replays/preferences/replayPreferenceDropdown.stories.tsx
+++ b/static/app/components/replays/preferences/replayPreferenceDropdown.stories.tsx
@@ -1,6 +1,5 @@
import {Fragment} from 'react';
-import ObjectInspector from 'sentry/components/objectInspector';
import ReplayPreferenceDropdown from 'sentry/components/replays/preferences/replayPreferenceDropdown';
import {
LocalStorageReplayPreferences,
@@ -9,6 +8,7 @@ import {
} from 'sentry/components/replays/preferences/replayPreferences';
import JSXNode from 'sentry/components/stories/jsxNode';
import SideBySide from 'sentry/components/stories/sideBySide';
+import StructuredEventData from 'sentry/components/structuredEventData';
import storyBook from 'sentry/stories/storyBook';
import {
ReplayPreferencesContextProvider,
@@ -98,5 +98,5 @@ export default storyBook(ReplayPreferenceDropdown, story => {
function DebugReplayPrefsState() {
const [prefs] = useReplayPrefs();
- return <ObjectInspector data={prefs} />;
+ return <StructuredEventData data={prefs} />;
}
diff --git a/static/app/views/replays/detail/breadcrumbs/useBreadcrumbFilters.tsx b/static/app/views/replays/detail/breadcrumbs/useBreadcrumbFilters.tsx
index 9ead461ded05e4..5ee74784844b01 100644
--- a/static/app/views/replays/detail/breadcrumbs/useBreadcrumbFilters.tsx
+++ b/static/app/views/replays/detail/breadcrumbs/useBreadcrumbFilters.tsx
@@ -103,7 +103,7 @@ const FILTERS = {
function useBreadcrumbFilters({frames}: Options): Return {
const {setFilter, query} = useFiltersInLocationQuery();
- // Keep a reference of object paths that are expanded (via <ObjectInspector>)
+ // Keep a reference of object paths that are expanded (via <StructuredEventData>)
// by log row, so they they can be restored as the Console pane is scrolling.
// Due to virtualization, components can be unmounted as the user scrolls, so
// state needs to be remembered.
diff --git a/static/app/views/replays/detail/console/consoleLogRow.tsx b/static/app/views/replays/detail/console/consoleLogRow.tsx
index 34efe608305980..25f24e21f0a704 100644
--- a/static/app/views/replays/detail/console/consoleLogRow.tsx
+++ b/static/app/views/replays/detail/console/consoleLogRow.tsx
@@ -12,17 +12,20 @@ import type useCrumbHandlers from 'sentry/utils/replays/hooks/useCrumbHandlers';
import type {BreadcrumbFrame, ConsoleFrame} from 'sentry/utils/replays/types';
import MessageFormatter from 'sentry/views/replays/detail/console/messageFormatter';
import TimestampButton from 'sentry/views/replays/detail/timestampButton';
-import type {OnDimensionChange} from 'sentry/views/replays/detail/useVirtualizedInspector';
interface Props extends ReturnType {
currentHoverTime: number | undefined;
currentTime: number;
frame: BreadcrumbFrame;
index: number;
+ onDimensionChange: (
+ index: number,
+ path: string,
+ expandedState: Record<string, boolean>
+ ) => void;
startTimestampMs: number;
style: CSSProperties;
expandPaths?: string[];
- onDimensionChange?: OnDimensionChange;
}
export default function ConsoleLogRow({
@@ -39,7 +42,7 @@ export default function ConsoleLogRow({
style,
}: Props) {
const handleDimensionChange = useCallback(
- (path, expandedState, e) => onDimensionChange?.(index, path, expandedState, e),
+ (path, expandedState) => onDimensionChange?.(index, path, expandedState),
[onDimensionChange, index]
);
diff --git a/static/app/views/replays/detail/console/format.tsx b/static/app/views/replays/detail/console/format.tsx
index d8e5e32e3613c4..58d80c94198477 100644
--- a/static/app/views/replays/detail/console/format.tsx
+++ b/static/app/views/replays/detail/console/format.tsx
@@ -20,38 +20,43 @@
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
import {Fragment} from 'react';
+import styled from '@emotion/styled';
-import type {OnExpandCallback} from 'sentry/components/objectInspector';
-import ObjectInspector from 'sentry/components/objectInspector';
+import StructuredEventData from 'sentry/components/structuredEventData';
+import type {OnExpandCallback} from 'sentry/views/replays/detail/useVirtualizedInspector';
const formatRegExp = /%[csdj%]/g;
-
interface FormatProps {
args: any[];
+ onExpand: OnExpandCallback;
expandPaths?: string[];
- onExpand?: OnExpandCallback;
}
/**
* Based on node's `util.format()`, returns a formatted "string" using the
* first argument as a printf-like format string which can contain zero or more
- * format specifiers. Uses `<ObjectInspector>` to print objects.
+ * format specifiers. Uses `<StructuredEventData>` to print objects.
*
* %c is ignored for now
*/
export default function Format({onExpand, expandPaths, args}: FormatProps) {
+ const onToggleExpand = (expandedPaths, path) => {
+ onExpand(path, Object.fromEntries(expandedPaths.map(item => [item, true])));
+ };
const f = args[0];
if (typeof f !== 'string') {
const objects: any[] = [];
for (let i = 0; i < args.length; i++) {
objects.push(
-
+
+
+
);
}
return <Fragment>{objects}</Fragment>;
@@ -136,10 +141,25 @@ export default function Format({onExpand, expandPaths, args}: FormatProps) {
} else {
pieces.push(' ');
pieces.push(
-
+
+
+
);
}
}
return <Fragment>{pieces}</Fragment>;
}
+
+const Wrapper = styled('div')`
+ pre {
+ margin: 0;
+ background: none;
+ font-size: inherit;
+ }
+`;
diff --git a/static/app/views/replays/detail/console/index.tsx b/static/app/views/replays/detail/console/index.tsx
index 0819f714192a00..acf21825a332db 100644
--- a/static/app/views/replays/detail/console/index.tsx
+++ b/static/app/views/replays/detail/console/index.tsx
@@ -75,7 +75,7 @@ function Console() {
cache={cache}
columnIndex={0}
// Set key based on filters, otherwise we can have odd expand/collapse state
- // with <ObjectInspector> when filtering
+ // with <StructuredEventData> when filtering
key={`${searchTerm}-${logLevel.join(',')}-${key}`}
parent={parent}
rowIndex={index}
diff --git a/static/app/views/replays/detail/console/messageFormatter.spec.tsx b/static/app/views/replays/detail/console/messageFormatter.spec.tsx
index 7a9a92382a1db4..a04a389c2f4d45 100644
--- a/static/app/views/replays/detail/console/messageFormatter.spec.tsx
+++ b/static/app/views/replays/detail/console/messageFormatter.spec.tsx
@@ -1,7 +1,7 @@
import {ReplayConsoleFrameFixture} from 'sentry-fixture/replay/replayBreadcrumbFrameData';
import {ReplayRecordFixture} from 'sentry-fixture/replayRecord';
-import {render, screen} from 'sentry-test/reactTestingLibrary';
+import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
import {BreadcrumbLevelType} from 'sentry/types/breadcrumbs';
import hydrateBreadcrumbs from 'sentry/utils/replays/hydrateBreadcrumbs';
@@ -21,7 +21,7 @@ describe('MessageFormatter', () => {
}),
]);
- render(<MessageFormatter frame={frame} />);
+ render(<MessageFormatter frame={frame} onExpand={() => {}} />);
expect(screen.getByText('This is a test')).toBeInTheDocument();
});
@@ -41,7 +41,7 @@ describe('MessageFormatter', () => {
// When the type is narrowed to `ConsoleFrame` the `data` field is forced to exist.
delete frame.data;
- render(<MessageFormatter frame={frame} />);
+ render(<MessageFormatter frame={frame} onExpand={() => {}} />);
expect(screen.getByText('This is only a test')).toBeInTheDocument();
});
@@ -59,13 +59,13 @@ describe('MessageFormatter', () => {
}),
]);
- render(<MessageFormatter frame={frame} />);
+ const {container} = render(<MessageFormatter frame={frame} onExpand={() => {}} />);
expect(screen.getByText('test 1 false')).toBeInTheDocument();
- expect(screen.getByText('{}')).toBeInTheDocument();
+ expect(container).toHaveTextContent('{}');
});
- it('Should print console message correctly when it is an Error object', () => {
+ it('Should print console message correctly when it is an Error object', async function () {
const [frame] = hydrateBreadcrumbs(ReplayRecordFixture(), [
ReplayConsoleFrameFixture({
data: {
@@ -78,8 +78,10 @@ describe('MessageFormatter', () => {
}),
]);
- render(<MessageFormatter frame={frame} />);
+ render(<MessageFormatter frame={frame} onExpand={() => {}} />);
+ expect(screen.getByText('1 item')).toBeInTheDocument();
+ await userEvent.click(screen.getByRole('button', {name: '1 item'}));
expect(screen.getByText('this is my error message')).toBeInTheDocument();
});
@@ -95,12 +97,12 @@ describe('MessageFormatter', () => {
}),
]);
- render(<MessageFormatter frame={frame} />);
+ const {container} = render(<MessageFormatter frame={frame} onExpand={() => {}} />);
- expect(screen.getByText('{}')).toBeInTheDocument();
+ expect(container).toHaveTextContent('{}');
});
- it('Should style "%c" placeholder and print the console message correctly', () => {
+ it('Should style "%c" placeholder and print the console message correctly', async function () {
const [frame] = hydrateBreadcrumbs(ReplayRecordFixture(), [
ReplayConsoleFrameFixture({
data: {
@@ -120,18 +122,19 @@ describe('MessageFormatter', () => {
}),
]);
- render(<MessageFormatter frame={frame} />);
+ const {container} = render(<MessageFormatter frame={frame} onExpand={() => {}} />);
const styledEl = screen.getByText('prev state');
expect(styledEl).toBeInTheDocument();
expect(styledEl).toHaveStyle('color: #9E9E9E;');
expect(styledEl).toHaveStyle('font-weight: bold;');
expect(styledEl).not.toHaveStyle('background-image: url(foo);');
- expect(screen.getByText('cart')).toBeInTheDocument();
- expect(screen.getByText('Array(0)')).toBeInTheDocument();
+ expect(screen.getByText('1 item')).toBeInTheDocument();
+ await userEvent.click(screen.getByRole('button', {name: '1 item'}));
+ expect(container).toHaveTextContent('cart: []');
});
- it('Should print arrays correctly', () => {
+ it('Should print arrays correctly', async function () {
const [frame] = hydrateBreadcrumbs(ReplayRecordFixture(), [
ReplayConsoleFrameFixture({
data: {
@@ -144,14 +147,13 @@ describe('MessageFormatter', () => {
}),
]);
- render(<MessageFormatter frame={frame} />);
+ render(<MessageFormatter frame={frame} onExpand={() => {}} />);
expect(screen.getByText('test')).toBeInTheDocument();
- expect(screen.getByText('(2)')).toBeInTheDocument();
- // expect(screen.getByText('[')).toBeInTheDocument();
- expect(screen.getByText('"foo"')).toBeInTheDocument();
- expect(screen.getByText('"bar"')).toBeInTheDocument();
- // expect(screen.getByText(']')).toBeInTheDocument();
+ expect(screen.getByText('2 items')).toBeInTheDocument();
+ await userEvent.click(screen.getByRole('button', {name: '2 items'}));
+ expect(screen.getByText('foo')).toBeInTheDocument();
+ expect(screen.getByText('bar')).toBeInTheDocument();
});
it('Should print literal %', () => {
@@ -167,7 +169,7 @@ describe('MessageFormatter', () => {
}),
]);
- render(<MessageFormatter frame={frame} />);
+ render(<MessageFormatter frame={frame} onExpand={() => {}} />);
expect(screen.getByText('This is a literal 100%')).toBeInTheDocument();
});
@@ -185,7 +187,7 @@ describe('MessageFormatter', () => {
}),
]);
- render(<MessageFormatter frame={frame} />);
+ render(<MessageFormatter frame={frame} onExpand={() => {}} />);
expect(screen.getByText('Unbound placeholder %s')).toBeInTheDocument();
});
@@ -203,7 +205,7 @@ describe('MessageFormatter', () => {
}),
]);
- render(<MessageFormatter frame={frame} />);
+ render(<MessageFormatter frame={frame} onExpand={() => {}} />);
expect(screen.getByText('Placeholder myPlaceholder with 100%')).toBeInTheDocument();
});
@@ -218,7 +220,7 @@ describe('MessageFormatter', () => {
},
]);
- render(<MessageFormatter frame={frame} />);
+ render(<MessageFormatter frame={frame} onExpand={() => {}} />);
expect(screen.getByText('cypress custom breadcrumb')).toBeInTheDocument();
});
diff --git a/static/app/views/replays/detail/console/messageFormatter.tsx b/static/app/views/replays/detail/console/messageFormatter.tsx
index 14d4228b358760..dbe493c029ca55 100644
--- a/static/app/views/replays/detail/console/messageFormatter.tsx
+++ b/static/app/views/replays/detail/console/messageFormatter.tsx
@@ -1,13 +1,13 @@
-import type {OnExpandCallback} from 'sentry/components/objectInspector';
import {defined} from 'sentry/utils';
import type {BreadcrumbFrame, ConsoleFrame} from 'sentry/utils/replays/types';
import {isConsoleFrame} from 'sentry/utils/replays/types';
import Format from 'sentry/views/replays/detail/console/format';
+import type {OnExpandCallback} from 'sentry/views/replays/detail/useVirtualizedInspector';
interface Props {
frame: BreadcrumbFrame;
+ onExpand: OnExpandCallback;
expandPaths?: string[];
- onExpand?: OnExpandCallback;
}
// There is a special case where `console.error()` is called with an Error object.
@@ -71,7 +71,14 @@ export default function MessageFormatter({frame, expandPaths, onExpand}: Props)
// Some browsers won't allow you to write to error properties
}
- return <Format expandPaths={expandPaths} onExpand={onExpand} args={[fakeError]} />;
+ // An Error object has non enumerable attributes that we want to print
+ const fakeErrorObject = JSON.parse(
+ JSON.stringify(fakeError, Object.getOwnPropertyNames(fakeError))
+ );
+
+ return (
+   <Format expandPaths={expandPaths} onExpand={onExpand} args={[fakeErrorObject]} />
+ );
}
return (
diff --git a/static/app/views/replays/detail/console/useConsoleFilters.tsx b/static/app/views/replays/detail/console/useConsoleFilters.tsx
index f2d944cf3f8bda..fffde1d8a3992d 100644
--- a/static/app/views/replays/detail/console/useConsoleFilters.tsx
+++ b/static/app/views/replays/detail/console/useConsoleFilters.tsx
@@ -69,7 +69,7 @@ function sortBySeverity(a: string, b: string) {
function useConsoleFilters({frames}: Options): Return {
const {setFilter, query} = useFiltersInLocationQuery();
- // Keep a reference of object paths that are expanded (via <ObjectInspector>)
+ // Keep a reference of object paths that are expanded (via <StructuredEventData>)
// by log row, so they they can be restored as the Console pane is scrolling.
// Due to virtualization, components can be unmounted as the user scrolls, so
// state needs to be remembered.
diff --git a/static/app/views/replays/detail/useVirtualizedInspector.tsx b/static/app/views/replays/detail/useVirtualizedInspector.tsx
index 9c35613ffeca3f..769a22634d0636 100644
--- a/static/app/views/replays/detail/useVirtualizedInspector.tsx
+++ b/static/app/views/replays/detail/useVirtualizedInspector.tsx
@@ -1,4 +1,4 @@
-import type {MouseEvent, RefObject} from 'react';
+import type {RefObject} from 'react';
import {useCallback} from 'react';
import type {CellMeasurerCache, List} from 'react-virtualized';
@@ -10,11 +10,9 @@ type Opts = {
listRef: RefObject<List>;
};
-export type OnDimensionChange = (
- index: number,
+export type OnExpandCallback = (
path: string,
- expandedState: Record<string, boolean>,
- event: MouseEvent
+ expandedState: Record<string, boolean>
) => void;
export default function useVirtualizedInspector({cache, listRef, expandPathsRef}: Opts) {
diff --git a/static/app/views/replays/detail/useVirtualizedList.tsx b/static/app/views/replays/detail/useVirtualizedList.tsx
index d70d53d0963ce2..41780ce1a9625b 100644
--- a/static/app/views/replays/detail/useVirtualizedList.tsx
+++ b/static/app/views/replays/detail/useVirtualizedList.tsx
@@ -18,7 +18,7 @@ function useVirtualizedList({cellMeasurer, deps, ref}: Opts) {
// Restart cache when items changes
// XXX: this has potential to break the UI, especially with dynamic content
- // in lists (e.g. ObjectInspector). Consider removing this as deps can easily
+ // in lists (e.g. StructuredEventData). Consider removing this as deps can easily
// be forgotten to be memoized.
//
// The reason for high potential to break UI: updateList clears the cache, so
diff --git a/yarn.lock b/yarn.lock
index 13a36b7f979ce0..a4a6ec76b4329d 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -3216,11 +3216,6 @@
htmlparser2 "^4.1.0"
title-case "^3.0.2"
-"@sentry-internal/react-inspector@6.0.1-4":
- version "6.0.1-4"
- resolved "https://registry.yarnpkg.com/@sentry-internal/react-inspector/-/react-inspector-6.0.1-4.tgz#10758f3461cf2cf48df8c80f0514c55ca18872c5"
- integrity sha512-uL2RyvW8EqDEchnbo8Hu/c4IpBqM3LLxUpZPHs8o40kynerzPset6bC/m5SU124gEhy4PqjdvJ7DhTYR75NetQ==
-
"@sentry-internal/replay-canvas@8.28.0":
version "8.28.0"
resolved "https://registry.yarnpkg.com/@sentry-internal/replay-canvas/-/replay-canvas-8.28.0.tgz#6a08541f9fecd912b7334c693a403469c9e34a89"
From cc1aea37bf6f8d6ce9b06ee3ff9d0e71d4d3a89b Mon Sep 17 00:00:00 2001
From: Christinarlong <60594860+Christinarlong@users.noreply.github.com>
Date: Wed, 2 Oct 2024 11:29:44 -0700
Subject: [PATCH 002/139] chore(sentry_apps): Move bases file for sentryapps to
sentry_apps (#78096)
---
pyproject.toml | 2 -
src/sentry/api/bases/__init__.py | 1 -
src/sentry/api/bases/sentryapps.py | 507 +----------------
.../sentry_apps/api/bases/sentryapps.py | 512 ++++++++++++++++++
.../api/endpoints/installation_details.py | 2 +-
.../installation_external_issue_actions.py | 2 +-
.../installation_external_issue_details.py | 2 +-
.../endpoints/installation_external_issues.py | 6 +-
.../installation_external_requests.py | 2 +-
.../api/endpoints/organization_sentry_apps.py | 2 +-
.../endpoints/sentry_app_authorizations.py | 2 +-
.../api/endpoints/sentry_app_avatar.py | 2 +-
.../api/endpoints/sentry_app_components.py | 5 +-
.../api/endpoints/sentry_app_details.py | 10 +-
.../api/endpoints/sentry_app_features.py | 2 +-
.../api/endpoints/sentry_app_installations.py | 2 +-
.../api/endpoints/sentry_app_interaction.py | 7 +-
.../endpoints/sentry_app_publish_request.py | 2 +-
.../api/endpoints/sentry_app_requests.py | 5 +-
.../api/endpoints/sentry_app_rotate_secret.py | 2 +-
.../api/endpoints/sentry_app_stats_details.py | 2 +-
.../sentry_apps/api/endpoints/sentry_apps.py | 2 +-
.../api/endpoints/sentry_apps_stats.py | 2 +-
.../sentry_internal_app_token_details.py | 5 +-
.../endpoints/sentry_internal_app_tokens.py | 5 +-
.../sentry/sentry_apps/api/bases/__init__.py | 0
.../api/bases/test_sentryapps.py | 97 +++-
27 files changed, 640 insertions(+), 550 deletions(-)
create mode 100644 src/sentry/sentry_apps/api/bases/sentryapps.py
create mode 100644 tests/sentry/sentry_apps/api/bases/__init__.py
rename tests/sentry/{ => sentry_apps}/api/bases/test_sentryapps.py (67%)
diff --git a/pyproject.toml b/pyproject.toml
index fe28edd98fe5e9..ce544044fa3f61 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -124,7 +124,6 @@ module = [
"sentry.api.bases.organizationmember",
"sentry.api.bases.project",
"sentry.api.bases.project_request_change",
- "sentry.api.bases.sentryapps",
"sentry.api.bases.team",
"sentry.api.endpoints.accept_organization_invite",
"sentry.api.endpoints.auth_config",
@@ -381,7 +380,6 @@ module = [
"sentry_plugins.jira.plugin",
"tests.sentry.api.bases.test_organization",
"tests.sentry.api.bases.test_project",
- "tests.sentry.api.bases.test_sentryapps",
"tests.sentry.api.bases.test_team",
"tests.sentry.api.endpoints.notifications.test_notification_actions_details",
"tests.sentry.api.endpoints.notifications.test_notification_actions_index",
diff --git a/src/sentry/api/bases/__init__.py b/src/sentry/api/bases/__init__.py
index eeb09250096170..bf18019cc99a8b 100644
--- a/src/sentry/api/bases/__init__.py
+++ b/src/sentry/api/bases/__init__.py
@@ -4,5 +4,4 @@
from .organizationmember import * # NOQA
from .project import * # NOQA
from .project_transaction_threshold_override import * # NOQA
-from .sentryapps import * # NOQA
from .team import * # NOQA
diff --git a/src/sentry/api/bases/sentryapps.py b/src/sentry/api/bases/sentryapps.py
index 242c81858f2f25..b1e5512dce5ed4 100644
--- a/src/sentry/api/bases/sentryapps.py
+++ b/src/sentry/api/bases/sentryapps.py
@@ -1,498 +1,13 @@
-from __future__ import annotations
-
-import logging
-from functools import wraps
-from typing import Any
-
-from django.http import Http404
-from rest_framework.exceptions import PermissionDenied
-from rest_framework.permissions import BasePermission
-from rest_framework.request import Request
-from rest_framework.response import Response
-from rest_framework.serializers import ValidationError
-
-from sentry.api.authentication import ClientIdSecretAuthentication
-from sentry.api.base import Endpoint
-from sentry.api.permissions import SentryPermission, StaffPermissionMixin
-from sentry.auth.staff import is_active_staff
-from sentry.auth.superuser import is_active_superuser, superuser_has_permission
-from sentry.coreapi import APIError
-from sentry.integrations.api.bases.integration import PARANOID_GET
-from sentry.middleware.stats import add_request_metric_tags
-from sentry.models.organization import OrganizationStatus
-from sentry.organizations.services.organization import (
- RpcUserOrganizationContext,
- organization_service,
+from sentry.sentry_apps.api.bases.sentryapps import (
+ RegionSentryAppBaseEndpoint,
+ SentryAppBaseEndpoint,
+ SentryAppInstallationBaseEndpoint,
+ SentryAppInstallationsBaseEndpoint,
)
-from sentry.sentry_apps.models.sentry_app import SentryApp
-from sentry.sentry_apps.services.app import RpcSentryApp, app_service
-from sentry.users.services.user import RpcUser
-from sentry.users.services.user.service import user_service
-from sentry.utils.sdk import Scope
-from sentry.utils.strings import to_single_line_str
-
-COMPONENT_TYPES = ["stacktrace-link", "issue-link"]
-
-logger = logging.getLogger(__name__)
-
-
-def catch_raised_errors(func):
- @wraps(func)
- def wrapped(self, *args, **kwargs):
- try:
- return func(self, *args, **kwargs)
- except APIError as e:
- return Response({"detail": e.msg}, status=400)
-
- return wrapped
-
-
-def ensure_scoped_permission(request, allowed_scopes):
- """
- Verifies the User making the request has at least one required scope for
- the endpoint being requested.
-
- If no scopes were specified in a ``scope_map``, it means the endpoint should
- not be accessible. That is, this function expects every accessible endpoint
- to have a list of scopes.
-
- That list of scopes may be empty, implying that the User does not need any
- specific scope and the endpoint is public.
- """
- # If no scopes were found at all, the endpoint should not be accessible.
- if allowed_scopes is None:
- return False
-
- # If there are no scopes listed, it implies a public endpoint.
- if len(allowed_scopes) == 0:
- return True
-
- return any(request.access.has_scope(s) for s in set(allowed_scopes))
-
-
-def add_integration_platform_metric_tag(func):
- @wraps(func)
- def wrapped(self, *args, **kwargs):
- add_request_metric_tags(self.request, integration_platform=True)
- return func(self, *args, **kwargs)
-
- return wrapped
-
-
-class SentryAppsPermission(SentryPermission):
- scope_map = {
- "GET": PARANOID_GET,
- "POST": ("org:write", "org:admin"),
- }
-
- def has_object_permission(self, request: Request, view, context: RpcUserOrganizationContext):
- if not hasattr(request, "user") or not request.user:
- return False
-
- self.determine_access(request, context)
-
- if superuser_has_permission(request):
- return True
-
- # User must be a part of the Org they're trying to create the app in.
- if context.organization.status != OrganizationStatus.ACTIVE or not context.member:
- raise Http404
-
- return ensure_scoped_permission(request, self.scope_map.get(request.method))
-
-
-class SentryAppsAndStaffPermission(StaffPermissionMixin, SentryAppsPermission):
- """Allows staff to access the GET method of sentry apps endpoints."""
-
- staff_allowed_methods = {"GET"}
-
-
-class IntegrationPlatformEndpoint(Endpoint):
- def dispatch(self, request, *args, **kwargs):
- add_request_metric_tags(request, integration_platform=True)
- return super().dispatch(request, *args, **kwargs)
-
-
-class SentryAppsBaseEndpoint(IntegrationPlatformEndpoint):
- permission_classes: tuple[type[BasePermission], ...] = (SentryAppsAndStaffPermission,)
-
- def _get_organization_slug(self, request: Request):
- organization_slug = request.json_body.get("organization")
- if not organization_slug or not isinstance(organization_slug, str):
- error_message = "Please provide a valid value for the 'organization' field."
- raise ValidationError({"organization": to_single_line_str(error_message)})
- return organization_slug
-
- def _get_organization_for_superuser_or_staff(
- self, user: RpcUser, organization_slug: str
- ) -> RpcUserOrganizationContext:
- context = organization_service.get_organization_by_slug(
- slug=organization_slug, only_visible=False, user_id=user.id
- )
-
- if context is None:
- error_message = f"Organization '{organization_slug}' does not exist."
- raise ValidationError({"organization": to_single_line_str(error_message)})
-
- return context
-
- def _get_organization_for_user(
- self, user: RpcUser, organization_slug: str
- ) -> RpcUserOrganizationContext:
- context = organization_service.get_organization_by_slug(
- slug=organization_slug, only_visible=True, user_id=user.id
- )
- if context is None or context.member is None:
- error_message = f"User does not belong to the '{organization_slug}' organization."
- raise PermissionDenied(to_single_line_str(error_message))
- return context
-
- def _get_org_context(self, request: Request) -> RpcUserOrganizationContext:
- organization_slug = self._get_organization_slug(request)
- if is_active_superuser(request) or is_active_staff(request):
- return self._get_organization_for_superuser_or_staff(request.user, organization_slug)
- else:
- return self._get_organization_for_user(request.user, organization_slug)
-
- def convert_args(self, request: Request, *args, **kwargs):
- """
- This baseclass is the SentryApp collection endpoints:
-
- [GET, POST] /sentry-apps
-
- The GET endpoint is public and doesn't require (or handle) any query
- params or request body.
-
- The POST endpoint is for creating a Sentry App. Part of that creation
- is associating it with the Organization that it's created within.
-
- So in the case of POST requests, we want to pull the Organization out
- of the request body so that we can ensure the User making the request
- has access to it.
-
- Since ``convert_args`` is conventionally where you materialize model
- objects from URI params, we're applying the same logic for a param in
- the request body.
- """
- if not request.json_body:
- return (args, kwargs)
-
- context = self._get_org_context(request)
- self.check_object_permissions(request, context)
- kwargs["organization"] = context.organization
-
- return (args, kwargs)
-
-
-class SentryAppPermission(SentryPermission):
- unpublished_scope_map = {
- "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
- "PUT": ("org:write", "org:admin"),
- "POST": ("org:admin",), # used for publishing an app
- "DELETE": ("org:admin",),
- }
-
- published_scope_map = {
- "GET": PARANOID_GET,
- "PUT": ("org:write", "org:admin"),
- "POST": ("org:admin",),
- "DELETE": ("org:admin",),
- }
-
- @property
- def scope_map(self):
- return self.published_scope_map
-
- def has_object_permission(self, request: Request, view, sentry_app: RpcSentryApp | SentryApp):
- if not hasattr(request, "user") or not request.user:
- return False
-
- owner_app = organization_service.get_organization_by_id(
- id=sentry_app.owner_id, user_id=request.user.id
- )
- self.determine_access(request, owner_app)
-
- if superuser_has_permission(request):
- return True
-
- organizations = (
- user_service.get_organizations(user_id=request.user.id)
- if request.user.id is not None
- else ()
- )
- # if app is unpublished, user must be in the Org who owns the app.
- if not sentry_app.is_published:
- if not any(sentry_app.owner_id == org.id for org in organizations):
- raise Http404
-
- # TODO(meredith): make a better way to allow for public
- # endpoints. we can't use ensure_scoped_permission now
- # that the public endpoint isn't denoted by '()'
- if sentry_app.is_published and request.method == "GET":
- return True
-
- return ensure_scoped_permission(
- request, self._scopes_for_sentry_app(sentry_app).get(request.method)
- )
-
- def _scopes_for_sentry_app(self, sentry_app):
- if sentry_app.is_published:
- return self.published_scope_map
- else:
- return self.unpublished_scope_map
-
-
-class SentryAppAndStaffPermission(StaffPermissionMixin, SentryAppPermission):
- """Allows staff to access sentry app endpoints. Note that this is used for
- endpoints acting on a single sentry app only."""
-
- pass
-
-
-class SentryAppBaseEndpoint(IntegrationPlatformEndpoint):
- permission_classes: tuple[type[BasePermission], ...] = (SentryAppPermission,)
- def convert_args(
- self, request: Request, sentry_app_id_or_slug: int | str, *args: Any, **kwargs: Any
- ):
- try:
- sentry_app = SentryApp.objects.get(slug__id_or_slug=sentry_app_id_or_slug)
- except SentryApp.DoesNotExist:
- raise Http404
-
- self.check_object_permissions(request, sentry_app)
-
- Scope.get_isolation_scope().set_tag("sentry_app", sentry_app.slug)
-
- kwargs["sentry_app"] = sentry_app
- return (args, kwargs)
-
-
-class RegionSentryAppBaseEndpoint(IntegrationPlatformEndpoint):
- def convert_args(
- self, request: Request, sentry_app_id_or_slug: int | str, *args: Any, **kwargs: Any
- ):
- if str(sentry_app_id_or_slug).isdecimal():
- sentry_app = app_service.get_sentry_app_by_id(id=int(sentry_app_id_or_slug))
- else:
- sentry_app = app_service.get_sentry_app_by_slug(slug=sentry_app_id_or_slug)
- if sentry_app is None:
- raise Http404
-
- self.check_object_permissions(request, sentry_app)
-
- Scope.get_isolation_scope().set_tag("sentry_app", sentry_app.slug)
-
- kwargs["sentry_app"] = sentry_app
- return (args, kwargs)
-
-
-class SentryAppInstallationsPermission(SentryPermission):
- scope_map = {
- "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
- "POST": ("org:integrations", "org:write", "org:admin"),
- }
-
- def has_object_permission(self, request: Request, view, organization):
- if not hasattr(request, "user") or not request.user:
- return False
-
- self.determine_access(request, organization)
-
- if superuser_has_permission(request):
- return True
-
- organizations = (
- user_service.get_organizations(user_id=request.user.id)
- if request.user.id is not None
- else ()
- )
- if not any(organization.id == org.id for org in organizations):
- raise Http404
-
- return ensure_scoped_permission(request, self.scope_map.get(request.method))
-
-
-class SentryAppInstallationsBaseEndpoint(IntegrationPlatformEndpoint):
- permission_classes = (SentryAppInstallationsPermission,)
-
- def convert_args(self, request: Request, organization_id_or_slug, *args, **kwargs):
- extra_args = {}
- # We need to pass user_id if the user is not a superuser
- if not is_active_superuser(request):
- extra_args["user_id"] = request.user.id
-
- if str(organization_id_or_slug).isdecimal():
- organization = organization_service.get_org_by_id(
- id=int(organization_id_or_slug), **extra_args
- )
- else:
- organization = organization_service.get_org_by_slug(
- slug=str(organization_id_or_slug), **extra_args
- )
-
- if organization is None:
- raise Http404
- self.check_object_permissions(request, organization)
-
- kwargs["organization"] = organization
- return (args, kwargs)
-
-
-class SentryAppInstallationPermission(SentryPermission):
- scope_map = {
- "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
- "DELETE": ("org:integrations", "org:write", "org:admin"),
- # NOTE(mn): The only POST endpoint right now is to create External
- # Issues, which uses this baseclass since it's nested under an
- # installation.
- #
- # The scopes below really only make sense for that endpoint. Any other
- # nested endpoints will probably need different scopes - figure out how
- # to deal with that when it happens.
- "POST": ("org:integrations", "event:write", "event:admin"),
- }
-
- def has_permission(self, request: Request, *args, **kwargs):
- # To let the app mark the installation as installed, we don't care about permissions
- if (
- hasattr(request, "user")
- and hasattr(request.user, "is_sentry_app")
- and request.user.is_sentry_app
- and request.method == "PUT"
- ):
- return True
- return super().has_permission(request, *args, **kwargs)
-
- def has_object_permission(self, request: Request, view, installation):
- if not hasattr(request, "user") or not request.user:
- return False
-
- self.determine_access(request, installation.organization_id)
-
- if superuser_has_permission(request):
- return True
-
- # if user is an app, make sure it's for that same app
- if request.user.is_sentry_app:
- return request.user.id == installation.sentry_app.proxy_user_id
-
- org_context = organization_service.get_organization_by_id(
- id=installation.organization_id,
- user_id=request.user.id,
- include_teams=False,
- include_projects=False,
- )
- if (
- org_context.member is None
- or org_context.organization.status != OrganizationStatus.ACTIVE
- ):
- raise Http404
-
- return ensure_scoped_permission(request, self.scope_map.get(request.method))
-
-
-class SentryAppInstallationBaseEndpoint(IntegrationPlatformEndpoint):
- permission_classes: tuple[type[BasePermission], ...] = (SentryAppInstallationPermission,)
-
- def convert_args(self, request: Request, uuid, *args, **kwargs):
- installations = app_service.get_many(filter=dict(uuids=[uuid]))
- installation = installations[0] if installations else None
- if installation is None:
- raise Http404
-
- self.check_object_permissions(request, installation)
-
- Scope.get_isolation_scope().set_tag("sentry_app_installation", installation.uuid)
-
- kwargs["installation"] = installation
- return (args, kwargs)
-
-
-class SentryAppInstallationExternalIssuePermission(SentryAppInstallationPermission):
- scope_map = {
- "POST": ("event:read", "event:write", "event:admin"),
- "DELETE": ("event:admin",),
- }
-
-
-class SentryAppInstallationExternalIssueBaseEndpoint(SentryAppInstallationBaseEndpoint):
- permission_classes = (SentryAppInstallationExternalIssuePermission,)
-
-
-class SentryAppAuthorizationsPermission(SentryPermission):
- def has_object_permission(self, request: Request, view, installation):
- if not hasattr(request, "user") or not request.user:
- return False
-
- installation_org_context = organization_service.get_organization_by_id(
- id=installation.organization_id, user_id=request.user.id
- )
- self.determine_access(request, installation_org_context)
-
- if not request.user.is_sentry_app:
- return False
-
- # Request must be made as the app's Proxy User, using their Client ID
- # and Secret.
- return request.user.id == installation.sentry_app.proxy_user_id
-
-
-class SentryAppAuthorizationsBaseEndpoint(SentryAppInstallationBaseEndpoint):
- authentication_classes = (ClientIdSecretAuthentication,)
- permission_classes = (SentryAppAuthorizationsPermission,)
-
-
-class SentryInternalAppTokenPermission(SentryPermission):
- scope_map = {
- "GET": ("org:write", "org:admin"),
- "POST": ("org:write", "org:admin"),
- "DELETE": ("org:write", "org:admin"),
- }
-
- def has_object_permission(self, request: Request, view, sentry_app):
- if not hasattr(request, "user") or not request.user:
- return False
-
- owner_app = organization_service.get_organization_by_id(
- id=sentry_app.owner_id, user_id=request.user.id
- )
- self.determine_access(request, owner_app)
-
- if superuser_has_permission(request):
- return True
-
- return ensure_scoped_permission(request, self.scope_map.get(request.method))
-
-
-class SentryAppStatsPermission(SentryPermission):
- scope_map = {
- "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
- # Anyone logged in can increment the stats, so leave the scopes empty
- # Note: this only works for session-based auth so you cannot increment stats through API
- "POST": (),
- }
-
- def has_object_permission(self, request: Request, view, sentry_app: SentryApp | RpcSentryApp):
- if not hasattr(request, "user") or not request.user:
- return False
-
- owner_app = organization_service.get_organization_by_id(
- id=sentry_app.owner_id, user_id=request.user.id
- )
- if owner_app is None:
- logger.error(
- "sentry_app_stats.permission_org_not_found",
- extra={
- "sentry_app_id": sentry_app.id,
- "owner_org_id": sentry_app.owner_id,
- "user_id": request.user.id,
- },
- )
- return False
- self.determine_access(request, owner_app)
-
- if is_active_superuser(request):
- return True
-
- return ensure_scoped_permission(request, self.scope_map.get(request.method))
+__all__ = (
+ "SentryAppBaseEndpoint",
+ "RegionSentryAppBaseEndpoint",
+ "SentryAppInstallationBaseEndpoint",
+ "SentryAppInstallationsBaseEndpoint",
+)
diff --git a/src/sentry/sentry_apps/api/bases/sentryapps.py b/src/sentry/sentry_apps/api/bases/sentryapps.py
new file mode 100644
index 00000000000000..1ce641f943707d
--- /dev/null
+++ b/src/sentry/sentry_apps/api/bases/sentryapps.py
@@ -0,0 +1,512 @@
+from __future__ import annotations
+
+import logging
+from collections.abc import Sequence
+from functools import wraps
+from typing import Any
+
+from django.http import Http404
+from rest_framework.exceptions import PermissionDenied
+from rest_framework.permissions import BasePermission
+from rest_framework.request import Request
+from rest_framework.response import Response
+from rest_framework.serializers import ValidationError
+
+from sentry.api.authentication import ClientIdSecretAuthentication
+from sentry.api.base import Endpoint
+from sentry.api.permissions import SentryPermission, StaffPermissionMixin
+from sentry.auth.staff import is_active_staff
+from sentry.auth.superuser import is_active_superuser, superuser_has_permission
+from sentry.coreapi import APIError
+from sentry.integrations.api.bases.integration import PARANOID_GET
+from sentry.middleware.stats import add_request_metric_tags
+from sentry.models.organization import OrganizationStatus
+from sentry.organizations.services.organization import (
+ RpcUserOrganizationContext,
+ organization_service,
+)
+from sentry.sentry_apps.models.sentry_app import SentryApp
+from sentry.sentry_apps.services.app import RpcSentryApp, app_service
+from sentry.users.models.user import User
+from sentry.users.services.user import RpcUser
+from sentry.users.services.user.service import user_service
+from sentry.utils.sdk import Scope
+from sentry.utils.strings import to_single_line_str
+
+COMPONENT_TYPES = ["stacktrace-link", "issue-link"]
+
+logger = logging.getLogger(__name__)
+
+
+def catch_raised_errors(func):
+ @wraps(func)
+ def wrapped(self, *args, **kwargs):
+ try:
+ return func(self, *args, **kwargs)
+ except APIError as e:
+ return Response({"detail": e.msg}, status=400)
+
+ return wrapped
+
+
+def ensure_scoped_permission(request: Request, allowed_scopes: Sequence[str] | None) -> bool:
+ """
+ Verifies the User making the request has at least one required scope for
+ the endpoint being requested.
+
+ If no scopes were specified in a ``scope_map``, it means the endpoint should
+ not be accessible. That is, this function expects every accessible endpoint
+ to have a list of scopes.
+
+ That list of scopes may be empty, implying that the User does not need any
+ specific scope and the endpoint is public.
+ """
+ # If no scopes were found at all, the endpoint should not be accessible.
+ if allowed_scopes is None:
+ return False
+
+ # If there are no scopes listed, it implies a public endpoint.
+ if len(allowed_scopes) == 0:
+ return True
+
+ return any(request.access.has_scope(s) for s in set(allowed_scopes))
+
+
+def add_integration_platform_metric_tag(func):
+ @wraps(func)
+ def wrapped(self, *args, **kwargs):
+ add_request_metric_tags(self.request, integration_platform=True)
+ return func(self, *args, **kwargs)
+
+ return wrapped
+
+
+class SentryAppsPermission(SentryPermission):
+ scope_map = {
+ "GET": PARANOID_GET,
+ "POST": ("org:write", "org:admin"),
+ }
+
+ def has_object_permission(self, request: Request, view, context: RpcUserOrganizationContext):
+ if not hasattr(request, "user") or not request.user:
+ return False
+
+ self.determine_access(request, context)
+
+ if superuser_has_permission(request):
+ return True
+
+ # User must be a part of the Org they're trying to create the app in.
+ if context.organization.status != OrganizationStatus.ACTIVE or not context.member:
+ raise Http404
+
+ assert request.method, "method must be present in request to get permissions"
+ return ensure_scoped_permission(request, self.scope_map.get(request.method))
+
+
+class SentryAppsAndStaffPermission(StaffPermissionMixin, SentryAppsPermission):
+ """Allows staff to access the GET method of sentry apps endpoints."""
+
+ staff_allowed_methods = {"GET"}
+
+
+class IntegrationPlatformEndpoint(Endpoint):
+ def dispatch(self, request, *args, **kwargs):
+ add_request_metric_tags(request, integration_platform=True)
+ return super().dispatch(request, *args, **kwargs)
+
+
+class SentryAppsBaseEndpoint(IntegrationPlatformEndpoint):
+ permission_classes: tuple[type[BasePermission], ...] = (SentryAppsAndStaffPermission,)
+
+ def _get_organization_slug(self, request: Request):
+ organization_slug = request.json_body.get("organization")
+ if not organization_slug or not isinstance(organization_slug, str):
+ error_message = "Please provide a valid value for the 'organization' field."
+ raise ValidationError({"organization": to_single_line_str(error_message)})
+ return organization_slug
+
+ def _get_organization_for_superuser_or_staff(
+ self, user: RpcUser | User, organization_slug: str
+ ) -> RpcUserOrganizationContext:
+ context = organization_service.get_organization_by_slug(
+ slug=organization_slug, only_visible=False, user_id=user.id
+ )
+
+ if context is None:
+ error_message = f"Organization '{organization_slug}' does not exist."
+ raise ValidationError({"organization": to_single_line_str(error_message)})
+
+ return context
+
+ def _get_organization_for_user(
+ self, user: RpcUser | User, organization_slug: str
+ ) -> RpcUserOrganizationContext:
+ context = organization_service.get_organization_by_slug(
+ slug=organization_slug, only_visible=True, user_id=user.id
+ )
+ if context is None or context.member is None:
+ error_message = f"User does not belong to the '{organization_slug}' organization."
+ raise PermissionDenied(to_single_line_str(error_message))
+ return context
+
+ def _get_org_context(self, request: Request) -> RpcUserOrganizationContext:
+ organization_slug = self._get_organization_slug(request)
+ assert request.user.is_authenticated, "User must be authenticated to get organization"
+
+ if is_active_superuser(request) or is_active_staff(request):
+ return self._get_organization_for_superuser_or_staff(request.user, organization_slug)
+ else:
+ return self._get_organization_for_user(request.user, organization_slug)
+
+ def convert_args(self, request: Request, *args, **kwargs):
+ """
+ This baseclass is the SentryApp collection endpoints:
+
+ [GET, POST] /sentry-apps
+
+ The GET endpoint is public and doesn't require (or handle) any query
+ params or request body.
+
+ The POST endpoint is for creating a Sentry App. Part of that creation
+ is associating it with the Organization that it's created within.
+
+ So in the case of POST requests, we want to pull the Organization out
+ of the request body so that we can ensure the User making the request
+ has access to it.
+
+ Since ``convert_args`` is conventionally where you materialize model
+ objects from URI params, we're applying the same logic for a param in
+ the request body.
+ """
+ if not request.json_body:
+ return (args, kwargs)
+
+ context = self._get_org_context(request)
+ self.check_object_permissions(request, context)
+ kwargs["organization"] = context.organization
+
+ return (args, kwargs)
+
+
+class SentryAppPermission(SentryPermission):
+ unpublished_scope_map = {
+ "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
+ "PUT": ("org:write", "org:admin"),
+ "POST": ("org:admin",), # used for publishing an app
+ "DELETE": ("org:admin",),
+ }
+
+ published_scope_map = {
+ "GET": PARANOID_GET,
+ "PUT": ("org:write", "org:admin"),
+ "POST": ("org:admin",),
+ "DELETE": ("org:admin",),
+ }
+
+ @property
+ def scope_map(self):
+ return self.published_scope_map
+
+ def has_object_permission(self, request: Request, view, sentry_app: RpcSentryApp | SentryApp):
+ if not hasattr(request, "user") or not request.user:
+ return False
+
+ owner_app = organization_service.get_organization_by_id(
+ id=sentry_app.owner_id, user_id=request.user.id
+ )
+ assert owner_app, f"owner organization for {sentry_app.name} was not found"
+ self.determine_access(request, owner_app)
+
+ if superuser_has_permission(request):
+ return True
+
+ organizations = (
+ user_service.get_organizations(user_id=request.user.id)
+ if request.user.id is not None
+ else ()
+ )
+ # if app is unpublished, user must be in the Org who owns the app.
+ if not sentry_app.is_published:
+ if not any(sentry_app.owner_id == org.id for org in organizations):
+ raise Http404
+
+ # TODO(meredith): make a better way to allow for public
+ # endpoints. we can't use ensure_scoped_permission now
+ # that the public endpoint isn't denoted by '()'
+ if sentry_app.is_published and request.method == "GET":
+ return True
+
+ return ensure_scoped_permission(
+ request, self._scopes_for_sentry_app(sentry_app).get(request.method)
+ )
+
+ def _scopes_for_sentry_app(self, sentry_app):
+ if sentry_app.is_published:
+ return self.published_scope_map
+ else:
+ return self.unpublished_scope_map
+
+
+class SentryAppAndStaffPermission(StaffPermissionMixin, SentryAppPermission):
+ """Allows staff to access sentry app endpoints. Note that this is used for
+ endpoints acting on a single sentry app only."""
+
+ pass
+
+
+class SentryAppBaseEndpoint(IntegrationPlatformEndpoint):
+ permission_classes: tuple[type[BasePermission], ...] = (SentryAppPermission,)
+
+ def convert_args(
+ self, request: Request, sentry_app_id_or_slug: int | str, *args: Any, **kwargs: Any
+ ):
+ try:
+ sentry_app = SentryApp.objects.get(slug__id_or_slug=sentry_app_id_or_slug)
+ except SentryApp.DoesNotExist:
+ raise Http404
+
+ self.check_object_permissions(request, sentry_app)
+
+ Scope.get_isolation_scope().set_tag("sentry_app", sentry_app.slug)
+
+ kwargs["sentry_app"] = sentry_app
+ return (args, kwargs)
+
+
+class RegionSentryAppBaseEndpoint(IntegrationPlatformEndpoint):
+ def convert_args(
+ self, request: Request, sentry_app_id_or_slug: int | str, *args: Any, **kwargs: Any
+ ):
+ if str(sentry_app_id_or_slug).isdecimal():
+ sentry_app = app_service.get_sentry_app_by_id(id=int(sentry_app_id_or_slug))
+ else:
+ sentry_app = app_service.get_sentry_app_by_slug(slug=sentry_app_id_or_slug)
+ if sentry_app is None:
+ raise Http404
+
+ self.check_object_permissions(request, sentry_app)
+
+ Scope.get_isolation_scope().set_tag("sentry_app", sentry_app.slug)
+
+ kwargs["sentry_app"] = sentry_app
+ return (args, kwargs)
+
+
+class SentryAppInstallationsPermission(SentryPermission):
+ scope_map = {
+ "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
+ "POST": ("org:integrations", "org:write", "org:admin"),
+ }
+
+ def has_object_permission(self, request: Request, view, organization):
+ if not hasattr(request, "user") or not request.user:
+ return False
+
+ self.determine_access(request, organization)
+
+ if superuser_has_permission(request):
+ return True
+
+ organizations = (
+ user_service.get_organizations(user_id=request.user.id)
+ if request.user.id is not None
+ else ()
+ )
+ if not any(organization.id == org.id for org in organizations):
+ raise Http404
+
+ assert request.method, "method must be present in request to get permissions"
+ return ensure_scoped_permission(request, self.scope_map.get(request.method))
+
+
+class SentryAppInstallationsBaseEndpoint(IntegrationPlatformEndpoint):
+ permission_classes = (SentryAppInstallationsPermission,)
+
+ def convert_args(self, request: Request, organization_id_or_slug, *args, **kwargs):
+ extra_args = {}
+ # We need to pass user_id if the user is not a superuser
+ if not is_active_superuser(request):
+ extra_args["user_id"] = request.user.id
+
+ if str(organization_id_or_slug).isdecimal():
+ organization = organization_service.get_org_by_id(
+ id=int(organization_id_or_slug), **extra_args
+ )
+ else:
+ organization = organization_service.get_org_by_slug(
+ slug=str(organization_id_or_slug), **extra_args
+ )
+
+ if organization is None:
+ raise Http404
+ self.check_object_permissions(request, organization)
+
+ kwargs["organization"] = organization
+ return (args, kwargs)
+
+
+class SentryAppInstallationPermission(SentryPermission):
+ scope_map = {
+ "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
+ "DELETE": ("org:integrations", "org:write", "org:admin"),
+ # NOTE(mn): The only POST endpoint right now is to create External
+ # Issues, which uses this baseclass since it's nested under an
+ # installation.
+ #
+ # The scopes below really only make sense for that endpoint. Any other
+ # nested endpoints will probably need different scopes - figure out how
+ # to deal with that when it happens.
+ "POST": ("org:integrations", "event:write", "event:admin"),
+ }
+
+ def has_permission(self, request: Request, *args, **kwargs):
+ # To let the app mark the installation as installed, we don't care about permissions
+ if (
+ hasattr(request, "user")
+ and hasattr(request.user, "is_sentry_app")
+ and request.user.is_sentry_app
+ and request.method == "PUT"
+ ):
+ return True
+ return super().has_permission(request, *args, **kwargs)
+
+ def has_object_permission(self, request: Request, view, installation):
+ if not hasattr(request, "user") or not request.user or not request.user.is_authenticated:
+ return False
+
+ self.determine_access(request, installation.organization_id)
+
+ if superuser_has_permission(request):
+ return True
+
+ # if user is an app, make sure it's for that same app
+ if request.user.is_sentry_app:
+ return request.user.id == installation.sentry_app.proxy_user_id
+
+ org_context = organization_service.get_organization_by_id(
+ id=installation.organization_id,
+ user_id=request.user.id,
+ include_teams=False,
+ include_projects=False,
+ )
+ if (
+ not org_context
+ or not org_context.member
+ or org_context.organization.status != OrganizationStatus.ACTIVE
+ ):
+ raise Http404
+
+ assert request.method, "method must be present in request to get permissions"
+ return ensure_scoped_permission(request, self.scope_map.get(request.method))
+
+
+class SentryAppInstallationBaseEndpoint(IntegrationPlatformEndpoint):
+ permission_classes: tuple[type[BasePermission], ...] = (SentryAppInstallationPermission,)
+
+ def convert_args(self, request: Request, uuid, *args, **kwargs):
+ installations = app_service.get_many(filter=dict(uuids=[uuid]))
+ installation = installations[0] if installations else None
+ if installation is None:
+ raise Http404
+
+ self.check_object_permissions(request, installation)
+
+ Scope.get_isolation_scope().set_tag("sentry_app_installation", installation.uuid)
+
+ kwargs["installation"] = installation
+ return (args, kwargs)
+
+
+class SentryAppInstallationExternalIssuePermission(SentryAppInstallationPermission):
+ scope_map = {
+ "POST": ("event:read", "event:write", "event:admin"),
+ "DELETE": ("event:admin",),
+ }
+
+
+class SentryAppInstallationExternalIssueBaseEndpoint(SentryAppInstallationBaseEndpoint):
+ permission_classes = (SentryAppInstallationExternalIssuePermission,)
+
+
+class SentryAppAuthorizationsPermission(SentryPermission):
+ def has_object_permission(self, request: Request, view, installation):
+ if not hasattr(request, "user") or not request.user:
+ return False
+
+ installation_org_context = organization_service.get_organization_by_id(
+ id=installation.organization_id, user_id=request.user.id
+ )
+ assert installation_org_context, "organization for installation was not found"
+ self.determine_access(request, installation_org_context)
+
+ if not request.user.is_authenticated or not request.user.is_sentry_app:
+ return False
+
+ # Request must be made as the app's Proxy User, using their Client ID
+ # and Secret.
+ return request.user.id == installation.sentry_app.proxy_user_id
+
+
+class SentryAppAuthorizationsBaseEndpoint(SentryAppInstallationBaseEndpoint):
+ authentication_classes = (ClientIdSecretAuthentication,)
+ permission_classes = (SentryAppAuthorizationsPermission,)
+
+
+class SentryInternalAppTokenPermission(SentryPermission):
+ scope_map = {
+ "GET": ("org:write", "org:admin"),
+ "POST": ("org:write", "org:admin"),
+ "DELETE": ("org:write", "org:admin"),
+ }
+
+ def has_object_permission(self, request: Request, view, sentry_app):
+ if not hasattr(request, "user") or not request.user:
+ return False
+
+ owner_app = organization_service.get_organization_by_id(
+ id=sentry_app.owner_id, user_id=request.user.id
+ )
+
+ assert owner_app, "Failed to get organization/owner_app to check in has_object_permission"
+ self.determine_access(request, owner_app)
+
+ if superuser_has_permission(request):
+ return True
+
+ assert request.method, "method must be present in request to get permissions"
+ return ensure_scoped_permission(request, self.scope_map.get(request.method))
+
+
+class SentryAppStatsPermission(SentryPermission):
+ scope_map = {
+ "GET": ("org:read", "org:integrations", "org:write", "org:admin"),
+ # Anyone logged in can increment the stats, so leave the scopes empty
+ # Note: this only works for session-based auth so you cannot increment stats through API
+ "POST": (),
+ }
+
+ def has_object_permission(self, request: Request, view, sentry_app: SentryApp | RpcSentryApp):
+ if not hasattr(request, "user") or not request.user:
+ return False
+
+ owner_app = organization_service.get_organization_by_id(
+ id=sentry_app.owner_id, user_id=request.user.id
+ )
+ if owner_app is None:
+ logger.error(
+ "sentry_app_stats.permission_org_not_found",
+ extra={
+ "sentry_app_id": sentry_app.id,
+ "owner_org_id": sentry_app.owner_id,
+ "user_id": request.user.id,
+ },
+ )
+ return False
+ self.determine_access(request, owner_app)
+
+ if is_active_superuser(request):
+ return True
+
+ assert request.method, "method must be present in request to get permissions"
+ return ensure_scoped_permission(request, self.scope_map.get(request.method))
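For orientation, a minimal usage sketch (not part of this patch; the endpoint name and response shape are illustrative) of how an endpoint built on `SentryAppInstallationBaseEndpoint` receives the installation resolved by `convert_args`:
```python
from rest_framework.request import Request
from rest_framework.response import Response


class ExampleInstallationInfoEndpoint(SentryAppInstallationBaseEndpoint):
    # Hypothetical endpoint: convert_args() has already looked up the
    # installation by uuid and run SentryAppInstallationPermission checks.
    def get(self, request: Request, installation) -> Response:
        return Response({"uuid": installation.uuid})
```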
diff --git a/src/sentry/sentry_apps/api/endpoints/installation_details.py b/src/sentry/sentry_apps/api/endpoints/installation_details.py
index 19bb9b401723a8..fa7217a82d2958 100644
--- a/src/sentry/sentry_apps/api/endpoints/installation_details.py
+++ b/src/sentry/sentry_apps/api/endpoints/installation_details.py
@@ -8,10 +8,10 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppInstallationBaseEndpoint
from sentry.api.serializers import serialize
from sentry.mediators.sentry_app_installations.installation_notifier import InstallationNotifier
from sentry.mediators.sentry_app_installations.updater import Updater
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppInstallationBaseEndpoint
from sentry.sentry_apps.api.parsers.sentry_app_installation import SentryAppInstallationParser
from sentry.sentry_apps.api.serializers.sentry_app_installation import (
SentryAppInstallationSerializer,
diff --git a/src/sentry/sentry_apps/api/endpoints/installation_external_issue_actions.py b/src/sentry/sentry_apps/api/endpoints/installation_external_issue_actions.py
index 7afce0c8b8f416..83b4b643ad5a44 100644
--- a/src/sentry/sentry_apps/api/endpoints/installation_external_issue_actions.py
+++ b/src/sentry/sentry_apps/api/endpoints/installation_external_issue_actions.py
@@ -5,11 +5,11 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
-from sentry.api.bases import SentryAppInstallationBaseEndpoint
from sentry.api.serializers import serialize
from sentry.mediators.external_issues.issue_link_creator import IssueLinkCreator
from sentry.models.group import Group
from sentry.models.project import Project
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppInstallationBaseEndpoint
from sentry.sentry_apps.api.serializers.platform_external_issue import (
PlatformExternalIssueSerializer,
)
diff --git a/src/sentry/sentry_apps/api/endpoints/installation_external_issue_details.py b/src/sentry/sentry_apps/api/endpoints/installation_external_issue_details.py
index 4cd3a5c820c689..3bbd636b84623d 100644
--- a/src/sentry/sentry_apps/api/endpoints/installation_external_issue_details.py
+++ b/src/sentry/sentry_apps/api/endpoints/installation_external_issue_details.py
@@ -4,7 +4,7 @@
from sentry import deletions
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
-from sentry.api.bases import (
+from sentry.sentry_apps.api.bases.sentryapps import (
SentryAppInstallationExternalIssueBaseEndpoint as ExternalIssueBaseEndpoint,
)
from sentry.sentry_apps.models.platformexternalissue import PlatformExternalIssue
diff --git a/src/sentry/sentry_apps/api/endpoints/installation_external_issues.py b/src/sentry/sentry_apps/api/endpoints/installation_external_issues.py
index 868ef2929dcd94..aeeb1542126ca7 100644
--- a/src/sentry/sentry_apps/api/endpoints/installation_external_issues.py
+++ b/src/sentry/sentry_apps/api/endpoints/installation_external_issues.py
@@ -5,13 +5,13 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
-from sentry.api.bases import (
- SentryAppInstallationExternalIssueBaseEndpoint as ExternalIssueBaseEndpoint,
-)
from sentry.api.serializers import serialize
from sentry.mediators.external_issues.creator import Creator
from sentry.models.group import Group
from sentry.models.project import Project
+from sentry.sentry_apps.api.bases.sentryapps import (
+ SentryAppInstallationExternalIssueBaseEndpoint as ExternalIssueBaseEndpoint,
+)
from sentry.sentry_apps.api.parsers.sentry_app import URLField
from sentry.sentry_apps.api.serializers.platform_external_issue import (
PlatformExternalIssueSerializer as ResponsePlatformExternalIssueSerializer,
diff --git a/src/sentry/sentry_apps/api/endpoints/installation_external_requests.py b/src/sentry/sentry_apps/api/endpoints/installation_external_requests.py
index c064ca9f8c1789..3c251da6d96c56 100644
--- a/src/sentry/sentry_apps/api/endpoints/installation_external_requests.py
+++ b/src/sentry/sentry_apps/api/endpoints/installation_external_requests.py
@@ -4,9 +4,9 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
-from sentry.api.bases import SentryAppInstallationBaseEndpoint
from sentry.mediators.external_requests.select_requester import SelectRequester
from sentry.models.project import Project
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppInstallationBaseEndpoint
@region_silo_endpoint
diff --git a/src/sentry/sentry_apps/api/endpoints/organization_sentry_apps.py b/src/sentry/sentry_apps/api/endpoints/organization_sentry_apps.py
index 68a03a16d4f15d..13d6e9017fcc7b 100644
--- a/src/sentry/sentry_apps/api/endpoints/organization_sentry_apps.py
+++ b/src/sentry/sentry_apps/api/endpoints/organization_sentry_apps.py
@@ -4,13 +4,13 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import add_integration_platform_metric_tag
from sentry.api.bases.organization import ControlSiloOrganizationEndpoint
from sentry.api.paginator import OffsetPaginator
from sentry.api.serializers import serialize
from sentry.constants import SentryAppStatus
from sentry.organizations.services.organization import RpcOrganization
from sentry.organizations.services.organization.model import RpcUserOrganizationContext
+from sentry.sentry_apps.api.bases.sentryapps import add_integration_platform_metric_tag
from sentry.sentry_apps.api.serializers.sentry_app import (
SentryAppSerializer as ResponseSentryAppSerializer,
)
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py
index af2bf55457f3c5..36cc2c1637246c 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py
@@ -7,13 +7,13 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppAuthorizationsBaseEndpoint
from sentry.api.serializers.models.apitoken import ApiTokenSerializer
from sentry.auth.services.auth.impl import promote_request_api_user
from sentry.coreapi import APIUnauthorized
from sentry.mediators.token_exchange.grant_exchanger import GrantExchanger
from sentry.mediators.token_exchange.refresher import Refresher
from sentry.mediators.token_exchange.util import GrantTypes
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppAuthorizationsBaseEndpoint
logger = logging.getLogger(__name__)
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_avatar.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_avatar.py
index 9b3343945d2c29..71894dc118204f 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_avatar.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_avatar.py
@@ -4,9 +4,9 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppBaseEndpoint
from sentry.api.bases.avatar import AvatarMixin
from sentry.models.avatars.sentry_app_avatar import SentryAppAvatar
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppBaseEndpoint
from sentry.sentry_apps.api.parsers.sentry_app_avatar import SentryAppAvatarParser
from sentry.sentry_apps.api.serializers.sentry_app import SentryAppSerializer
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py
index d9b52a70bba26c..e32767df4a9949 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py
@@ -5,7 +5,6 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppBaseEndpoint, add_integration_platform_metric_tag
from sentry.api.bases.organization import ControlSiloOrganizationEndpoint
from sentry.api.paginator import OffsetPaginator
from sentry.api.serializers import serialize
@@ -14,6 +13,10 @@
RpcOrganization,
RpcUserOrganizationContext,
)
+from sentry.sentry_apps.api.bases.sentryapps import (
+ SentryAppBaseEndpoint,
+ add_integration_platform_metric_tag,
+)
from sentry.sentry_apps.api.serializers.sentry_app_component import SentryAppComponentSerializer
from sentry.sentry_apps.components import SentryAppComponentPreparer
from sentry.sentry_apps.models.sentry_app_component import SentryAppComponent
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_details.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_details.py
index b7cdb9d4994c9f..a4528a9f36473b 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_details.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_details.py
@@ -11,16 +11,16 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases.sentryapps import (
- SentryAppAndStaffPermission,
- SentryAppBaseEndpoint,
- catch_raised_errors,
-)
from sentry.api.serializers import serialize
from sentry.auth.staff import is_active_staff
from sentry.constants import SentryAppStatus
from sentry.mediators.sentry_app_installations.installation_notifier import InstallationNotifier
from sentry.organizations.services.organization import organization_service
+from sentry.sentry_apps.api.bases.sentryapps import (
+ SentryAppAndStaffPermission,
+ SentryAppBaseEndpoint,
+ catch_raised_errors,
+)
from sentry.sentry_apps.api.parsers.sentry_app import SentryAppParser
from sentry.sentry_apps.api.serializers.sentry_app import (
SentryAppSerializer as ResponseSentryAppSerializer,
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_features.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_features.py
index 36496266d9b9d0..c4a7ff4efad003 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_features.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_features.py
@@ -4,13 +4,13 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases.sentryapps import SentryAppBaseEndpoint
from sentry.api.paginator import OffsetPaginator
from sentry.api.serializers import serialize
from sentry.integrations.api.serializers.models.integration_feature import (
IntegrationFeatureSerializer,
)
from sentry.integrations.models.integration_feature import IntegrationFeature, IntegrationTypes
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppBaseEndpoint
@control_silo_endpoint
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_installations.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_installations.py
index 95fb9f03a3bd4d..4cdb526e111825 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_installations.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_installations.py
@@ -6,7 +6,6 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppInstallationsBaseEndpoint
from sentry.api.fields.sentry_slug import SentrySerializerSlugField
from sentry.api.paginator import OffsetPaginator
from sentry.api.serializers import serialize
@@ -14,6 +13,7 @@
from sentry.constants import SENTRY_APP_SLUG_MAX_LENGTH, SentryAppStatus
from sentry.features.exceptions import FeatureNotRegistered
from sentry.integrations.models.integration_feature import IntegrationFeature, IntegrationTypes
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppInstallationsBaseEndpoint
from sentry.sentry_apps.api.serializers.sentry_app_installation import (
SentryAppInstallationSerializer,
)
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_interaction.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_interaction.py
index ed3bd99d45015c..819a0064bee14b 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_interaction.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_interaction.py
@@ -7,8 +7,11 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import StatsMixin, region_silo_endpoint
-from sentry.api.bases import RegionSentryAppBaseEndpoint, SentryAppStatsPermission
-from sentry.api.bases.sentryapps import COMPONENT_TYPES
+from sentry.sentry_apps.api.bases.sentryapps import (
+ COMPONENT_TYPES,
+ RegionSentryAppBaseEndpoint,
+ SentryAppStatsPermission,
+)
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.sentry_apps.services.app import RpcSentryApp, app_service
from sentry.tsdb.base import TSDBModel
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_publish_request.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_publish_request.py
index 38401c4bc70635..5bbf3cf7e05f88 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_publish_request.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_publish_request.py
@@ -7,10 +7,10 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases.sentryapps import COMPONENT_TYPES, SentryAppBaseEndpoint
from sentry.constants import SentryAppStatus
from sentry.models.avatars.sentry_app_avatar import SentryAppAvatar, SentryAppAvatarTypes
from sentry.models.organizationmapping import OrganizationMapping
+from sentry.sentry_apps.api.bases.sentryapps import COMPONENT_TYPES, SentryAppBaseEndpoint
from sentry.sentry_apps.logic import SentryAppUpdater
from sentry.users.models.user import User
from sentry.users.services.user.model import RpcUser
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_requests.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_requests.py
index 9e3577242064b6..b35d079c8a3dd3 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_requests.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_requests.py
@@ -9,9 +9,12 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
-from sentry.api.bases import RegionSentryAppBaseEndpoint, SentryAppStatsPermission
from sentry.api.serializers import serialize
from sentry.models.organization import Organization
+from sentry.sentry_apps.api.bases.sentryapps import (
+ RegionSentryAppBaseEndpoint,
+ SentryAppStatsPermission,
+)
from sentry.sentry_apps.api.serializers.request import RequestSerializer
from sentry.utils.sentry_apps import EXTENDED_VALID_EVENTS, SentryAppWebhookRequestsBuffer
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_rotate_secret.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_rotate_secret.py
index fa171c6f96ca95..c53b5504a9bb11 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_rotate_secret.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_rotate_secret.py
@@ -8,13 +8,13 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases.sentryapps import SentryAppBaseEndpoint
from sentry.api.permissions import SentryPermission
from sentry.api.serializers import serialize
from sentry.auth.superuser import superuser_has_permission
from sentry.constants import SentryAppStatus
from sentry.models.apiapplication import generate_token
from sentry.organizations.services.organization import organization_service
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppBaseEndpoint
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.users.services.user.service import user_service
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_stats_details.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_stats_details.py
index 269cd5a4e74671..f3d1119407e0c6 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_stats_details.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_stats_details.py
@@ -4,7 +4,7 @@
from sentry import tsdb
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import StatsMixin, control_silo_endpoint
-from sentry.api.bases import SentryAppBaseEndpoint, SentryAppStatsPermission
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppBaseEndpoint, SentryAppStatsPermission
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_apps.py b/src/sentry/sentry_apps/api/endpoints/sentry_apps.py
index 31665cef097ff6..efa42e1c3752ed 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_apps.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_apps.py
@@ -9,12 +9,12 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppsBaseEndpoint
from sentry.api.paginator import OffsetPaginator
from sentry.api.serializers import serialize
from sentry.auth.staff import is_active_staff
from sentry.auth.superuser import is_active_superuser
from sentry.constants import SentryAppStatus
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppsBaseEndpoint
from sentry.sentry_apps.api.parsers.sentry_app import SentryAppParser
from sentry.sentry_apps.api.serializers.sentry_app import (
SentryAppSerializer as ResponseSentryAppSerializer,
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py b/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py
index c2a4471d1d6f35..fac06f21cbccba 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_apps_stats.py
@@ -5,10 +5,10 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppsBaseEndpoint
from sentry.api.permissions import SuperuserOrStaffFeatureFlaggedPermission
from sentry.api.serializers import serialize
from sentry.models.avatars.sentry_app_avatar import SentryAppAvatar
+from sentry.sentry_apps.api.bases.sentryapps import SentryAppsBaseEndpoint
from sentry.sentry_apps.models.sentry_app import SentryApp
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_internal_app_token_details.py b/src/sentry/sentry_apps/api/endpoints/sentry_internal_app_token_details.py
index 13b86f04d78b35..9af2f110bcfb22 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_internal_app_token_details.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_internal_app_token_details.py
@@ -8,8 +8,11 @@
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppBaseEndpoint, SentryInternalAppTokenPermission
from sentry.models.apitoken import ApiToken
+from sentry.sentry_apps.api.bases.sentryapps import (
+ SentryAppBaseEndpoint,
+ SentryInternalAppTokenPermission,
+)
from sentry.sentry_apps.api.endpoints.sentry_app_details import PARTNERSHIP_RESTRICTED_ERROR_MESSAGE
from sentry.sentry_apps.models.sentry_app_installation_token import SentryAppInstallationToken
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_internal_app_tokens.py b/src/sentry/sentry_apps/api/endpoints/sentry_internal_app_tokens.py
index a4875673edbbce..826f2f7c686a6a 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_internal_app_tokens.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_internal_app_tokens.py
@@ -6,10 +6,13 @@
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.authentication import SessionNoAuthTokenAuthentication
from sentry.api.base import control_silo_endpoint
-from sentry.api.bases import SentryAppBaseEndpoint, SentryInternalAppTokenPermission
from sentry.api.serializers.models.apitoken import ApiTokenSerializer
from sentry.exceptions import ApiTokenLimitError
from sentry.models.apitoken import ApiToken
+from sentry.sentry_apps.api.bases.sentryapps import (
+ SentryAppBaseEndpoint,
+ SentryInternalAppTokenPermission,
+)
from sentry.sentry_apps.api.endpoints.sentry_app_details import PARTNERSHIP_RESTRICTED_ERROR_MESSAGE
from sentry.sentry_apps.installations import SentryAppInstallationTokenCreator
from sentry.sentry_apps.models.sentry_app import MASKED_VALUE
diff --git a/tests/sentry/sentry_apps/api/bases/__init__.py b/tests/sentry/sentry_apps/api/bases/__init__.py
new file mode 100644
index 00000000000000..e69de29bb2d1d6
diff --git a/tests/sentry/api/bases/test_sentryapps.py b/tests/sentry/sentry_apps/api/bases/test_sentryapps.py
similarity index 67%
rename from tests/sentry/api/bases/test_sentryapps.py
rename to tests/sentry/sentry_apps/api/bases/test_sentryapps.py
index 2cea01bb9e89cd..5d32a6657e8200 100644
--- a/tests/sentry/api/bases/test_sentryapps.py
+++ b/tests/sentry/sentry_apps/api/bases/test_sentryapps.py
@@ -2,10 +2,12 @@
from unittest.mock import Mock, patch
import pytest
+from django.contrib.auth.models import AnonymousUser
from django.http import Http404
from django.test.utils import override_settings
+from rest_framework.request import Request
-from sentry.api.bases.sentryapps import (
+from sentry.sentry_apps.api.bases.sentryapps import (
SentryAppAndStaffPermission,
SentryAppBaseEndpoint,
SentryAppInstallationBaseEndpoint,
@@ -16,14 +18,19 @@
from sentry.testutils.cases import TestCase
from sentry.testutils.helpers.options import override_options
from sentry.testutils.silo import control_silo_test
+from sentry.users.models.user import User
@control_silo_test
class SentryAppPermissionTest(TestCase):
def setUp(self):
+ self.endpoint = SentryAppBaseEndpoint()
self.permission = SentryAppPermission()
+
self.sentry_app = self.create_sentry_app(name="foo", organization=self.organization)
- self.request = self.make_request(user=self.user, method="GET")
+ self.request = self.endpoint.initialize_request(
+ request=self.make_request(user=self.user, method="GET"), endpoint=self.endpoint
+ )
self.superuser = self.create_user(is_superuser=True)
@@ -31,7 +38,10 @@ def test_request_user_is_app_owner_succeeds(self):
assert self.permission.has_object_permission(self.request, None, self.sentry_app)
def test_request_user_is_not_app_owner_fails(self):
- self.request.user = self.create_user()
+ non_owner: User = self.create_user()
+ self.request = self.endpoint.initialize_request(
+ request=self.make_request(user=non_owner, method="GET"), endpoint=self.endpoint
+ )
with pytest.raises(Http404):
self.permission.has_object_permission(self.request, None, self.sentry_app)
@@ -39,26 +49,39 @@ def test_request_user_is_not_app_owner_fails(self):
def test_has_permission(self):
from sentry.models.apitoken import ApiToken
- token = ApiToken.objects.create(user=self.user, scope_list=["event:read", "org:read"])
- self.request = self.make_request(user=None, auth=token, method="GET")
+ token: ApiToken = ApiToken.objects.create(
+ user=self.user, scope_list=["event:read", "org:read"]
+ )
+ request = self.make_request(user=None, auth=token, method="GET")
+
+ # Need to set the token header here, otherwise UserAuthTokenAuthentication cannot find it and auth fails
+ request.META["HTTP_AUTHORIZATION"] = f"Bearer {token.plaintext_token}"
+ self.request = self.endpoint.initialize_request(request=request, endpoint=self.endpoint)
+
assert self.permission.has_permission(self.request, None)
def test_superuser_has_permission(self):
- request = self.make_request(user=self.superuser, method="GET", is_superuser=True)
+ request = self.endpoint.initialize_request(
+ self.make_request(user=self.superuser, method="GET", is_superuser=True),
+ endpoint=self.endpoint,
+ )
assert self.permission.has_object_permission(request, None, self.sentry_app)
- request.method = "POST"
+ request._request.method = "POST"
assert self.permission.has_object_permission(request, None, self.sentry_app)
@override_options({"superuser.read-write.ga-rollout": True})
@override_settings(SENTRY_SELF_HOSTED=False)
def test_superuser_has_permission_read_only(self):
- request = self.make_request(user=self.superuser, method="GET", is_superuser=True)
+ request = self.endpoint.initialize_request(
+ self.make_request(user=self.superuser, method="GET", is_superuser=True),
+ endpoint=self.endpoint,
+ )
assert self.permission.has_object_permission(request, None, self.sentry_app)
- request.method = "POST"
+ request._request.method = "POST"
with pytest.raises(Http404):
self.permission.has_object_permission(request, None, self.sentry_app)
@@ -67,11 +90,14 @@ def test_superuser_has_permission_read_only(self):
@override_settings(SENTRY_SELF_HOSTED=False)
def test_superuser_has_permission_write(self):
self.add_user_permission(self.superuser, "superuser.write")
- request = self.make_request(user=self.superuser, method="GET", is_superuser=True)
+ request = self.endpoint.initialize_request(
+ self.make_request(user=self.superuser, method="GET", is_superuser=True),
+ endpoint=self.endpoint,
+ )
assert self.permission.has_object_permission(request, None, self.sentry_app)
- request.method = "POST"
+ request._request.method = "POST"
self.permission.has_object_permission(request, None, self.sentry_app)
@@ -107,7 +133,9 @@ def test_staff_has_permission(self):
class SentryAppBaseEndpointTest(TestCase):
def setUp(self):
self.endpoint = SentryAppBaseEndpoint()
- self.request = self.make_request(user=self.user, method="GET")
+ self.request = self.endpoint.initialize_request(
+ self.make_request(user=self.user, method="GET")
+ )
self.sentry_app = self.create_sentry_app(name="foo", organization=self.organization)
def test_retrieves_sentry_app(self):
@@ -122,44 +150,62 @@ def test_raises_when_sentry_app_not_found(self):
@control_silo_test
class SentryAppInstallationPermissionTest(TestCase):
def setUp(self):
+ self.request: Request
+ self.endpoint = SentryAppInstallationBaseEndpoint()
self.permission = SentryAppInstallationPermission()
+
self.sentry_app = self.create_sentry_app(name="foo", organization=self.organization)
self.installation = self.create_sentry_app_installation(
slug=self.sentry_app.slug, organization=self.organization, user=self.user
)
- self.request = self.make_request(user=self.user, method="GET")
self.superuser = self.create_user(is_superuser=True)
def test_missing_request_user(self):
- self.request.user = None
+ self.request = self.endpoint.initialize_request(
+ self.make_request(user=AnonymousUser(), method="GET"), endpoint=self.endpoint
+ )
assert not self.permission.has_object_permission(self.request, None, self.installation)
def test_request_user_in_organization(self):
+ self.request = self.endpoint.initialize_request(
+ self.make_request(user=self.user, method="GET"), endpoint=self.endpoint
+ )
+
assert self.permission.has_object_permission(self.request, None, self.installation)
def test_request_user_not_in_organization(self):
- request = self.make_request(user=self.create_user(), method="GET")
+ user = self.create_user()
+ request = self.endpoint.initialize_request(
+ self.make_request(user=user, method="GET"), endpoint=self.endpoint
+ )
+
with pytest.raises(Http404):
self.permission.has_object_permission(request, None, self.installation)
def test_superuser_has_permission(self):
- request = self.make_request(user=self.superuser, method="GET", is_superuser=True)
+ request = self.endpoint.initialize_request(
+ self.make_request(user=self.superuser, method="GET", is_superuser=True),
+ endpoint=self.endpoint,
+ )
assert self.permission.has_object_permission(request, None, self.installation)
- request.method = "POST"
+ request._request.method = "POST"
assert self.permission.has_object_permission(request, None, self.installation)
@override_options({"superuser.read-write.ga-rollout": True})
@override_settings(SENTRY_SELF_HOSTED=False)
def test_superuser_has_permission_read_only(self):
- request = self.make_request(user=self.superuser, method="GET", is_superuser=True)
+ request = self.endpoint.initialize_request(
+ self.make_request(user=self.superuser, method="GET", is_superuser=True),
+ endpoint=self.endpoint,
+ )
assert self.permission.has_object_permission(request, None, self.installation)
- request.method = "POST"
+ request._request.method = "POST"
with pytest.raises(Http404):
self.permission.has_object_permission(request, None, self.installation)
@@ -167,11 +213,14 @@ def test_superuser_has_permission_read_only(self):
@override_settings(SENTRY_SELF_HOSTED=False)
def test_superuser_has_permission_write(self):
self.add_user_permission(self.superuser, "superuser.write")
- request = self.make_request(user=self.superuser, method="GET", is_superuser=True)
+ request = self.endpoint.initialize_request(
+ self.make_request(user=self.superuser, method="GET", is_superuser=True),
+ endpoint=self.endpoint,
+ )
assert self.permission.has_object_permission(request, None, self.installation)
- request.method = "POST"
+ request._request.method = "POST"
self.permission.has_object_permission(request, None, self.installation)
@@ -180,7 +229,9 @@ class SentryAppInstallationBaseEndpointTest(TestCase):
def setUp(self):
self.endpoint = SentryAppInstallationBaseEndpoint()
- self.request = self.make_request(user=self.user, method="GET")
+ self.request = self.endpoint.initialize_request(
+ self.make_request(user=self.user, method="GET")
+ )
self.sentry_app = self.create_sentry_app(name="foo", organization=self.organization)
self.installation = self.create_sentry_app_installation(
slug=self.sentry_app.slug, organization=self.organization, user=self.user
@@ -197,7 +248,7 @@ def test_raises_when_sentry_app_not_found(self):
@control_silo_test
class AddIntegrationPlatformMetricTagTest(unittest.TestCase):
- @patch("sentry.api.bases.sentryapps.add_request_metric_tags")
+ @patch("sentry.sentry_apps.api.bases.sentryapps.add_request_metric_tags")
def test_record_platform_integration_metric(self, add_request_metric_tags):
@add_integration_platform_metric_tag
def get(self, request, *args, **kwargs):
From f6ede600ae42a7f2bc52fa1a9dcdb254712b9759 Mon Sep 17 00:00:00 2001
From: Jenn Mueng <30991498+jennmueng@users.noreply.github.com>
Date: Wed, 2 Oct 2024 11:29:58 -0700
Subject: [PATCH 003/139] feat(autofix): Optional unit test in root cause
(#78417)
Renders the unit test that reproduces a given root cause. Since the unit test
is optional, this can be merged without the Seer PR.
1. Adds a unit test dropdown w/ description
2. Adds icons and spacing to the root cause context area to make it feel
less cluttered and more welcoming.
![CleanShot 2024-10-01 at 14 59 26@2x](https://github.com/user-attachments/assets/bd6793b4-f8c3-4b28-a20c-757991ff478c)
---
.../events/autofix/autofixInsightCards.tsx | 39 ++++++--
.../events/autofix/autofixRootCause.spec.tsx | 43 ++++++++
.../events/autofix/autofixRootCause.tsx | 99 +++++++++++++------
static/app/components/events/autofix/types.ts | 7 ++
4 files changed, 151 insertions(+), 37 deletions(-)
diff --git a/static/app/components/events/autofix/autofixInsightCards.tsx b/static/app/components/events/autofix/autofixInsightCards.tsx
index b0e83e9f7e787e..a42e04a90bdb26 100644
--- a/static/app/components/events/autofix/autofixInsightCards.tsx
+++ b/static/app/components/events/autofix/autofixInsightCards.tsx
@@ -70,9 +70,13 @@ function AutofixBreadcrumbSnippet({breadcrumb}: AutofixBreadcrumbSnippetProps) {
export function ExpandableInsightContext({
children,
title,
+ icon,
+ rounded,
}: {
children: React.ReactNode;
title: string;
+ icon?: React.ReactNode;
+ rounded?: boolean;
}) {
const [expanded, setExpanded] = useState(false);
@@ -81,10 +85,18 @@ export function ExpandableInsightContext({
};
return (
-
-
+
+
- {title}
+
+ {icon}
+ {title}
+
@@ -384,19 +396,34 @@ const MiniHeader = styled('p')`
padding-left: ${space(2)};
`;
-const ExpandableContext = styled('div')`
+const ExpandableContext = styled('div')<{isRounded?: boolean}>`
width: 100%;
background: ${p => p.theme.alert.info.backgroundLight};
+ border-radius: ${p => (p.isRounded ? p.theme.borderRadius : 0)};
`;
-const ContextHeader = styled(Button)`
+const ContextHeader = styled(Button)<{isExpanded?: boolean; isRounded?: boolean}>`
width: 100%;
box-shadow: none;
margin: 0;
border: none;
font-weight: normal;
background: ${p => p.theme.backgroundSecondary};
- border-radius: 0px;
+ border-radius: ${p => {
+ if (!p.isRounded) {
+ return 0;
+ }
+ if (p.isExpanded) {
+ return `${p.theme.borderRadius} ${p.theme.borderRadius} 0 0`;
+ }
+ return p.theme.borderRadius;
+ }};
+`;
+
+const ContextHeaderLeftAlign = styled('div')`
+ display: flex;
+ gap: ${space(1)};
+ align-items: center;
`;
const ContextHeaderWrapper = styled('div')`
diff --git a/static/app/components/events/autofix/autofixRootCause.spec.tsx b/static/app/components/events/autofix/autofixRootCause.spec.tsx
index ef91264ee3b2cc..c2e19c42b0abed 100644
--- a/static/app/components/events/autofix/autofixRootCause.spec.tsx
+++ b/static/app/components/events/autofix/autofixRootCause.spec.tsx
@@ -132,4 +132,47 @@ describe('AutofixRootCause', function () {
screen.queryByText('This is the reproduction of a root cause.')
).not.toBeInTheDocument();
});
+
+ it('shows unit test inside reproduction card when available', async function () {
+ render(
+
+ );
+
+ expect(screen.getByText('How to reproduce')).toBeInTheDocument();
+ await userEvent.click(
+ screen.getByRole('button', {
+ name: 'How to reproduce',
+ })
+ );
+ expect(
+ screen.getByText('This is the description of a unit test.')
+ ).toBeInTheDocument();
+ expect(screen.getByText('Test case for root cause')).toBeInTheDocument();
+ });
+
+ it('does not show reproduction or unit test when not available', function () {
+ render(
+
+ );
+
+ expect(screen.queryByText('How to reproduce')).not.toBeInTheDocument();
+ });
});
diff --git a/static/app/components/events/autofix/autofixRootCause.tsx b/static/app/components/events/autofix/autofixRootCause.tsx
index 808dca0ef14ebb..b9f68d62238770 100644
--- a/static/app/components/events/autofix/autofixRootCause.tsx
+++ b/static/app/components/events/autofix/autofixRootCause.tsx
@@ -25,6 +25,7 @@ import {
import InteractionStateLayer from 'sentry/components/interactionStateLayer';
import ExternalLink from 'sentry/components/links/externalLink';
import {Tooltip} from 'sentry/components/tooltip';
+import {IconCode, IconInfo} from 'sentry/icons';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {getFileExtension} from 'sentry/utils/fileExtension';
@@ -165,24 +166,69 @@ export function replaceHeadersWithBold(markdown: string) {
function RootCauseDescription({cause}: {cause: AutofixRootCauseData}) {
return (
-
-
- {cause.reproduction && (
-
-
+
+ );
+}
+
+function RootCauseContext({
+ cause,
+ repos,
+}: {
+ cause: AutofixRootCauseData;
+ repos: AutofixRepository[];
+}) {
+ const unitTestFileExtension = cause.unit_test?.file_path
+ ? getFileExtension(cause.unit_test.file_path)
+ : undefined;
+ const unitTestLanguage = unitTestFileExtension
+ ? getPrismLanguage(unitTestFileExtension)
+ : undefined;
+
+ return (
+
+ {(cause.reproduction || cause.unit_test) && (
+ }
+ title={'How to reproduce'}
+ rounded
+ >
+ {cause.reproduction && (
-
-
+ )}
+ {cause.unit_test && (
+
+ {t('Unit test that reproduces this root cause:')}
+
+
+ {cause.unit_test.snippet}
+
+
+ )}
+
)}
-
+ }
+ title={'Relevant code'}
+ rounded
+ >
+
+
+
);
}
@@ -282,9 +328,7 @@ function CauseOption({
-
-
-
+
);
@@ -292,7 +336,6 @@ function CauseOption({
function SelectedRootCauseOption({
selectedCause,
- codeContext,
repos,
}: {
codeContext: AutofixRootCauseCodeContext[];
@@ -307,9 +350,7 @@ function SelectedRootCauseOption({
}}
/>
-
-
-
+
);
}
@@ -373,18 +414,8 @@ function AutofixRootCauseDisplay({
{t('Fix This Instead')}
-
-
-
-
-
+
+
))}
@@ -503,6 +534,12 @@ const RootCauseOption = styled('div')<{selected: boolean}>`
padding-right: ${space(2)};
`;
+const RootCauseContextContainer = styled('div')`
+ display: flex;
+ flex-direction: column;
+ gap: ${space(0.5)};
+`;
+
const RootCauseOptionHeader = styled('div')`
display: flex;
justify-content: space-between;
diff --git a/static/app/components/events/autofix/types.ts b/static/app/components/events/autofix/types.ts
index 639f7a91c71124..27f9dcb310bc19 100644
--- a/static/app/components/events/autofix/types.ts
+++ b/static/app/components/events/autofix/types.ts
@@ -166,12 +166,19 @@ export type AutofixRootCauseCodeContext = {
snippet?: CodeSnippetContext;
};
+export type AutofixRootCauseUnitTest = {
+ description: string;
+ file_path: string;
+ snippet: string;
+};
+
export type AutofixRootCauseData = {
code_context: AutofixRootCauseCodeContext[];
description: string;
id: string;
title: string;
reproduction?: string;
+ unit_test?: AutofixRootCauseUnitTest;
};
export type EventMetadataWithAutofix = EventMetadata & {
From fdce8bfa8d477e9ba0f676905e1c7bb06b109525 Mon Sep 17 00:00:00 2001
From: Seiji Chew <67301797+schew2381@users.noreply.github.com>
Date: Wed, 2 Oct 2024 11:30:11 -0700
Subject: [PATCH 004/139] chore(alerts): Change sentry err capture to metric
(#78434)
Will add this to a dashboard once the metric shows up.
Fixes SENTRY-3E2B
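A minimal sketch of the pattern this change adopts, assuming `sentry.utils.metrics` as used in the diff below; the helper name and arguments are illustrative:
```python
from sentry.utils import metrics


def collect_query_values(condition_group_results, unique_queries, group_id):
    # Hypothetical helper mirroring the shape of passes_comparison() below.
    try:
        return [condition_group_results[q][group_id] for q in unique_queries]
    except KeyError:
        # Count the missing result instead of capturing an exception in Sentry.
        metrics.incr("delayed_processing.missing_query_result")
        return None
```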
---
src/sentry/rules/processing/delayed_processing.py | 9 ++-------
1 file changed, 2 insertions(+), 7 deletions(-)
diff --git a/src/sentry/rules/processing/delayed_processing.py b/src/sentry/rules/processing/delayed_processing.py
index e85b7b2f3b80fa..c3fa9fead1c41e 100644
--- a/src/sentry/rules/processing/delayed_processing.py
+++ b/src/sentry/rules/processing/delayed_processing.py
@@ -7,7 +7,6 @@
from itertools import islice
from typing import Any, DefaultDict, NamedTuple
-import sentry_sdk
from django.db.models import OuterRef, Subquery
from sentry import buffer, nodestore, options
@@ -359,12 +358,8 @@ def passes_comparison(
query_values = [
condition_group_results[unique_query][group_id] for unique_query in unique_queries
]
- except KeyError as exception:
- sentry_sdk.capture_exception(exception)
- logger.exception(
- "delayed_processing.missing_query_results",
- extra={"exception": exception, "group_id": group_id, "project_id": project_id},
- )
+ except KeyError:
+ metrics.incr("delayed_processing.missing_query_result")
return False
calculated_value = query_values[0]
From fb2e464c38436a9eb3db7a2d3c9b15898d412e7e Mon Sep 17 00:00:00 2001
From: Nate Moore
Date: Wed, 2 Oct 2024 13:30:27 -0500
Subject: [PATCH 005/139] fix(nav): improve `/issues` handling (#78437)
Fix stacked nav menu when visiting `/issues`
---
static/app/components/nav/utils.tsx | 13 ++++++-------
1 file changed, 6 insertions(+), 7 deletions(-)
diff --git a/static/app/components/nav/utils.tsx b/static/app/components/nav/utils.tsx
index 448d3e4a821211..34eb3c218d8ff8 100644
--- a/static/app/components/nav/utils.tsx
+++ b/static/app/components/nav/utils.tsx
@@ -60,15 +60,14 @@ export function isNavItemActive(
* Issue submenu is special cased because it is matched based on query params
* rather than the pathname.
*/
- if (
- location.pathname.includes('/issues/') &&
- to.includes('/issues/') &&
- to.includes('query=')
- ) {
+ if (location.pathname.includes('/issues/') && to.includes('/issues/')) {
+ const {label} = item;
+ const matches = hasMatchingQueryParam({to, label}, location);
+ const isDefault = label === 'All';
if (location.search) {
- return hasMatchingQueryParam({to, label: item.label}, location);
+ return matches || isDefault;
}
- return item.label === 'All';
+ return isDefault;
}
const normalizedTo = normalizeUrl(to);
From a48610ed05b27808942ede6201ca36a744bcb479 Mon Sep 17 00:00:00 2001
From: edwardgou-sentry <83961295+edwardgou-sentry@users.noreply.github.com>
Date: Wed, 2 Oct 2024 14:30:38 -0400
Subject: [PATCH 006/139] feat(dashboards): Adds a background and shadow hover
effect to dashboard widgets (#78477)
Adds a background and shadow hover effect to dashboard widgets.
Also fixes tooltip not displaying for metric and preview widgets.
---
.../views/dashboards/metrics/widgetCard.tsx | 1 +
.../app/views/dashboards/widgetCard/index.tsx | 8 ++++++
.../widgetCard/widgetCardContextMenu.tsx | 26 ++++++++++++++++++-
3 files changed, 34 insertions(+), 1 deletion(-)
diff --git a/static/app/views/dashboards/metrics/widgetCard.tsx b/static/app/views/dashboards/metrics/widgetCard.tsx
index 59fbd329f7527d..13a1e70623c01f 100644
--- a/static/app/views/dashboards/metrics/widgetCard.tsx
+++ b/static/app/views/dashboards/metrics/widgetCard.tsx
@@ -220,6 +220,7 @@ export function MetricWidgetCard({
location={location}
onDelete={onDelete}
onDuplicate={onDuplicate}
+ title={widget.title || widgetMQL}
/>
)}
diff --git a/static/app/views/dashboards/widgetCard/index.tsx b/static/app/views/dashboards/widgetCard/index.tsx
index 4451addfaf23b5..072fd2317c89d1 100644
--- a/static/app/views/dashboards/widgetCard/index.tsx
+++ b/static/app/views/dashboards/widgetCard/index.tsx
@@ -523,6 +523,14 @@ export const WidgetCardPanel = styled(Panel, {
width: 1px;
}
}
+
+ :hover {
+ background-color: ${p => p.theme.surface200};
+ transition:
+ background-color 100ms linear,
+ box-shadow 100ms linear;
+ box-shadow: ${p => p.theme.dropShadowLight};
+ }
`;
const StoredDataAlert = styled(Alert)`
diff --git a/static/app/views/dashboards/widgetCard/widgetCardContextMenu.tsx b/static/app/views/dashboards/widgetCard/widgetCardContextMenu.tsx
index 75b98d2eda9045..f98aaa2d4901ef 100644
--- a/static/app/views/dashboards/widgetCard/widgetCardContextMenu.tsx
+++ b/static/app/views/dashboards/widgetCard/widgetCardContextMenu.tsx
@@ -1,3 +1,4 @@
+import type React from 'react';
import styled from '@emotion/styled';
import type {Location} from 'history';
@@ -54,7 +55,7 @@ type Props = {
seriesResultsType?: Record;
showContextMenu?: boolean;
tableData?: TableDataWithTitle[];
- title?: string;
+ title?: string | React.ReactNode;
totalIssuesCount?: string;
};
@@ -117,6 +118,29 @@ function WidgetCardContextMenu({
{t('Indexed')}
)}
+ {title && (
+
+ {title}
+ {description && (
+
+ {description}
+
+ )}
+
+ }
+ containerDisplayMode="grid"
+ isHoverable
+ >
+ }
+ />
+
+ )}
Date: Wed, 2 Oct 2024 20:30:52 +0200
Subject: [PATCH 007/139] ref(quick-start): Add 'quick_start' analytics
(#78459)
---
.../app/components/onboardingWizard/task.tsx | 2 +-
.../components/sidebar/onboardingStatus.tsx | 24 ++++++++++++-------
static/app/utils/analytics.tsx | 6 +++++
.../analytics/quickStartAnalyticsEvents.tsx | 13 ++++++++++
4 files changed, 35 insertions(+), 10 deletions(-)
create mode 100644 static/app/utils/analytics/quickStartAnalyticsEvents.tsx
diff --git a/static/app/components/onboardingWizard/task.tsx b/static/app/components/onboardingWizard/task.tsx
index 58bc9e403d3405..474d683a227f36 100644
--- a/static/app/components/onboardingWizard/task.tsx
+++ b/static/app/components/onboardingWizard/task.tsx
@@ -30,7 +30,7 @@ const recordAnalytics = (
organization: Organization,
action: string
) =>
- trackAnalytics('onboarding.wizard_clicked', {
+ trackAnalytics('quick_start.task_card_clicked', {
organization,
todo_id: task.task,
todo_title: task.title,
diff --git a/static/app/components/sidebar/onboardingStatus.tsx b/static/app/components/sidebar/onboardingStatus.tsx
index ec1957dc583331..2a465d1cbc2b6e 100644
--- a/static/app/components/sidebar/onboardingStatus.tsx
+++ b/static/app/components/sidebar/onboardingStatus.tsx
@@ -1,4 +1,4 @@
-import {Fragment, useContext} from 'react';
+import {Fragment, useCallback, useContext} from 'react';
import type {Theme} from '@emotion/react';
import {css} from '@emotion/react';
import styled from '@emotion/styled';
@@ -41,14 +41,22 @@ export default function OnboardingStatus({
hidePanel,
onShowPanel,
}: Props) {
- const handleShowPanel = () => {
- trackAnalytics('onboarding.wizard_opened', {organization: org});
- onShowPanel();
- };
const onboardingContext = useContext(OnboardingContext);
const {projects} = useProjects();
const {shouldAccordionFloat} = useContext(ExpandedContext);
+ const isActive = currentPanel === SidebarPanelKey.ONBOARDING_WIZARD;
+ const walkthrough = isDemoWalkthrough();
+
+ const handleToggle = useCallback(() => {
+ if (!walkthrough && !isActive) {
+ trackAnalytics('quick_start.opened', {
+ organization: org,
+ });
+ }
+ onShowPanel();
+ }, [walkthrough, isActive, onShowPanel, org]);
+
if (!org.features?.includes('onboarding')) {
return null;
}
@@ -62,6 +70,7 @@ export default function OnboardingStatus({
const allDisplayedTasks = tasks
.filter(task => task.display)
.filter(task => !task.renderCard);
+
const doneTasks = allDisplayedTasks.filter(isDone);
const numberRemaining = allDisplayedTasks.length - doneTasks.length;
@@ -72,13 +81,10 @@ export default function OnboardingStatus({
!task.completionSeen
);
- const isActive = currentPanel === SidebarPanelKey.ONBOARDING_WIZARD;
-
if (doneTasks.length >= allDisplayedTasks.length && !isActive) {
return null;
}
- const walkthrough = isDemoWalkthrough();
const label = walkthrough ? t('Guided Tours') : t('Quick Start');
const task = walkthrough ? 'tours' : 'tasks';
@@ -87,7 +93,7 @@ export default function OnboardingStatus({
> {}
const allEventMap: Record = {
@@ -123,6 +128,7 @@ const allEventMap: Record = {
...starfishEventMap,
...signupEventMap,
...statsEventMap,
+ ...quickStartEventMap,
};
/**
diff --git a/static/app/utils/analytics/quickStartAnalyticsEvents.tsx b/static/app/utils/analytics/quickStartAnalyticsEvents.tsx
new file mode 100644
index 00000000000000..b08631946cf3d7
--- /dev/null
+++ b/static/app/utils/analytics/quickStartAnalyticsEvents.tsx
@@ -0,0 +1,13 @@
+export type QuickStartEventParameters = {
+ 'quick_start.opened': {};
+ 'quick_start.task_card_clicked': {
+ action: string;
+ todo_id: string;
+ todo_title: string;
+ };
+};
+
+export const quickStartEventMap: Record = {
+ 'quick_start.opened': 'Quick Start: Opened',
+ 'quick_start.task_card_clicked': 'Quick Start: Task Card Clicked',
+};
From 92ee558f6c3293bdfc94137665c48c962c8f2302 Mon Sep 17 00:00:00 2001
From: Christinarlong <60594860+Christinarlong@users.noreply.github.com>
Date: Wed, 2 Oct 2024 11:31:07 -0700
Subject: [PATCH 008/139] chore(sentry_apps): Move web resources for sentryapps
to sentry_apps (#78296)
---
.../web}/debug_sentry_app_notify_disable.py | 3 +--
.../{web/frontend => sentry_apps/web}/sentryapp_avatar.py | 0
src/sentry/web/debug_urls.py | 4 +---
src/sentry/web/urls.py | 2 +-
tests/sentry/api/test_path_params.py | 1 +
.../frontend => sentry_apps/web}/test_sentryapp_avatar.py | 0
6 files changed, 4 insertions(+), 6 deletions(-)
rename src/sentry/{web/frontend/debug => sentry_apps/web}/debug_sentry_app_notify_disable.py (97%)
rename src/sentry/{web/frontend => sentry_apps/web}/sentryapp_avatar.py (100%)
rename tests/sentry/{web/frontend => sentry_apps/web}/test_sentryapp_avatar.py (100%)
diff --git a/src/sentry/web/frontend/debug/debug_sentry_app_notify_disable.py b/src/sentry/sentry_apps/web/debug_sentry_app_notify_disable.py
similarity index 97%
rename from src/sentry/web/frontend/debug/debug_sentry_app_notify_disable.py
rename to src/sentry/sentry_apps/web/debug_sentry_app_notify_disable.py
index 53df7c98ce44f5..06a0f0c7914583 100644
--- a/src/sentry/web/frontend/debug/debug_sentry_app_notify_disable.py
+++ b/src/sentry/sentry_apps/web/debug_sentry_app_notify_disable.py
@@ -6,8 +6,7 @@
from sentry.models.organization import Organization
from sentry.sentry_apps.models.sentry_app import SentryApp
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
-
-from .mail import MailPreview
+from sentry.web.frontend.debug.mail import MailPreview
class DebugSentryAppNotifyDisableView(View):
diff --git a/src/sentry/web/frontend/sentryapp_avatar.py b/src/sentry/sentry_apps/web/sentryapp_avatar.py
similarity index 100%
rename from src/sentry/web/frontend/sentryapp_avatar.py
rename to src/sentry/sentry_apps/web/sentryapp_avatar.py
diff --git a/src/sentry/web/debug_urls.py b/src/sentry/web/debug_urls.py
index 04d457e434c68f..200e13f9954eac 100644
--- a/src/sentry/web/debug_urls.py
+++ b/src/sentry/web/debug_urls.py
@@ -3,6 +3,7 @@
import sentry.web.frontend.debug.mail
from sentry.integrations.web.debug.debug_notify_disable import DebugNotifyDisableView
+from sentry.sentry_apps.web.debug_sentry_app_notify_disable import DebugSentryAppNotifyDisableView
from sentry.web.frontend.debug import debug_auth_views
from sentry.web.frontend.debug.debug_assigned_email import (
DebugAssignedEmailView,
@@ -62,9 +63,6 @@
DebugResolvedInReleaseEmailView,
DebugResolvedInReleaseUpcomingEmailView,
)
-from sentry.web.frontend.debug.debug_sentry_app_notify_disable import (
- DebugSentryAppNotifyDisableView,
-)
from sentry.web.frontend.debug.debug_setup_2fa_email import DebugSetup2faEmailView
from sentry.web.frontend.debug.debug_sso_link_email import (
DebugSsoLinkedEmailView,
diff --git a/src/sentry/web/urls.py b/src/sentry/web/urls.py
index 7c5b7b3274e62f..48626ac6f10cff 100644
--- a/src/sentry/web/urls.py
+++ b/src/sentry/web/urls.py
@@ -14,6 +14,7 @@
from sentry.charts.endpoints import serve_chartcuterie_config
from sentry.integrations.web.doc_integration_avatar import DocIntegrationAvatarPhotoView
from sentry.integrations.web.organization_integration_setup import OrganizationIntegrationSetupView
+from sentry.sentry_apps.web.sentryapp_avatar import SentryAppAvatarPhotoView
from sentry.toolbar.views.iframe_view import IframeView
from sentry.toolbar.views.login_success_view import LoginSuccessView
from sentry.users.web import accounts
@@ -47,7 +48,6 @@
from sentry.web.frontend.react_page import GenericReactPageView, ReactPageView
from sentry.web.frontend.reactivate_account import ReactivateAccountView
from sentry.web.frontend.release_webhook import ReleaseWebhookView
-from sentry.web.frontend.sentryapp_avatar import SentryAppAvatarPhotoView
from sentry.web.frontend.setup_wizard import SetupWizardView
from sentry.web.frontend.shared_group_details import SharedGroupDetailsView
from sentry.web.frontend.sudo import SudoView
diff --git a/tests/sentry/api/test_path_params.py b/tests/sentry/api/test_path_params.py
index 739db9bfe059ef..f68f9340c9eb1e 100644
--- a/tests/sentry/api/test_path_params.py
+++ b/tests/sentry/api/test_path_params.py
@@ -37,6 +37,7 @@ class TestPathParams(TestCase):
"sentry.web",
"sentry.integrations.web",
"sentry.users.web",
+ "sentry.sentry_apps.web",
"sentry.auth",
"sentry.toolbar",
)
diff --git a/tests/sentry/web/frontend/test_sentryapp_avatar.py b/tests/sentry/sentry_apps/web/test_sentryapp_avatar.py
similarity index 100%
rename from tests/sentry/web/frontend/test_sentryapp_avatar.py
rename to tests/sentry/sentry_apps/web/test_sentryapp_avatar.py
From f7abaeff7e2e72730ada5166fd0e5ba68dac99cc Mon Sep 17 00:00:00 2001
From: Nate Moore
Date: Wed, 2 Oct 2024 13:31:23 -0500
Subject: [PATCH 009/139] feat(nav): add org switcher (#78438)
Enable user/org dropdown switcher for stacked nav
---
static/app/components/nav/sidebar.tsx | 7 ++-----
1 file changed, 2 insertions(+), 5 deletions(-)
diff --git a/static/app/components/nav/sidebar.tsx b/static/app/components/nav/sidebar.tsx
index c775c2403f0588..ef332cebcffed9 100644
--- a/static/app/components/nav/sidebar.tsx
+++ b/static/app/components/nav/sidebar.tsx
@@ -3,7 +3,6 @@ import styled from '@emotion/styled';
import {motion} from 'framer-motion';
import Feature from 'sentry/components/acl/feature';
-import OrganizationAvatar from 'sentry/components/avatar/organizationAvatar';
import Link from 'sentry/components/links/link';
import {useNavContext} from 'sentry/components/nav/context';
import Submenu from 'sentry/components/nav/submenu';
@@ -16,19 +15,17 @@ import {
type NavSidebarItem,
resolveNavItemTo,
} from 'sentry/components/nav/utils';
+import SidebarDropdown from 'sentry/components/sidebar/sidebarDropdown';
import {space} from 'sentry/styles/space';
import theme from 'sentry/utils/theme';
import {useLocation} from 'sentry/utils/useLocation';
-import useOrganization from 'sentry/utils/useOrganization';
function Sidebar() {
- const organization = useOrganization();
-
return (
-
+
From 0bf37e07c9cfa2c669e71d6f2e94c8fb4494638a Mon Sep 17 00:00:00 2001
From: Priscila Oliveira
Date: Wed, 2 Oct 2024 20:31:37 +0200
Subject: [PATCH 010/139] ref(onboarding): Remove unused file (#78464)
---
.../onboarding/gettingStartedDoc/layout.tsx | 126 ------------------
1 file changed, 126 deletions(-)
delete mode 100644 static/app/components/onboarding/gettingStartedDoc/layout.tsx
diff --git a/static/app/components/onboarding/gettingStartedDoc/layout.tsx b/static/app/components/onboarding/gettingStartedDoc/layout.tsx
deleted file mode 100644
index 1571a468e1dd55..00000000000000
--- a/static/app/components/onboarding/gettingStartedDoc/layout.tsx
+++ /dev/null
@@ -1,126 +0,0 @@
-import type {ComponentProps} from 'react';
-import {Fragment} from 'react';
-import styled from '@emotion/styled';
-
-import HookOrDefault from 'sentry/components/hookOrDefault';
-import ExternalLink from 'sentry/components/links/externalLink';
-import List from 'sentry/components/list';
-import ListItem from 'sentry/components/list/listItem';
-import {AuthTokenGeneratorProvider} from 'sentry/components/onboarding/gettingStartedDoc/authTokenGenerator';
-import type {StepProps} from 'sentry/components/onboarding/gettingStartedDoc/step';
-import {Step} from 'sentry/components/onboarding/gettingStartedDoc/step';
-import type {NextStep} from 'sentry/components/onboarding/gettingStartedDoc/types';
-import {PlatformOptionsControl} from 'sentry/components/onboarding/platformOptionsControl';
-import {ProductSelection} from 'sentry/components/onboarding/productSelection';
-import {t} from 'sentry/locale';
-import {space} from 'sentry/styles/space';
-import type {PlatformKey} from 'sentry/types/project';
-import useOrganization from 'sentry/utils/useOrganization';
-
-const ProductSelectionAvailabilityHook = HookOrDefault({
- hookName: 'component:product-selection-availability',
- defaultComponent: ProductSelection,
-});
-
-export type LayoutProps = {
- projectSlug: string;
- steps: StepProps[];
- /**
- * An introduction displayed before the steps
- */
- introduction?: React.ReactNode;
- newOrg?: boolean;
- nextSteps?: NextStep[];
- platformKey?: PlatformKey;
- platformOptions?: ComponentProps['platformOptions'];
-};
-
-export function Layout({
- steps,
- platformKey,
- newOrg,
- nextSteps = [],
- platformOptions,
- introduction,
- projectSlug,
-}: LayoutProps) {
- const organization = useOrganization();
-
- return (
-
-
-
- {introduction && {introduction} }
-
- {platformOptions ? (
-
- ) : null}
-
-
-
- {steps.map(step => (
-
- ))}
-
- {nextSteps.length > 0 && (
-
-
- {t('Next Steps')}
-
- {nextSteps.map(step => (
-
- {step.name}
- {': '}
- {step.description}
-
- ))}
-
-
- )}
-
-
- );
-}
-
-const Header = styled('div')`
- display: flex;
- flex-direction: column;
- gap: ${space(2)};
-`;
-
-const Divider = styled('hr')<{withBottomMargin?: boolean}>`
- height: 1px;
- width: 100%;
- background: ${p => p.theme.border};
- border: none;
- ${p => p.withBottomMargin && `margin-bottom: ${space(3)}`}
-`;
-
-const Steps = styled('div')`
- display: flex;
- flex-direction: column;
- gap: 1.5rem;
-`;
-
-const Introduction = styled('div')`
- display: flex;
- flex-direction: column;
- gap: ${space(1)};
-`;
-
-const Wrapper = styled('div')`
- h4 {
- margin-bottom: 0.5em;
- }
- && {
- p {
- margin-bottom: 0;
- }
- h5 {
- margin-bottom: 0;
- }
- }
-`;
From 92a88b193e5558656f5e9f22404fc0895af98033 Mon Sep 17 00:00:00 2001
From: Richard Roggenkemper <46740234+roggenkemper@users.noreply.github.com>
Date: Wed, 2 Oct 2024 11:31:54 -0700
Subject: [PATCH 011/139] chore(issues): Add tracing to `get_bytes` (#78433)
add tracing to `get_bytes` in nodestore
---
src/sentry/nodestore/bigtable/backend.py | 1 +
src/sentry/utils/kvstore/bigtable.py | 4 +++-
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/src/sentry/nodestore/bigtable/backend.py b/src/sentry/nodestore/bigtable/backend.py
index aa7580bd9403f3..fc45086f217127 100644
--- a/src/sentry/nodestore/bigtable/backend.py
+++ b/src/sentry/nodestore/bigtable/backend.py
@@ -63,6 +63,7 @@ def __init__(
self.automatic_expiry = automatic_expiry
self.skip_deletes = automatic_expiry and "_SENTRY_CLEANUP" in os.environ
+ @sentry_sdk.tracing.trace
def _get_bytes(self, id: str) -> bytes | None:
return self.store.get(id)
diff --git a/src/sentry/utils/kvstore/bigtable.py b/src/sentry/utils/kvstore/bigtable.py
index 824f191cd0f1ba..bc7f38c3bff6f9 100644
--- a/src/sentry/utils/kvstore/bigtable.py
+++ b/src/sentry/utils/kvstore/bigtable.py
@@ -6,6 +6,7 @@
from threading import Lock
from typing import Any
+import sentry_sdk
from django.utils import timezone
from google.api_core import exceptions, retry
from google.cloud import bigtable
@@ -114,7 +115,8 @@ def _get_table(self, admin: bool = False) -> Table:
return table
def get(self, key: str) -> bytes | None:
- row = self._get_table().read_row(key)
+ with sentry_sdk.start_span(op="bigtable.get"):
+ row = self._get_table().read_row(key)
if row is None:
return None
From 3127eedcd4e7e57606c8c4600dabb5575375b78f Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 14:32:14 -0400
Subject: [PATCH 012/139] ref(rr6): Remove usage of react-router 3
browserHistory (#78445)
This removes what should be the last react-router 3 import in the main bundle,
which should eliminate react-router 3 imports from the bundle entirely
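As a rough illustration of the shim pattern this patch relies on, the sketch below is written outside the Sentry codebase, with a trimmed method list and illustrative names (`warnOnLegacyCall`, `shimHistory`, `LegacyHistoryLike`): a Proxy that logs and swallows calls to legacy history methods until the real router history is installed.

// Minimal sketch, assuming a generic string-keyed target rather than the real History type.
type LegacyHistoryLike = Record<string, (...args: unknown[]) => unknown>;

const legacyMethods = ['push', 'replace', 'go', 'goBack', 'goForward', 'listen'];

const warnOnLegacyCall: ProxyHandler<LegacyHistoryLike> = {
  get(_target, prop) {
    if (typeof prop === 'string' && legacyMethods.includes(prop)) {
      // Report the premature call, then hand back a no-op so callers don't crash.
      console.warn(`legacy history method called before the router was installed: ${prop}`);
      return () => {};
    }
    return undefined;
  },
};

// Declared with `let` so it can be reassigned once the real history exists.
// Until then, every legacy call is logged and ignored.
export let shimHistory = new Proxy({} as LegacyHistoryLike, warnOnLegacyCall);

This mirrors the idea in the diff below: before the real history is wired up, a legacy call is reported but returns a harmless no-op instead of crashing.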
---
static/app/utils/browserHistory.tsx | 32 ++++++++++++++++++++---------
1 file changed, 22 insertions(+), 10 deletions(-)
diff --git a/static/app/utils/browserHistory.tsx b/static/app/utils/browserHistory.tsx
index 145f8f5a8e6676..6aa4b862920c65 100644
--- a/static/app/utils/browserHistory.tsx
+++ b/static/app/utils/browserHistory.tsx
@@ -1,5 +1,3 @@
-// biome-ignore lint/nursery/noRestrictedImports: Will be removed with react router 6
-import {browserHistory as react3BrowserHistory} from 'react-router';
import type {Router} from '@remix-run/router/dist/router';
import * as Sentry from '@sentry/react';
import type {History} from 'history';
@@ -9,24 +7,38 @@ import {
locationDescriptorToTo,
} from './reactRouter6Compat/location';
+const historyMethods: Array = [
+ 'listenBefore',
+ 'listen',
+ 'transitionTo',
+ 'push',
+ 'replace',
+ 'go',
+ 'goBack',
+ 'goForward',
+ 'createKey',
+ 'createPath',
+ 'createHref',
+ 'createLocation',
+ 'getCurrentLocation',
+];
+
/**
* Configures a proxy object for the default value of browserHistory. This
* should NOT be called before the DANGEROUS_SET_REACT_ROUTER_6_HISTORY
 * function is called. But let's be sure it isn't by adding some logging.
- *
- * It likely does nothing right now since the react-router 3 browserHistory
- * doesn't actally do anything in react router 6 land (I think).
*/
const proxyLegacyBrowserHistory: ProxyHandler = {
- get(target, prop, _receiver) {
- if (prop in target) {
+ get(_target, prop, _receiver) {
+ if (historyMethods.includes(prop.toString())) {
// eslint-disable-next-line no-console
- console.error(`legacy browserHistory called (${prop.toString()})!`);
+ console.warn('Legacy browserHistory called before patched!');
Sentry.captureException(new Error('legacy browserHistory called!'), {
level: 'info',
extra: {prop},
});
- return target[prop];
+
+ return () => {};
}
return undefined;
},
@@ -46,7 +58,7 @@ const proxyLegacyBrowserHistory: ProxyHandler = {
* browserHistory.push({...location, query: {someKey: 1}})
* navigate({...location, query: {someKey: 1}})
*/
-export let browserHistory = new Proxy(react3BrowserHistory, proxyLegacyBrowserHistory);
+export let browserHistory = new Proxy({} as History, proxyLegacyBrowserHistory);
/**
* This shim sets the global `browserHistory` to a shim object that matches
From 2756e65dc8d5ef9afcff95586a441d142a1624b5 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 14:32:29 -0400
Subject: [PATCH 013/139] fix(uptime): Default method to `GET` (#78481)
Prior to this, the method field had a placeholder that merely looked like a default value
---
static/app/views/alerts/rules/uptime/uptimeAlertForm.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/static/app/views/alerts/rules/uptime/uptimeAlertForm.tsx b/static/app/views/alerts/rules/uptime/uptimeAlertForm.tsx
index 0776908b7c82f2..996724b7396e04 100644
--- a/static/app/views/alerts/rules/uptime/uptimeAlertForm.tsx
+++ b/static/app/views/alerts/rules/uptime/uptimeAlertForm.tsx
@@ -153,7 +153,7 @@ export function UptimeAlertForm({project, handleDelete, rule}: Props) {
({
value: option,
label: option,
From 9cf90d8d036e30185d2e5e907a7c5bf0c8f0cf1a Mon Sep 17 00:00:00 2001
From: Colleen O'Rourke
Date: Wed, 2 Oct 2024 11:44:36 -0700
Subject: [PATCH 014/139] ref(alerts): Add rollout flag to front end (#78405)
Frontend counterpart to https://github.com/getsentry/sentry/pull/78359/
---
.../app/views/alerts/rules/metric/ruleForm.spec.tsx | 12 ++++++++++--
.../views/alerts/rules/metric/thresholdTypeForm.tsx | 4 +++-
2 files changed, 13 insertions(+), 3 deletions(-)
diff --git a/static/app/views/alerts/rules/metric/ruleForm.spec.tsx b/static/app/views/alerts/rules/metric/ruleForm.spec.tsx
index 65af93813aad2e..7d58b803844268 100644
--- a/static/app/views/alerts/rules/metric/ruleForm.spec.tsx
+++ b/static/app/views/alerts/rules/metric/ruleForm.spec.tsx
@@ -370,7 +370,11 @@ describe('Incident Rules Form', () => {
});
it('creates an anomaly detection rule', async () => {
- organization.features = [...organization.features, 'anomaly-detection-alerts'];
+ organization.features = [
+ ...organization.features,
+ 'anomaly-detection-alerts',
+ 'anomaly-detection-rollout',
+ ];
const rule = MetricRuleFixture({
sensitivity: AlertRuleSensitivity.MEDIUM,
seasonality: AlertRuleSeasonality.AUTO,
@@ -569,7 +573,11 @@ describe('Incident Rules Form', () => {
});
it('switches to anomaly detection threshold', async () => {
- organization.features = [...organization.features, 'anomaly-detection-alerts'];
+ organization.features = [
+ ...organization.features,
+ 'anomaly-detection-alerts',
+ 'anomaly-detection-rollout',
+ ];
createWrapper({
rule: {
...rule,
diff --git a/static/app/views/alerts/rules/metric/thresholdTypeForm.tsx b/static/app/views/alerts/rules/metric/thresholdTypeForm.tsx
index 0981023da072f7..889f963449ad31 100644
--- a/static/app/views/alerts/rules/metric/thresholdTypeForm.tsx
+++ b/static/app/views/alerts/rules/metric/thresholdTypeForm.tsx
@@ -50,7 +50,9 @@ function ThresholdTypeForm({
'insights_metrics',
]);
- const hasAnomalyDetection = organization.features.includes('anomaly-detection-alerts');
+ const hasAnomalyDetection =
+ organization.features.includes('anomaly-detection-alerts') &&
+ organization.features.includes('anomaly-detection-rollout');
const thresholdTypeChoices: RadioOption[] = [
[AlertRuleComparisonType.COUNT, 'Static: above or below {x}'],
From 5c00a11ea1a31513480cfb11ba70375a6a549c02 Mon Sep 17 00:00:00 2001
From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com>
Date: Wed, 2 Oct 2024 14:45:08 -0400
Subject: [PATCH 015/139] feat(insights): hide web vitals tab transaction
summary (#78412)
Work for https://github.com/getsentry/sentry/issues/77572
With the new domain view, we don't want two web vitals UIs, so we can
hide (and eventually remove) the old transaction summary web vitals
tab.
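A minimal sketch of the gating introduced here, written against a plain `features` string array rather than Sentry's organization object (the `resolveHasWebVitals` helper is illustrative, not part of the patch):

type HasWebVitals = 'yes' | 'no' | 'maybe';

// Illustrative helper: once the domain view owns web vitals, always hide the
// legacy tab; otherwise keep the old behavior (show it on the web vitals tab).
function resolveHasWebVitals(features: string[], isWebVitalsTab: boolean): HasWebVitals {
  if (features.includes('insights-domain-view')) {
    return 'no';
  }
  return isWebVitalsTab ? 'yes' : 'maybe';
}

resolveHasWebVitals(['insights-domain-view'], true); // 'no'
resolveHasWebVitals([], true); // 'yes'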
---
.../views/performance/transactionSummary/header.tsx | 2 +-
.../performance/transactionSummary/pageLayout.tsx | 10 ++++++++--
2 files changed, 9 insertions(+), 3 deletions(-)
diff --git a/static/app/views/performance/transactionSummary/header.tsx b/static/app/views/performance/transactionSummary/header.tsx
index e6e78401553b4c..502576e81e5fce 100644
--- a/static/app/views/performance/transactionSummary/header.tsx
+++ b/static/app/views/performance/transactionSummary/header.tsx
@@ -32,7 +32,7 @@ import TeamKeyTransactionButton from './teamKeyTransactionButton';
import TransactionThresholdButton from './transactionThresholdButton';
import type {TransactionThresholdMetric} from './transactionThresholdModal';
-type Props = {
+export type Props = {
currentTab: Tab;
eventView: EventView;
hasWebVitals: 'maybe' | 'yes' | 'no';
diff --git a/static/app/views/performance/transactionSummary/pageLayout.tsx b/static/app/views/performance/transactionSummary/pageLayout.tsx
index 0d6f6ec84dd195..12d9a591f72abe 100644
--- a/static/app/views/performance/transactionSummary/pageLayout.tsx
+++ b/static/app/views/performance/transactionSummary/pageLayout.tsx
@@ -38,7 +38,7 @@ import {replaysRouteWithQuery} from './transactionReplays/utils';
import {spansRouteWithQuery} from './transactionSpans/utils';
import {tagsRouteWithQuery} from './transactionTags/utils';
import {vitalsRouteWithQuery} from './transactionVitals/utils';
-import TransactionHeader from './header';
+import TransactionHeader, {type Props as TransactionHeaderProps} from './header';
import Tab from './tabs';
import type {TransactionThresholdMetric} from './transactionThresholdModal';
import {generateTransactionSummaryRoute, transactionSummaryRouteWithQuery} from './utils';
@@ -251,6 +251,12 @@ function PageLayout(props: Props) {
const project = projects.find(p => p.id === projectId);
+ let hasWebVitals: TransactionHeaderProps['hasWebVitals'] =
+ tab === Tab.WEB_VITALS ? 'yes' : 'maybe';
+ if (organization.features.includes('insights-domain-view')) {
+ hasWebVitals = 'no';
+ }
+
return (
{
setTransactionThreshold(threshold);
setTransactionThresholdMetric(metric);
From ad4e064ea53db005566e9b45078fdcbb0777409a Mon Sep 17 00:00:00 2001
From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com>
Date: Wed, 2 Oct 2024 14:45:26 -0400
Subject: [PATCH 016/139] feat(insights): move performance mobile view to
performance mobile domain (#78387)
Work for https://github.com/getsentry/sentry/issues/77572
The same PR as https://github.com/getsentry/sentry/pull/78234 but for
mobile.
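The new overview page prefills its search bar from the current query; a standalone sketch of that extraction, using a simplified tokenizer in place of Sentry's `MutableSearch` (the helper below is illustrative):

// Split a query into key:value filters and free-text tokens, then prefer an
// explicit `transaction:` filter over free text when prefilling the search bar.
function getFreeTextFromQuery(query: string): string {
  const tokens = query.split(/\s+/).filter(Boolean);
  const transactionValues = tokens
    .filter(token => token.startsWith('transaction:'))
    .map(token => token.slice('transaction:'.length));
  if (transactionValues.length) {
    return transactionValues[0];
  }
  const freeText = tokens.filter(token => !token.includes(':'));
  return freeText.join(' ');
}

getFreeTextFromQuery('transaction:/checkout browser.name:Chrome'); // '/checkout'
getFreeTextFromQuery('timeout error');                             // 'timeout error'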
---
.../pages/mobile/mobileOverviewPage.tsx | 238 +++++++++++++++++-
static/app/views/performance/data.tsx | 4 +-
2 files changed, 231 insertions(+), 11 deletions(-)
diff --git a/static/app/views/insights/pages/mobile/mobileOverviewPage.tsx b/static/app/views/insights/pages/mobile/mobileOverviewPage.tsx
index 8caa28008749ca..0e9b4ba933504d 100644
--- a/static/app/views/insights/pages/mobile/mobileOverviewPage.tsx
+++ b/static/app/views/insights/pages/mobile/mobileOverviewPage.tsx
@@ -1,21 +1,241 @@
-import {Fragment} from 'react';
+import styled from '@emotion/styled';
+import Feature from 'sentry/components/acl/feature';
import * as Layout from 'sentry/components/layouts/thirds';
-import {PageAlert} from 'sentry/utils/performance/contexts/pageAlert';
+import {NoAccess} from 'sentry/components/noAccess';
+import {DatePageFilter} from 'sentry/components/organizations/datePageFilter';
+import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter';
+import PageFilterBar from 'sentry/components/organizations/pageFilterBar';
+import PageFiltersContainer from 'sentry/components/organizations/pageFilters/container';
+import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter';
+import TransactionNameSearchBar from 'sentry/components/performance/searchBar';
+import SentryDocumentTitle from 'sentry/components/sentryDocumentTitle';
+import {trackAnalytics} from 'sentry/utils/analytics';
+import {canUseMetricsData} from 'sentry/utils/performance/contexts/metricsEnhancedSetting';
+import {PageAlert, usePageAlert} from 'sentry/utils/performance/contexts/pageAlert';
+import {PerformanceDisplayProvider} from 'sentry/utils/performance/contexts/performanceDisplayContext';
+import {MutableSearch} from 'sentry/utils/tokenizeSearch';
+import {useLocation} from 'sentry/utils/useLocation';
+import {useNavigate} from 'sentry/utils/useNavigate';
+import useOrganization from 'sentry/utils/useOrganization';
+import useProjects from 'sentry/utils/useProjects';
+import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout';
+import {ToolRibbon} from 'sentry/views/insights/common/components/ribbon';
+import {useOnboardingProject} from 'sentry/views/insights/common/queries/useOnboardingProject';
import {MobileHeader} from 'sentry/views/insights/pages/mobile/mobilePageHeader';
+import {OVERVIEW_PAGE_TITLE} from 'sentry/views/insights/pages/settings';
+import {
+ generateGenericPerformanceEventView,
+ generateMobilePerformanceEventView,
+} from 'sentry/views/performance/data';
+import {checkIsReactNative} from 'sentry/views/performance/landing/utils';
+import {
+ DoubleChartRow,
+ TripleChartRow,
+} from 'sentry/views/performance/landing/widgets/components/widgetChartRow';
+import {PerformanceWidgetSetting} from 'sentry/views/performance/landing/widgets/widgetDefinitions';
+import Onboarding from 'sentry/views/performance/onboarding';
+import Table from 'sentry/views/performance/table';
+import {
+ getTransactionSearchQuery,
+ ProjectPerformanceType,
+} from 'sentry/views/performance/utils';
+
+const MOBILE_COLUMN_TITLES = [
+ 'transaction',
+ 'project',
+ 'operation',
+ 'tpm',
+ 'slow frame %',
+ 'frozen frame %',
+ 'users',
+];
+
+const REACT_NATIVE_COLUMN_TITLES = [
+ 'transaction',
+ 'project',
+ 'operation',
+ 'tpm',
+ 'slow frame %',
+ 'frozen frame %',
+ 'stall %',
+ 'users',
+];
function MobileOverviewPage() {
+ const organization = useOrganization();
+ const location = useLocation();
+ const {setPageError} = usePageAlert();
+ const {projects} = useProjects();
+ const onboardingProject = useOnboardingProject();
+ const navigate = useNavigate();
+
+ const withStaticFilters = canUseMetricsData(organization);
+
+ const eventView = generateMobilePerformanceEventView(
+ location,
+ projects,
+ generateGenericPerformanceEventView(location, withStaticFilters, organization),
+ withStaticFilters,
+ organization
+ );
+
+ eventView.fields = eventView.fields.filter(
+ field => !['user_misery()', 'count_miserable(user)'].includes(field.field)
+ );
+
+ let columnTitles = checkIsReactNative(eventView)
+ ? REACT_NATIVE_COLUMN_TITLES
+ : MOBILE_COLUMN_TITLES;
+
+ const showOnboarding = onboardingProject !== undefined;
+
+ const doubleChartRowCharts = [
+ PerformanceWidgetSetting.MOST_SLOW_FRAMES,
+ PerformanceWidgetSetting.MOST_FROZEN_FRAMES,
+ ];
+ const tripleChartRowCharts = [
+ PerformanceWidgetSetting.TPM_AREA,
+ PerformanceWidgetSetting.DURATION_HISTOGRAM,
+ PerformanceWidgetSetting.P50_DURATION_AREA,
+ PerformanceWidgetSetting.P75_DURATION_AREA,
+ PerformanceWidgetSetting.P95_DURATION_AREA,
+ PerformanceWidgetSetting.P99_DURATION_AREA,
+ PerformanceWidgetSetting.FAILURE_RATE_AREA,
+ PerformanceWidgetSetting.COLD_STARTUP_AREA,
+ PerformanceWidgetSetting.WARM_STARTUP_AREA,
+ PerformanceWidgetSetting.SLOW_FRAMES_AREA,
+ PerformanceWidgetSetting.FROZEN_FRAMES_AREA,
+ ];
+
+ if (organization.features.includes('mobile-vitals')) {
+ columnTitles = [...columnTitles.slice(0, 5), 'ttid', ...columnTitles.slice(5)];
+ tripleChartRowCharts.push(
+ ...[
+ PerformanceWidgetSetting.TIME_TO_INITIAL_DISPLAY,
+ PerformanceWidgetSetting.TIME_TO_FULL_DISPLAY,
+ ]
+ );
+ }
+ if (organization.features.includes('insights-initial-modules')) {
+ doubleChartRowCharts[0] = PerformanceWidgetSetting.SLOW_SCREENS_BY_TTID;
+ }
+ if (organization.features.includes('starfish-mobile-appstart')) {
+ doubleChartRowCharts.push(
+ PerformanceWidgetSetting.SLOW_SCREENS_BY_COLD_START,
+ PerformanceWidgetSetting.SLOW_SCREENS_BY_WARM_START
+ );
+ }
+
+ if (organization.features.includes('insights-initial-modules')) {
+ doubleChartRowCharts.push(PerformanceWidgetSetting.MOST_TIME_CONSUMING_DOMAINS);
+ }
+
+ const sharedProps = {eventView, location, organization, withStaticFilters};
+
+ const getFreeTextFromQuery = (query: string) => {
+ const conditions = new MutableSearch(query);
+ const transactionValues = conditions.getFilterValues('transaction');
+ if (transactionValues.length) {
+ return transactionValues[0];
+ }
+ if (conditions.freeText.length > 0) {
+ // raw text query will be wrapped in wildcards in generatePerformanceEventView
+ // so no need to wrap it here
+ return conditions.freeText.join(' ');
+ }
+ return '';
+ };
+
+ function handleSearch(searchQuery: string) {
+ trackAnalytics('performance.domains.mobile.search', {organization});
+
+ navigate({
+ pathname: location.pathname,
+ query: {
+ ...location.query,
+ cursor: undefined,
+ query: String(searchQuery).trim() || undefined,
+ isDefaultQuery: false,
+ },
+ });
+ }
+
+ const derivedQuery = getTransactionSearchQuery(location, eventView.query);
+
return (
-
+
-
-
- {'overview page'}
-
-
+
+
+
+
+
+
+
+
+
+
+ {!showOnboarding && (
+ {
+ handleSearch(query);
+ }}
+ query={getFreeTextFromQuery(derivedQuery)}
+ />
+ )}
+
+
+
+
+ {!showOnboarding && (
+
+
+
+
+
+ )}
+
+ {showOnboarding && (
+
+ )}
+
+
+
+
+
);
}
-export default MobileOverviewPage;
+function MobileOverviewPageWithProviders() {
+ const organization = useOrganization();
+
+ return (
+
+
+
+
+
+ );
+}
+
+const StyledTransactionNameSearchBar = styled(TransactionNameSearchBar)`
+ flex: 2;
+`;
+
+export default MobileOverviewPageWithProviders;
diff --git a/static/app/views/performance/data.tsx b/static/app/views/performance/data.tsx
index 26082219c741a5..7cf70674bd78f0 100644
--- a/static/app/views/performance/data.tsx
+++ b/static/app/views/performance/data.tsx
@@ -438,7 +438,7 @@ export function prepareQueryForLandingPage(searchQuery, withStaticFilters) {
return conditions.formatString();
}
-function generateGenericPerformanceEventView(
+export function generateGenericPerformanceEventView(
location: Location,
withStaticFilters: boolean,
organization: Organization
@@ -551,7 +551,7 @@ function generateBackendPerformanceEventView(
return eventView;
}
-function generateMobilePerformanceEventView(
+export function generateMobilePerformanceEventView(
location: Location,
projects: Project[],
genericEventView: EventView,
From 67bd39164d14556cb7c8d14db0d6fa6679df6ac3 Mon Sep 17 00:00:00 2001
From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com>
Date: Wed, 2 Oct 2024 14:45:37 -0400
Subject: [PATCH 017/139] feat(insights): move backend performance view to
domain view (#78385)
Work for https://github.com/getsentry/sentry/issues/77572
The same PR as https://github.com/getsentry/sentry/pull/78234 but for
backend.
---
.../pages/backend/backendOverviewPage.tsx | 207 +++++++++++++++++-
static/app/views/performance/data.tsx | 2 +-
2 files changed, 199 insertions(+), 10 deletions(-)
diff --git a/static/app/views/insights/pages/backend/backendOverviewPage.tsx b/static/app/views/insights/pages/backend/backendOverviewPage.tsx
index a0cca88403e906..0b34ac3442f3a4 100644
--- a/static/app/views/insights/pages/backend/backendOverviewPage.tsx
+++ b/static/app/views/insights/pages/backend/backendOverviewPage.tsx
@@ -1,21 +1,210 @@
-import {Fragment} from 'react';
+import styled from '@emotion/styled';
+import Feature from 'sentry/components/acl/feature';
+import {COL_WIDTH_UNDEFINED} from 'sentry/components/gridEditable';
import * as Layout from 'sentry/components/layouts/thirds';
-import {PageAlert} from 'sentry/utils/performance/contexts/pageAlert';
+import {NoAccess} from 'sentry/components/noAccess';
+import {DatePageFilter} from 'sentry/components/organizations/datePageFilter';
+import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter';
+import PageFilterBar from 'sentry/components/organizations/pageFilterBar';
+import PageFiltersContainer from 'sentry/components/organizations/pageFilters/container';
+import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter';
+import TransactionNameSearchBar from 'sentry/components/performance/searchBar';
+import SentryDocumentTitle from 'sentry/components/sentryDocumentTitle';
+import {trackAnalytics} from 'sentry/utils/analytics';
+import {canUseMetricsData} from 'sentry/utils/performance/contexts/metricsEnhancedSetting';
+import {PageAlert, usePageAlert} from 'sentry/utils/performance/contexts/pageAlert';
+import {PerformanceDisplayProvider} from 'sentry/utils/performance/contexts/performanceDisplayContext';
+import {MutableSearch} from 'sentry/utils/tokenizeSearch';
+import {useLocation} from 'sentry/utils/useLocation';
+import {useNavigate} from 'sentry/utils/useNavigate';
+import useOrganization from 'sentry/utils/useOrganization';
+import useProjects from 'sentry/utils/useProjects';
+import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout';
+import {ToolRibbon} from 'sentry/views/insights/common/components/ribbon';
+import {useOnboardingProject} from 'sentry/views/insights/common/queries/useOnboardingProject';
import {BackendHeader} from 'sentry/views/insights/pages/backend/backendPageHeader';
+import {OVERVIEW_PAGE_TITLE} from 'sentry/views/insights/pages/settings';
+import {generateBackendPerformanceEventView} from 'sentry/views/performance/data';
+import {
+ DoubleChartRow,
+ TripleChartRow,
+} from 'sentry/views/performance/landing/widgets/components/widgetChartRow';
+import {PerformanceWidgetSetting} from 'sentry/views/performance/landing/widgets/widgetDefinitions';
+import Onboarding from 'sentry/views/performance/onboarding';
+import Table from 'sentry/views/performance/table';
+import {
+ getTransactionSearchQuery,
+ ProjectPerformanceType,
+} from 'sentry/views/performance/utils';
+
+export const BACKEND_COLUMN_TITLES = [
+ 'transaction',
+ 'project',
+ 'operation',
+ 'tpm',
+ 'p50()',
+ 'p75()',
+ 'p95()',
+ 'users',
+];
function BackendOverviewPage() {
+ const organization = useOrganization();
+ const location = useLocation();
+ const {setPageError} = usePageAlert();
+ const {projects} = useProjects();
+ const onboardingProject = useOnboardingProject();
+ const navigate = useNavigate();
+
+ const withStaticFilters = canUseMetricsData(organization);
+ const eventView = generateBackendPerformanceEventView(
+ location,
+ withStaticFilters,
+ organization
+ );
+
+ // TODO - this should come from MetricsField / EAP fields
+ eventView.fields = [
+ {field: 'team_key_transaction'},
+ {field: 'transaction'},
+ {field: 'project'},
+ {field: 'transaction.op'},
+ {field: 'tpm()'},
+ {field: 'p50(transaction.duration)'},
+ {field: 'p75(transaction.duration)'},
+ {field: 'p95(transaction.duration)'},
+ ].map(field => ({...field, width: COL_WIDTH_UNDEFINED}));
+
+ const showOnboarding = onboardingProject !== undefined;
+
+ const doubleChartRowCharts = [
+ PerformanceWidgetSetting.SLOW_HTTP_OPS,
+ PerformanceWidgetSetting.SLOW_DB_OPS,
+ ];
+ const tripleChartRowCharts = [
+ PerformanceWidgetSetting.TPM_AREA,
+ PerformanceWidgetSetting.DURATION_HISTOGRAM,
+ PerformanceWidgetSetting.P50_DURATION_AREA,
+ PerformanceWidgetSetting.P75_DURATION_AREA,
+ PerformanceWidgetSetting.P95_DURATION_AREA,
+ PerformanceWidgetSetting.P99_DURATION_AREA,
+ PerformanceWidgetSetting.FAILURE_RATE_AREA,
+ ];
+
+ if (organization.features.includes('insights-initial-modules')) {
+ doubleChartRowCharts.unshift(
+ PerformanceWidgetSetting.HIGHEST_CACHE_MISS_RATE_TRANSACTIONS
+ );
+ doubleChartRowCharts.unshift(PerformanceWidgetSetting.MOST_TIME_CONSUMING_DOMAINS);
+ doubleChartRowCharts.unshift(PerformanceWidgetSetting.MOST_TIME_SPENT_DB_QUERIES);
+ }
+
+ const sharedProps = {eventView, location, organization, withStaticFilters};
+
+ const getFreeTextFromQuery = (query: string) => {
+ const conditions = new MutableSearch(query);
+ const transactionValues = conditions.getFilterValues('transaction');
+ if (transactionValues.length) {
+ return transactionValues[0];
+ }
+ if (conditions.freeText.length > 0) {
+ // raw text query will be wrapped in wildcards in generatePerformanceEventView
+ // so no need to wrap it here
+ return conditions.freeText.join(' ');
+ }
+ return '';
+ };
+
+ function handleSearch(searchQuery: string) {
+ trackAnalytics('performance.domains.backend.search', {organization});
+
+ navigate({
+ pathname: location.pathname,
+ query: {
+ ...location.query,
+ cursor: undefined,
+ query: String(searchQuery).trim() || undefined,
+ isDefaultQuery: false,
+ },
+ });
+ }
+
+ const derivedQuery = getTransactionSearchQuery(location, eventView.query);
+
return (
-
+
-
-
- {'overview page'}
-
-
+
+
+
+
+
+
+
+
+
+
+ {!showOnboarding && (
+ {
+ handleSearch(query);
+ }}
+ query={getFreeTextFromQuery(derivedQuery)}
+ />
+ )}
+
+
+
+
+ {!showOnboarding && (
+
+
+
+
+
+ )}
+
+ {showOnboarding && (
+
+ )}
+
+
+
+
+
+ );
+}
+
+function BackendOverviewPageWithProviders() {
+ const organization = useOrganization();
+
+ return (
+
+
+
+
+
);
}
-export default BackendOverviewPage;
+const StyledTransactionNameSearchBar = styled(TransactionNameSearchBar)`
+ flex: 2;
+`;
+
+export default BackendOverviewPageWithProviders;
diff --git a/static/app/views/performance/data.tsx b/static/app/views/performance/data.tsx
index 7cf70674bd78f0..90edaf9cc07844 100644
--- a/static/app/views/performance/data.tsx
+++ b/static/app/views/performance/data.tsx
@@ -499,7 +499,7 @@ export function generateGenericPerformanceEventView(
return eventView;
}
-function generateBackendPerformanceEventView(
+export function generateBackendPerformanceEventView(
location: Location,
withStaticFilters: boolean,
organization: Organization
From 305533265fcd651cec1e4f5dcabb146d8c98d0c8 Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Wed, 2 Oct 2024 11:50:16 -0700
Subject: [PATCH 018/139] fix(issues): Shrink stack trace platform icon
(#78490)
---
.../interfaces/crashContent/stackTrace/platformIcon.tsx | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/static/app/components/events/interfaces/crashContent/stackTrace/platformIcon.tsx b/static/app/components/events/interfaces/crashContent/stackTrace/platformIcon.tsx
index 8ed1863a3adae1..26bcfaef6af6e7 100644
--- a/static/app/components/events/interfaces/crashContent/stackTrace/platformIcon.tsx
+++ b/static/app/components/events/interfaces/crashContent/stackTrace/platformIcon.tsx
@@ -1,15 +1,18 @@
import styled from '@emotion/styled';
import {PlatformIcon} from 'platformicons';
+import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils';
+
type Props = {
platform: string;
};
function StacktracePlatformIcon({platform}: Props) {
+ const hasStreamlineUi = useHasStreamlinedUI();
return (
@@ -19,7 +22,7 @@ function StacktracePlatformIcon({platform}: Props) {
const StyledPlatformIcon = styled(PlatformIcon)`
position: absolute;
top: 0;
- left: -20px;
+ left: -${p => p.size};
border-radius: 3px 0 0 3px;
@media (max-width: ${p => p.theme.breakpoints.medium}) {
From ae9022c096c87918a8e3a04d41f1b690794e7787 Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Wed, 2 Oct 2024 11:50:36 -0700
Subject: [PATCH 019/139] fix(issues): Allow the parent to align annotated text
(#78484)
---
static/app/components/events/meta/annotatedText/redaction.tsx | 1 -
1 file changed, 1 deletion(-)
diff --git a/static/app/components/events/meta/annotatedText/redaction.tsx b/static/app/components/events/meta/annotatedText/redaction.tsx
index e07937d1a33b84..825fc050472aab 100644
--- a/static/app/components/events/meta/annotatedText/redaction.tsx
+++ b/static/app/components/events/meta/annotatedText/redaction.tsx
@@ -2,6 +2,5 @@ import styled from '@emotion/styled';
export const Redaction = styled('span')<{withoutBackground?: boolean}>`
cursor: default;
- vertical-align: middle;
${p => !p.withoutBackground && `background: rgba(255, 0, 0, 0.05);`}
`;
From adac4d36898fa758eab39be807a3887875412d96 Mon Sep 17 00:00:00 2001
From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com>
Date: Wed, 2 Oct 2024 11:52:54 -0700
Subject: [PATCH 020/139] fix(issue-views): Fix query being overwritten on
default tab (#78450)
Fixes a bug where users were unable to load a query directly from the URL
query params while on a default view.
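A condensed sketch of the fix, assuming simplified names (`getInitialTabKey`, `TEMPORARY_TAB_KEY`, `Tab`) rather than the real component's internals: when the first tab is a default view but the URL already carries a query, fall back to the temporary tab so the query is not overwritten.

type Tab = {key: string};

// Illustrative constant; the real key lives in the issue views code.
const TEMPORARY_TAB_KEY = 'temporary-tab';

// A URL-supplied query wins over the default view so it isn't clobbered on load.
function getInitialTabKey(tabs: Tab[], query: string | undefined): string {
  if (tabs[0].key.startsWith('default')) {
    return query ? TEMPORARY_TAB_KEY : tabs[0].key;
  }
  return tabs[0].key;
}

getInitialTabKey([{key: 'default-0'}], 'is:unresolved browser:Chrome'); // 'temporary-tab'
getInitialTabKey([{key: 'default-0'}], undefined);                      // 'default-0'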
---
static/app/views/issueList/customViewsHeader.tsx | 3 +++
1 file changed, 3 insertions(+)
diff --git a/static/app/views/issueList/customViewsHeader.tsx b/static/app/views/issueList/customViewsHeader.tsx
index 49380ced3b2461..61249942cf958d 100644
--- a/static/app/views/issueList/customViewsHeader.tsx
+++ b/static/app/views/issueList/customViewsHeader.tsx
@@ -156,6 +156,9 @@ function CustomViewsIssueListHeaderTabsContent({
const getInitialTabKey = () => {
if (draggableTabs[0].key.startsWith('default')) {
+ if (query) {
+ return TEMPORARY_TAB_KEY;
+ }
return draggableTabs[0].key;
}
if (!query && !sort && !viewId) {
From 90ded67cb09eda55944e98a490fab7a31134b6ee Mon Sep 17 00:00:00 2001
From: Katie Byers
Date: Wed, 2 Oct 2024 11:53:16 -0700
Subject: [PATCH 021/139] ref(various): Small fixes and refactors (#78444)
This is my latest collection of small tweaks and refactors pulled out of other PRs to avoid polluting them with off-topic stuff. No behavior changes.
Included changes:
- Rename `find_existing_grouphash` to `find_grouphash_with_group` and `check_for_category_mismatch` to `is_non_error_type_group`, since those names more accurately describe what those functions are doing.
- Use the seer metrics sample rate for a metric which was missed when that change was made.
- Do some small clean up work in the `test_existing_group_new_hash_exists` test in `test_assign_to_group.py`.
- Move the one existing grouphash metadata test into its own file, in anticipation of there being many more tests in the future.
- Update/add some comments and docstrings.
---
src/sentry/event_manager.py | 20 +++++---
src/sentry/grouping/ingest/config.py | 4 +-
src/sentry/grouping/ingest/hashing.py | 12 +++--
src/sentry/grouping/ingest/seer.py | 5 +-
src/sentry/grouping/ingest/utils.py | 2 +-
src/sentry/projectoptions/manager.py | 3 ++
src/sentry/seer/similarity/utils.py | 2 +-
.../grouping/test_assign_to_group.py | 49 ++++++++++---------
.../grouping/test_grouphash_metadata.py | 46 +++++++++++++++++
.../test_event_manager_grouping.py | 38 --------------
10 files changed, 104 insertions(+), 77 deletions(-)
create mode 100644 tests/sentry/event_manager/grouping/test_grouphash_metadata.py
diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py
index 82a9a7d0dfa6bb..5463cc9146937c 100644
--- a/src/sentry/event_manager.py
+++ b/src/sentry/event_manager.py
@@ -55,7 +55,7 @@
)
from sentry.grouping.ingest.config import is_in_transition, update_grouping_config_if_needed
from sentry.grouping.ingest.hashing import (
- find_existing_grouphash,
+ find_grouphash_with_group,
get_or_create_grouphashes,
maybe_run_background_grouping,
maybe_run_secondary_grouping,
@@ -65,8 +65,8 @@
from sentry.grouping.ingest.seer import maybe_check_seer_for_matching_grouphash
from sentry.grouping.ingest.utils import (
add_group_id_to_grouphashes,
- check_for_category_mismatch,
check_for_group_creation_load_shed,
+ is_non_error_type_group,
)
from sentry.ingest.inbound_filters import FilterStatKeys
from sentry.integrations.tasks.kick_off_status_syncs import kick_off_status_syncs
@@ -1297,6 +1297,9 @@ def assign_event_to_group(event: Event, job: Job, metric_tags: MutableTags) -> G
metric_tags=metric_tags,
)
+ # The only way there won't be group info is if we matched to a performance, cron, replay, or
+ # other-non-error-type group because of a hash collision - exceedingly unlikely, and not
+ # something we've ever observed, but theoretically possible.
if group_info:
event.group = group_info.group
job["groups"] = [group_info]
@@ -1319,7 +1322,8 @@ def _save_aggregate_new(
if primary.existing_grouphash:
group_info = handle_existing_grouphash(job, primary.existing_grouphash, primary.grouphashes)
result = "found_primary"
- # If we haven't, try again using the secondary config
+ # If we haven't, try again using the secondary config. (If there is no secondary config, or
+ # we're out of the transition period, we'll get back the empty `NULL_GROUPHASH_INFO`.)
else:
secondary = get_hashes_and_grouphashes(job, maybe_run_secondary_grouping, metric_tags)
all_grouphashes = primary.grouphashes + secondary.grouphashes
@@ -1384,7 +1388,7 @@ def get_hashes_and_grouphashes(
if hashes:
grouphashes = get_or_create_grouphashes(project, hashes)
- existing_grouphash = find_existing_grouphash(grouphashes)
+ existing_grouphash = find_grouphash_with_group(grouphashes)
return GroupHashInfo(grouping_config, hashes, grouphashes, existing_grouphash)
else:
@@ -1421,7 +1425,11 @@ def handle_existing_grouphash(
# well as GH-5085.
group = Group.objects.get(id=existing_grouphash.group_id)
- if check_for_category_mismatch(group):
+ # As far as we know this has never happened, but in theory at least, the error event hashing
+ # algorithm and other event hashing algorithms could come up with the same hash value in the
+ # same project and our hash could have matched to a non-error group. Just to be safe, we make
+ # sure that's not the case before proceeding.
+ if is_non_error_type_group(group):
return None
# There may still be hashes that we did not use to find an existing
@@ -1487,7 +1495,7 @@ def create_group_with_grouphashes(job: Job, grouphashes: list[GroupHash]) -> Gro
# condition scenario above, we'll have been blocked long enough for the other event to
# have created the group and updated our grouphashes with a group id, which means this
# time, we'll find something.
- existing_grouphash = find_existing_grouphash(grouphashes)
+ existing_grouphash = find_grouphash_with_group(grouphashes)
# If we still haven't found a matching grouphash, we're now safe to go ahead and create
# the group.
diff --git a/src/sentry/grouping/ingest/config.py b/src/sentry/grouping/ingest/config.py
index 5b16e5c0f1bdd3..460bad6863dec1 100644
--- a/src/sentry/grouping/ingest/config.py
+++ b/src/sentry/grouping/ingest/config.py
@@ -53,8 +53,8 @@ def update_grouping_config_if_needed(project: Project, source: str) -> None:
from sentry import audit_log
from sentry.utils.audit import create_system_audit_entry
- # This is when we will stop calculating both old hashes (which we do in an effort to
- # preserve group continuity).
+ # This is when we will stop calculating the old hash in cases where we don't find the new
+ # hash (which we do in an effort to preserve group continuity).
expiry = int(time.time()) + settings.SENTRY_GROUPING_UPDATE_MIGRATION_PHASE
changes: dict[str, str | int] = {"sentry:grouping_config": new_config}
diff --git a/src/sentry/grouping/ingest/hashing.py b/src/sentry/grouping/ingest/hashing.py
index bd2932ae0d12e1..60a19bc3b4d916 100644
--- a/src/sentry/grouping/ingest/hashing.py
+++ b/src/sentry/grouping/ingest/hashing.py
@@ -40,8 +40,8 @@ def _calculate_event_grouping(
project: Project, event: Event, grouping_config: GroupingConfig
) -> list[str]:
"""
- Main entrypoint for modifying/enhancing and grouping an event, writes
- hashes back into event payload.
+ Calculate hashes for the event using the given grouping config, and add them into the event
+ data.
"""
metric_tags: MutableTags = {
"grouping_config": grouping_config["id"],
@@ -147,7 +147,7 @@ def _calculate_secondary_hashes(
description="event_manager.save.secondary_calculate_event_grouping",
):
# create a copy since `_calculate_event_grouping` modifies the event to add all sorts
- # of grouping info and we don't want the backup grouping data in there
+ # of grouping info and we don't want the secondary grouping data in there
event_copy = copy.deepcopy(job["event"])
secondary_hashes = _calculate_event_grouping(
project, event_copy, secondary_grouping_config
@@ -191,9 +191,13 @@ def _calculate_primary_hashes(
return _calculate_event_grouping(project, job["event"], grouping_config)
-def find_existing_grouphash(
+def find_grouphash_with_group(
grouphashes: Sequence[GroupHash],
) -> GroupHash | None:
+ """
+ Search in the list of given `GroupHash` records for one which has a group assigned to it, and
+ return the first one found. (Assumes grouphashes have already been sorted in priority order.)
+ """
for group_hash in grouphashes:
if group_hash.group_id is not None:
return group_hash
diff --git a/src/sentry/grouping/ingest/seer.py b/src/sentry/grouping/ingest/seer.py
index 0f8f8d70231328..2536d1f6b560f1 100644
--- a/src/sentry/grouping/ingest/seer.py
+++ b/src/sentry/grouping/ingest/seer.py
@@ -243,13 +243,12 @@ def maybe_check_seer_for_matching_grouphash(
sample_rate=options.get("seer.similarity.metrics_sample_rate"),
tags={"call_made": True, "blocker": "none"},
)
+
try:
# If no matching group is found in Seer, we'll still get back result
# metadata, but `seer_matched_grouphash` will be None
seer_response_data, seer_matched_grouphash = get_seer_similar_issues(event)
-
- # Insurance - in theory we shouldn't ever land here
- except Exception as e:
+ except Exception as e: # Insurance - in theory we shouldn't ever land here
sentry_sdk.capture_exception(
e, tags={"event": event.event_id, "project": event.project.id}
)
diff --git a/src/sentry/grouping/ingest/utils.py b/src/sentry/grouping/ingest/utils.py
index f18049ca8ce463..5e7c3261650f71 100644
--- a/src/sentry/grouping/ingest/utils.py
+++ b/src/sentry/grouping/ingest/utils.py
@@ -48,7 +48,7 @@ def check_for_group_creation_load_shed(project: Project, event: Event) -> None:
raise HashDiscarded("Load shedding group creation", reason="load_shed")
-def check_for_category_mismatch(group: Group) -> bool:
+def is_non_error_type_group(group: Group) -> bool:
"""
Make sure an error event hasn't hashed to a value assigned to a non-error-type group
"""
diff --git a/src/sentry/projectoptions/manager.py b/src/sentry/projectoptions/manager.py
index 7256eab87904f6..af5b3e572722ec 100644
--- a/src/sentry/projectoptions/manager.py
+++ b/src/sentry/projectoptions/manager.py
@@ -15,8 +15,11 @@ def get_default(self, project=None, epoch=None):
epoch = 1
else:
epoch = project.get_option("sentry:option-epoch") or 1
+ # Find where in the ordered epoch list the project's epoch would go
idx = bisect.bisect(self._epoch_default_list, epoch)
if idx > 0:
+ # Return the value corresponding to the highest epoch which doesn't exceed the
+ # project epoch
return self.epoch_defaults[self._epoch_default_list[idx - 1]]
return self.default
diff --git a/src/sentry/seer/similarity/utils.py b/src/sentry/seer/similarity/utils.py
index 9ef635e1492d8c..0eb63ede964a41 100644
--- a/src/sentry/seer/similarity/utils.py
+++ b/src/sentry/seer/similarity/utils.py
@@ -193,7 +193,7 @@ def get_stacktrace_string(data: dict[str, Any]) -> str:
if frame_dict["filename"].startswith(base64_prefix):
metrics.incr(
"seer.grouping.base64_encoded_filename",
- sample_rate=1.0,
+ sample_rate=options.get("seer.similarity.metrics_sample_rate"),
)
base64_encoded = True
break
diff --git a/tests/sentry/event_manager/grouping/test_assign_to_group.py b/tests/sentry/event_manager/grouping/test_assign_to_group.py
index 81e07ecb5684e7..411f8d11d87025 100644
--- a/tests/sentry/event_manager/grouping/test_assign_to_group.py
+++ b/tests/sentry/event_manager/grouping/test_assign_to_group.py
@@ -12,7 +12,7 @@
from sentry.grouping.ingest.hashing import (
_calculate_primary_hashes,
_calculate_secondary_hashes,
- find_existing_grouphash,
+ find_grouphash_with_group,
)
from sentry.grouping.ingest.metrics import record_hash_calculation_metrics
from sentry.models.grouphash import GroupHash
@@ -28,15 +28,15 @@
@contextmanager
def patch_grouping_helpers(return_values: dict[str, Any]):
- wrapped_find_existing_grouphash = capture_results(find_existing_grouphash, return_values)
+ wrapped_find_grouphash_with_group = capture_results(find_grouphash_with_group, return_values)
wrapped_calculate_primary_hashes = capture_results(_calculate_primary_hashes, return_values)
wrapped_calculate_secondary_hashes = capture_results(_calculate_secondary_hashes, return_values)
with (
mock.patch(
- "sentry.event_manager.find_existing_grouphash",
- wraps=wrapped_find_existing_grouphash,
- ) as find_existing_grouphash_spy,
+ "sentry.event_manager.find_grouphash_with_group",
+ wraps=wrapped_find_grouphash_with_group,
+ ) as find_grouphash_with_group_spy,
mock.patch(
"sentry.grouping.ingest.hashing._calculate_primary_hashes",
wraps=wrapped_calculate_primary_hashes,
@@ -58,7 +58,7 @@ def patch_grouping_helpers(return_values: dict[str, Any]):
) as record_calculation_metrics_spy,
):
yield {
- "find_existing_grouphash": find_existing_grouphash_spy,
+ "find_grouphash_with_group": find_grouphash_with_group_spy,
"_calculate_primary_hashes": calculate_primary_hashes_spy,
"_calculate_secondary_hashes": calculate_secondary_hashes_spy,
"_create_group": create_group_spy,
@@ -160,7 +160,7 @@ def get_results_from_saving_event(
gh.hash: gh.group_id for gh in GroupHash.objects.filter(project_id=project.id)
}
- hash_search_results = return_values["find_existing_grouphash"]
+ hash_search_results = return_values["find_grouphash_with_group"]
# Filter out all the Nones to see if we actually found anything
filtered_results = list(filter(lambda result: bool(result), hash_search_results))
hash_search_result = filtered_results[0] if filtered_results else None
@@ -235,6 +235,17 @@ def get_results_from_saving_event(
}
+# The overall idea of these tests is to prove that
+#
+# a) We only run the secondary calculation when the project is in transition
+# b) In transition, we only run the secondary calculation if the primary calculation
+# doesn't find an existing group
+# c) If the primary (or secondary, if it's calculated) hash finds a group, the event is
+# assigned there
+# d) If neither finds a group, a new group is created and both the primary (and secondary,
+# if it's calculated) hashes are stored
+
+
@django_db_all
@pytest.mark.parametrize(
"in_transition", (True, False), ids=(" in_transition: True ", " in_transition: False ")
@@ -368,24 +379,18 @@ def test_existing_group_new_hash_exists(
existing_event = save_event_with_grouping_config(
event_data, project, DEFAULT_GROUPING_CONFIG, LEGACY_GROUPING_CONFIG, True
)
- assert existing_event.group_id is not None
- assert (
- GroupHash.objects.filter(
- project_id=project.id, group_id=existing_event.group_id
- ).count()
- == 2
- )
+ group_id = existing_event.group_id
+
+ assert group_id is not None
+ assert GroupHash.objects.filter(project_id=project.id, group_id=group_id).count() == 2
else:
existing_event = save_event_with_grouping_config(
event_data, project, DEFAULT_GROUPING_CONFIG
)
- assert existing_event.group_id is not None
- assert (
- GroupHash.objects.filter(
- project_id=project.id, group_id=existing_event.group_id
- ).count()
- == 1
- )
+ group_id = existing_event.group_id
+
+ assert group_id is not None
+ assert GroupHash.objects.filter(project_id=project.id, group_id=group_id).count() == 1
# Now save a new, identical, event
results = get_results_from_saving_event(
@@ -394,7 +399,7 @@ def test_existing_group_new_hash_exists(
primary_config=DEFAULT_GROUPING_CONFIG,
secondary_config=LEGACY_GROUPING_CONFIG,
in_transition=in_transition,
- existing_group_id=existing_event.group_id,
+ existing_group_id=group_id,
)
assert results == {
diff --git a/tests/sentry/event_manager/grouping/test_grouphash_metadata.py b/tests/sentry/event_manager/grouping/test_grouphash_metadata.py
new file mode 100644
index 00000000000000..c3bbc2824a42d3
--- /dev/null
+++ b/tests/sentry/event_manager/grouping/test_grouphash_metadata.py
@@ -0,0 +1,46 @@
+from __future__ import annotations
+
+from sentry.models.grouphash import GroupHash
+from sentry.models.grouphashmetadata import GroupHashMetadata
+from sentry.testutils.cases import TestCase
+from sentry.testutils.helpers import Feature
+from sentry.testutils.helpers.eventprocessing import save_new_event
+from sentry.testutils.helpers.options import override_options
+from sentry.testutils.skips import requires_snuba
+
+pytestmark = [requires_snuba]
+
+
+class GroupHashMetadataTest(TestCase):
+ def test_creates_grouphash_metadata_when_appropriate(self):
+ # The killswitch is obeyed
+ with override_options({"grouping.grouphash_metadata.ingestion_writes_enabled": False}):
+ event1 = save_new_event({"message": "Dogs are great!"}, self.project)
+ grouphash = GroupHash.objects.filter(
+ project=self.project, hash=event1.get_primary_hash()
+ ).first()
+ assert grouphash and grouphash.metadata is None
+
+ # The feature flag is obeyed
+ with Feature({"organizations:grouphash-metadata-creation": False}):
+ event2 = save_new_event({"message": "Sit! Good dog!"}, self.project)
+ grouphash = GroupHash.objects.filter(
+ project=self.project, hash=event2.get_primary_hash()
+ ).first()
+ assert grouphash and grouphash.metadata is None
+
+ with Feature({"organizations:grouphash-metadata-creation": True}):
+ # New hashes get metadata
+ event3 = save_new_event({"message": "Adopt, don't shop"}, self.project)
+ grouphash = GroupHash.objects.filter(
+ project=self.project, hash=event3.get_primary_hash()
+ ).first()
+ assert grouphash and isinstance(grouphash.metadata, GroupHashMetadata)
+
+ # For now, existing hashes aren't backfilled when new events are assigned to them
+ event4 = save_new_event({"message": "Dogs are great!"}, self.project)
+ assert event4.get_primary_hash() == event1.get_primary_hash()
+ grouphash = GroupHash.objects.filter(
+ project=self.project, hash=event4.get_primary_hash()
+ ).first()
+ assert grouphash and grouphash.metadata is None
diff --git a/tests/sentry/event_manager/test_event_manager_grouping.py b/tests/sentry/event_manager/test_event_manager_grouping.py
index 08d39d3ce9af45..402632a3d47fa4 100644
--- a/tests/sentry/event_manager/test_event_manager_grouping.py
+++ b/tests/sentry/event_manager/test_event_manager_grouping.py
@@ -13,14 +13,10 @@
from sentry.eventtypes.base import DefaultEvent
from sentry.models.auditlogentry import AuditLogEntry
from sentry.models.group import Group
-from sentry.models.grouphash import GroupHash
-from sentry.models.grouphashmetadata import GroupHashMetadata
from sentry.models.project import Project
from sentry.projectoptions.defaults import DEFAULT_GROUPING_CONFIG, LEGACY_GROUPING_CONFIG
from sentry.testutils.cases import TestCase
-from sentry.testutils.helpers import Feature
from sentry.testutils.helpers.eventprocessing import save_new_event
-from sentry.testutils.helpers.options import override_options
from sentry.testutils.pytest.fixtures import django_db_all
from sentry.testutils.silo import assume_test_silo_mode_of
from sentry.testutils.skips import requires_snuba
@@ -172,40 +168,6 @@ def test_auto_updates_grouping_config(self):
)
assert actual_expiry == expected_expiry or actual_expiry == expected_expiry - 1
- def test_creates_grouphash_metadata_when_appropriate(self):
-
- # The killswitch is obeyed
- with override_options({"grouping.grouphash_metadata.ingestion_writes_enabled": False}):
- event1 = save_new_event({"message": "Dogs are great!"}, self.project)
- grouphash = GroupHash.objects.filter(
- project=self.project, hash=event1.get_primary_hash()
- ).first()
- assert grouphash and grouphash.metadata is None
-
- # The feature flag is obeyed
- with Feature({"organizations:grouphash-metadata-creation": False}):
- event2 = save_new_event({"message": "Sit! Good dog!"}, self.project)
- grouphash = GroupHash.objects.filter(
- project=self.project, hash=event2.get_primary_hash()
- ).first()
- assert grouphash and grouphash.metadata is None
-
- with Feature({"organizations:grouphash-metadata-creation": True}):
- # New hashes get metadata
- event3 = save_new_event({"message": "Adopt, don't shop"}, self.project)
- grouphash = GroupHash.objects.filter(
- project=self.project, hash=event3.get_primary_hash()
- ).first()
- assert grouphash and isinstance(grouphash.metadata, GroupHashMetadata)
-
- # For now, existing hashes aren't backfiled when new events are assigned to them
- event4 = save_new_event({"message": "Dogs are great!"}, self.project)
- assert event4.get_primary_hash() == event1.get_primary_hash()
- grouphash = GroupHash.objects.filter(
- project=self.project, hash=event4.get_primary_hash()
- ).first()
- assert grouphash and grouphash.metadata is None
-
class PlaceholderTitleTest(TestCase):
"""
From 6c8c123d34a1e007c758b1da15bff90fc64a2dd5 Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Wed, 2 Oct 2024 11:54:21 -0700
Subject: [PATCH 022/139] feat(issues): Add "jump to" for feature flags, trace
(#78439)
---
.../events/featureFlags/eventFeatureFlagList.tsx | 3 ++-
.../events/interfaces/performance/eventTraceView.tsx | 2 +-
static/app/views/issueDetails/streamline/context.tsx | 8 ++++++++
.../app/views/issueDetails/streamline/eventNavigation.tsx | 2 ++
4 files changed, 13 insertions(+), 2 deletions(-)
diff --git a/static/app/components/events/featureFlags/eventFeatureFlagList.tsx b/static/app/components/events/featureFlags/eventFeatureFlagList.tsx
index b5d64d263dbf54..e6715d4b4bc9aa 100644
--- a/static/app/components/events/featureFlags/eventFeatureFlagList.tsx
+++ b/static/app/components/events/featureFlags/eventFeatureFlagList.tsx
@@ -31,6 +31,7 @@ import type {Project} from 'sentry/types/project';
import {trackAnalytics} from 'sentry/utils/analytics';
import {useFeedbackForm} from 'sentry/utils/useFeedbackForm';
import useOrganization from 'sentry/utils/useOrganization';
+import {SectionKey} from 'sentry/views/issueDetails/streamline/context';
import {InterimSection} from 'sentry/views/issueDetails/streamline/interimSection';
export function EventFeatureFlagList({
@@ -200,7 +201,7 @@ export function EventFeatureFlagList({
)}
isHelpHoverable
title={t('Feature Flags')}
- type="feature-flags"
+ type={SectionKey.FEATURE_FLAGS}
actions={actions}
>
diff --git a/static/app/components/events/interfaces/performance/eventTraceView.tsx b/static/app/components/events/interfaces/performance/eventTraceView.tsx
index d3bef4960ce5d7..c2a5756d76a949 100644
--- a/static/app/components/events/interfaces/performance/eventTraceView.tsx
+++ b/static/app/components/events/interfaces/performance/eventTraceView.tsx
@@ -100,7 +100,7 @@ function EventTraceViewInner({
}
return (
-
+
(
From 5b0be82eb76ef271c9fcca2c45961b759291b5fe Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Wed, 2 Oct 2024 11:54:52 -0700
Subject: [PATCH 023/139] fix(issues): Handle native frame jump to images
loaded (#78448)
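The patch replaces the `scroll-to-element` dependency with the native `Element.scrollIntoView`; a small standalone sketch of the pattern (the `scrollToSection` helper and the `'debugmeta'` id are illustrative):

// Scroll smoothly to a section by element id, mirroring the behavior the
// patch swaps in for the removed scroll-to-element dependency.
function scrollToSection(sectionId: string): void {
  document
    .getElementById(sectionId)
    ?.scrollIntoView({block: 'start', behavior: 'smooth'});
}

// Usage: jump to the "images loaded" (debug meta) section on click.
scrollToSection('debugmeta');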
---
.../interfaces/frame/deprecatedLine.tsx | 11 ++++++--
.../events/interfaces/frame/line/native.tsx | 7 +++--
.../events/interfaces/nativeFrame.tsx | 28 +++++++++++++++++--
3 files changed, 38 insertions(+), 8 deletions(-)
diff --git a/static/app/components/events/interfaces/frame/deprecatedLine.tsx b/static/app/components/events/interfaces/frame/deprecatedLine.tsx
index c2fd0390dde9b7..9118969f3a9bcf 100644
--- a/static/app/components/events/interfaces/frame/deprecatedLine.tsx
+++ b/static/app/components/events/interfaces/frame/deprecatedLine.tsx
@@ -1,7 +1,6 @@
import {Component, Fragment} from 'react';
import styled from '@emotion/styled';
import classNames from 'classnames';
-import scrollToElement from 'scroll-to-element';
import {openModal} from 'sentry/actionCreators/modal';
import Tag from 'sentry/components/badge/tag';
@@ -30,6 +29,7 @@ import type {PlatformKey} from 'sentry/types/project';
import {trackAnalytics} from 'sentry/utils/analytics';
import withOrganization from 'sentry/utils/withOrganization';
import withSentryAppComponents from 'sentry/utils/withSentryAppComponents';
+import {SectionKey} from 'sentry/views/issueDetails/streamline/context';
import type DebugImage from '../debugMeta/debugImage';
import {combineStatus} from '../debugMeta/utils';
@@ -204,12 +204,17 @@ export class DeprecatedLine extends Component {
makeFilter(instructionAddr, addrMode, this.props.image)
);
}
- scrollToElement('#images-loaded');
+
+ document
+ .getElementById(SectionKey.DEBUGMETA)
+ ?.scrollIntoView({block: 'start', behavior: 'smooth'});
};
scrollToSuspectRootCause = event => {
event.stopPropagation(); // to prevent collapsing if collapsible
- scrollToElement('#suspect-root-cause');
+ document
+ .getElementById(SectionKey.SUSPECT_ROOT_CAUSE)
+ ?.scrollIntoView({block: 'start', behavior: 'smooth'});
};
preventCollapse = evt => {
diff --git a/static/app/components/events/interfaces/frame/line/native.tsx b/static/app/components/events/interfaces/frame/line/native.tsx
index 8e5916d35bc51e..6443bf4be74ab4 100644
--- a/static/app/components/events/interfaces/frame/line/native.tsx
+++ b/static/app/components/events/interfaces/frame/line/native.tsx
@@ -1,12 +1,12 @@
import {useContext} from 'react';
import styled from '@emotion/styled';
-import scrollToElement from 'scroll-to-element';
import {TraceEventDataSectionContext} from 'sentry/components/events/traceEventDataSection';
import {t} from 'sentry/locale';
import DebugMetaStore from 'sentry/stores/debugMetaStore';
import {space} from 'sentry/styles/space';
import type {Frame} from 'sentry/types/event';
+import {SectionKey} from 'sentry/views/issueDetails/streamline/context';
import type DebugImage from '../../debugMeta/debugImage';
import {combineStatus} from '../../debugMeta/utils';
@@ -92,7 +92,10 @@ export function Native({
if (instructionAddr) {
DebugMetaStore.updateFilter(makeFilter(instructionAddr));
}
- scrollToElement('#images-loaded');
+
+ document
+ .getElementById(SectionKey.DEBUGMETA)
+ ?.scrollIntoView({block: 'start', behavior: 'smooth'});
}
const shouldShowLinkToImage =
diff --git a/static/app/components/events/interfaces/nativeFrame.tsx b/static/app/components/events/interfaces/nativeFrame.tsx
index 0d26af64759ccd..79727934bdbda6 100644
--- a/static/app/components/events/interfaces/nativeFrame.tsx
+++ b/static/app/components/events/interfaces/nativeFrame.tsx
@@ -1,7 +1,6 @@
import type {MouseEvent} from 'react';
import {Fragment, useContext, useState} from 'react';
import styled from '@emotion/styled';
-import scrollToElement from 'scroll-to-element';
import Tag from 'sentry/components/badge/tag';
import {Button} from 'sentry/components/button';
@@ -38,7 +37,11 @@ import type {
} from 'sentry/types/integrations';
import type {PlatformKey} from 'sentry/types/project';
import {defined} from 'sentry/utils';
+import {useSyncedLocalStorageState} from 'sentry/utils/useSyncedLocalStorageState';
import withSentryAppComponents from 'sentry/utils/withSentryAppComponents';
+import {SectionKey, useEventDetails} from 'sentry/views/issueDetails/streamline/context';
+import {getFoldSectionKey} from 'sentry/views/issueDetails/streamline/foldSection';
+import {useHasStreamlinedUI} from 'sentry/views/issueDetails/utils';
import type DebugImage from './debugMeta/debugImage';
import {combineStatus} from './debugMeta/utils';
@@ -101,6 +104,14 @@ function NativeFrame({
}: Props) {
const traceEventDataSectionContext = useContext(TraceEventDataSectionContext);
+ const {sectionData} = useEventDetails();
+ const debugSectionConfig = sectionData[SectionKey.DEBUGMETA];
+ const [_isCollapsed, setIsCollapsed] = useSyncedLocalStorageState(
+ getFoldSectionKey(SectionKey.DEBUGMETA),
+ debugSectionConfig?.initialCollapse ?? false
+ );
+ const hasStreamlinedUI = useHasStreamlinedUI();
+
const absolute = traceEventDataSectionContext?.display.includes('absolute-addresses');
const fullStackTrace = traceEventDataSectionContext?.fullStackTrace;
@@ -118,7 +129,9 @@ function NativeFrame({
const packageClickable =
!!frame.symbolicatorStatus &&
frame.symbolicatorStatus !== SymbolicatorStatus.UNKNOWN_IMAGE &&
- !isHoverPreviewed;
+ !isHoverPreviewed &&
+ // We can only tell whether the debug section is rendered once the streamlined UI is enabled
+ (hasStreamlinedUI ? !!debugSectionConfig : true);
const leadsToApp = !frame.inApp && (nextFrame?.inApp || !nextFrame);
const expandable =
@@ -229,6 +242,7 @@ function NativeFrame({
}
}
+ // This isn't possible when the page doesn't have the images loaded section
function handleGoToImagesLoaded(e: MouseEvent) {
e.stopPropagation(); // to prevent collapsing if collapsible
@@ -241,7 +255,15 @@ function NativeFrame({
DebugMetaStore.updateFilter(searchTerm);
}
- scrollToElement('#images-loaded');
+ if (hasStreamlinedUI) {
+ // Expand the section
+ setIsCollapsed(false);
+ }
+
+ // Scroll to the section
+ document
+ .getElementById(SectionKey.DEBUGMETA)
+ ?.scrollIntoView({block: 'start', behavior: 'smooth'});
}
function handleToggleContext(e: MouseEvent) {
From 206253be9098810c6296f2b1f277404169e89144 Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Wed, 2 Oct 2024 11:55:04 -0700
Subject: [PATCH 024/139] feat(issues): Add missing clojure prismjs extensions
(#78449)
---
static/app/utils/prism.tsx | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/static/app/utils/prism.tsx b/static/app/utils/prism.tsx
index 63e722a3077912..d1b13b196b2f91 100644
--- a/static/app/utils/prism.tsx
+++ b/static/app/utils/prism.tsx
@@ -55,6 +55,12 @@ const EXTRA_LANGUAGE_ALIASES: Record = {
bundle: 'javascript',
vue: 'javascript',
svelte: 'javascript',
+ 'js?': 'javascript',
+
+ // Clojure
+ clj: 'clojure',
+ cljc: 'clojure',
+ cljs: 'clojure',
};
export const getPrismLanguage = (lang: string) => {
From 2ee4296cb3f0887b1a153274fae9c6d3e94e8e7c Mon Sep 17 00:00:00 2001
From: George Gritsouk <989898+gggritso@users.noreply.github.com>
Date: Wed, 2 Oct 2024 14:55:21 -0400
Subject: [PATCH 025/139] ref(tsc): Convert `ProjectVelocityCard` to FC
(#78392)
Sibling to https://github.com/getsentry/sentry/pull/78294, part of
https://github.com/getsentry/frontend-tsc/issues/2
---------
Co-authored-by: Scott Cooper
Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com>
---
.../projectVelocityScoreCard.spec.tsx | 103 +++++
.../projectVelocityScoreCard.tsx | 380 ++++++++----------
2 files changed, 279 insertions(+), 204 deletions(-)
create mode 100644 static/app/views/projectDetail/projectScoreCards/projectVelocityScoreCard.spec.tsx
diff --git a/static/app/views/projectDetail/projectScoreCards/projectVelocityScoreCard.spec.tsx b/static/app/views/projectDetail/projectScoreCards/projectVelocityScoreCard.spec.tsx
new file mode 100644
index 00000000000000..25372bab2066c0
--- /dev/null
+++ b/static/app/views/projectDetail/projectScoreCards/projectVelocityScoreCard.spec.tsx
@@ -0,0 +1,103 @@
+import {OrganizationFixture} from 'sentry-fixture/organization';
+
+import {render, screen} from 'sentry-test/reactTestingLibrary';
+
+import ProjectVelocityScoreCard from './projectVelocityScoreCard';
+
+describe('ProjectDetail > ProjectVelocity', function () {
+ const organization = OrganizationFixture();
+
+ const selection = {
+ projects: [1],
+ environments: [],
+ datetime: {
+ start: null,
+ end: null,
+ period: '14d',
+ utc: null,
+ },
+ };
+
+ afterEach(function () {
+ MockApiClient.clearMockResponses();
+ });
+
+ it('renders release count', async function () {
+ const previousDataEndpointMock = MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/releases/stats/`,
+ body: Array.from({length: 98}).map((_item, index) => ({
+ version: `0.0.${index + 100}`,
+ })),
+ status: 200,
+ });
+
+ const currentDataEndpointMock = MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/releases/stats/`,
+ body: Array.from({length: 202}).map((_item, index) => ({
+ version: `0.0.${index + 100}`,
+ })),
+ status: 200,
+ match: [MockApiClient.matchQuery({statsPeriod: '14d'})],
+ });
+
+ render(
+
+ );
+
+ expect(await screen.findByText('Number of Releases')).toBeInTheDocument();
+ expect(await screen.findByText('202')).toBeInTheDocument();
+ expect(await screen.findByText('104')).toBeInTheDocument();
+
+ expect(currentDataEndpointMock).toHaveBeenCalledTimes(1);
+ expect(currentDataEndpointMock).toHaveBeenNthCalledWith(
+ 1,
+ `/organizations/${organization.slug}/releases/stats/`,
+ expect.objectContaining({
+ query: {
+ environment: [],
+ project: 1,
+ statsPeriod: '14d',
+ },
+ })
+ );
+
+ expect(previousDataEndpointMock).toHaveBeenCalledTimes(1);
+ expect(previousDataEndpointMock).toHaveBeenNthCalledWith(
+ 1,
+ `/organizations/${organization.slug}/releases/stats/`,
+ expect.objectContaining({
+ query: {
+ environment: [],
+ project: 1,
+ start: '2017-09-19T02:41:20',
+ end: '2017-10-03T02:41:20',
+ },
+ })
+ );
+ });
+
+ it('renders without releases', async function () {
+ const dataEndpointMock = MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/releases/stats/`,
+ body: [],
+ status: 200,
+ });
+
+ render(
+
+ );
+
+ expect(await screen.findByRole('button', {name: 'Start Setup'})).toBeInTheDocument();
+ expect(await screen.findByRole('button', {name: 'Get Tour'})).toBeInTheDocument();
+
+ expect(dataEndpointMock).toHaveBeenCalledTimes(3);
+ });
+});
diff --git a/static/app/views/projectDetail/projectScoreCards/projectVelocityScoreCard.tsx b/static/app/views/projectDetail/projectScoreCards/projectVelocityScoreCard.tsx
index afc5bcc74cb215..dbf32a5dec0009 100644
--- a/static/app/views/projectDetail/projectScoreCards/projectVelocityScoreCard.tsx
+++ b/static/app/views/projectDetail/projectScoreCards/projectVelocityScoreCard.tsx
@@ -1,8 +1,7 @@
import {Fragment} from 'react';
-import {fetchAnyReleaseExistence} from 'sentry/actionCreators/projects';
import {shouldFetchPreviousPeriod} from 'sentry/components/charts/utils';
-import DeprecatedAsyncComponent from 'sentry/components/deprecatedAsyncComponent';
+import LoadingError from 'sentry/components/loadingError';
import {normalizeDateTimeParams} from 'sentry/components/organizations/pageFilters/parse';
import ScoreCard from 'sentry/components/scoreCard';
import {parseStatsPeriod} from 'sentry/components/timeRangeSelector/utils';
@@ -12,6 +11,7 @@ import type {PageFilters} from 'sentry/types/core';
import type {Organization} from 'sentry/types/organization';
import {defined} from 'sentry/utils';
import {getPeriod} from 'sentry/utils/duration/getPeriod';
+import {useApiQuery} from 'sentry/utils/queryClient';
import MissingReleasesButtons from '../missingFeatureButtons/missingReleasesButtons';
@@ -19,225 +19,197 @@ const API_LIMIT = 1000;
type Release = {date: string; version: string};
-type Props = DeprecatedAsyncComponent['props'] & {
- isProjectStabilized: boolean;
- organization: Organization;
- selection: PageFilters;
- query?: string;
-};
-
-type State = DeprecatedAsyncComponent['state'] & {
- currentReleases: Release[] | null;
- noReleaseEver: boolean;
- previousReleases: Release[] | null;
-};
-
-class ProjectVelocityScoreCard extends DeprecatedAsyncComponent {
- shouldRenderBadRequests = true;
-
- getDefaultState() {
- return {
- ...super.getDefaultState(),
- currentReleases: null,
- previousReleases: null,
- noReleaseEver: false,
- };
- }
-
- getEndpoints() {
- const {organization, selection, isProjectStabilized, query} = this.props;
-
- if (!isProjectStabilized) {
- return [];
- }
-
- const {projects, environments, datetime} = selection;
- const {period} = datetime;
- const commonQuery = {
- environment: environments,
- project: projects[0],
- query,
- };
- const endpoints: ReturnType = [
- [
- 'currentReleases',
- `/organizations/${organization.slug}/releases/stats/`,
- {
- includeAllArgs: true,
- method: 'GET',
- query: {
- ...commonQuery,
- ...normalizeDateTimeParams(datetime),
- },
+const useReleaseCount = (props: Props) => {
+ const {organization, selection, isProjectStabilized, query} = props;
+
+ const isEnabled = isProjectStabilized;
+ const {projects, environments, datetime} = selection;
+ const {period} = datetime;
+
+ const {start: previousStart} = parseStatsPeriod(
+ getPeriod({period, start: undefined, end: undefined}, {shouldDoublePeriod: true})
+ .statsPeriod!
+ );
+
+ const {start: previousEnd} = parseStatsPeriod(
+ getPeriod({period, start: undefined, end: undefined}, {shouldDoublePeriod: false})
+ .statsPeriod!
+ );
+
+ const commonQuery = {
+ environment: environments,
+ project: projects[0],
+ query,
+ };
+
+ const currentQuery = useApiQuery(
+ [
+ `/organizations/${organization.slug}/releases/stats/`,
+ {
+ query: {
+ ...commonQuery,
+ ...normalizeDateTimeParams(datetime),
},
- ],
- ];
-
- if (
- shouldFetchPreviousPeriod({
- start: datetime.start,
- end: datetime.end,
- period: datetime.period,
- })
- ) {
- const {start: previousStart} = parseStatsPeriod(
- getPeriod({period, start: undefined, end: undefined}, {shouldDoublePeriod: true})
- .statsPeriod!
- );
-
- const {start: previousEnd} = parseStatsPeriod(
- getPeriod({period, start: undefined, end: undefined}, {shouldDoublePeriod: false})
- .statsPeriod!
- );
-
- endpoints.push([
- 'previousReleases',
- `/organizations/${organization.slug}/releases/stats/`,
- {
- query: {
- ...commonQuery,
- start: previousStart,
- end: previousEnd,
- },
+ },
+ ],
+ {staleTime: 0, enabled: isEnabled}
+ );
+
+ const isPreviousPeriodEnabled = shouldFetchPreviousPeriod({
+ start: datetime.start,
+ end: datetime.end,
+ period: datetime.period,
+ });
+
+ const previousQuery = useApiQuery(
+ [
+ `/organizations/${organization.slug}/releases/stats/`,
+ {
+ query: {
+ ...commonQuery,
+ start: previousStart,
+ end: previousEnd,
},
- ]);
- }
-
- return endpoints;
- }
-
- /**
- * If our releases are empty, determine if we had a release in the last 90 days (empty message differs then)
- */
- async onLoadAllEndpointsSuccess() {
- const {currentReleases, previousReleases} = this.state;
- const {organization, selection, isProjectStabilized} = this.props;
-
- if (!isProjectStabilized) {
- return;
+ },
+ ],
+ {
+ staleTime: 0,
+ enabled: isEnabled && isPreviousPeriodEnabled,
}
-
- if ([...(currentReleases ?? []), ...(previousReleases ?? [])].length !== 0) {
- this.setState({noReleaseEver: false});
- return;
- }
-
- this.setState({loading: true});
-
- const hasOlderReleases = await fetchAnyReleaseExistence(
- this.api,
- organization.slug,
- selection.projects[0]
- );
-
- this.setState({noReleaseEver: !hasOlderReleases, loading: false});
- }
-
- get cardTitle() {
- return t('Number of Releases');
- }
-
- get cardHelp() {
- return this.trend
- ? t(
- 'The number of releases for this project and how it has changed since the last period.'
- )
- : t('The number of releases for this project.');
- }
-
- get trend() {
- const {currentReleases, previousReleases} = this.state;
-
- if (!defined(currentReleases) || !defined(previousReleases)) {
- return null;
- }
-
- return currentReleases.length - previousReleases.length;
- }
-
- get trendStatus(): React.ComponentProps['trendStatus'] {
- if (!this.trend) {
- return undefined;
- }
-
- return this.trend > 0 ? 'good' : 'bad';
- }
-
- componentDidUpdate(prevProps: Props) {
- const {selection, isProjectStabilized, query} = this.props;
-
- if (
- prevProps.selection !== selection ||
- prevProps.isProjectStabilized !== isProjectStabilized ||
- prevProps.query !== query
- ) {
- this.remountComponent();
+ );
+
+ const allReleases = [...(currentQuery.data ?? []), ...(previousQuery.data ?? [])];
+
+ const isAllTimePeriodEnabled =
+ !currentQuery.isPending &&
+ !currentQuery.error &&
+ !previousQuery.isPending &&
+ !previousQuery.error &&
+ allReleases.length === 0;
+
+ const allTimeQuery = useApiQuery(
+ [
+ `/organizations/${organization.slug}/releases/stats/`,
+ {
+ query: {
+ ...commonQuery,
+ statsPeriod: '90d',
+ per_page: 1,
+ },
+ },
+ ],
+ {
+ staleTime: 0,
+ enabled: isEnabled && isAllTimePeriodEnabled,
}
- }
+ );
+
+ return {
+ data: currentQuery.data,
+ previousData: previousQuery.data,
+ allTimeData: allTimeQuery.data,
+ isLoading:
+ currentQuery.isPending ||
+ (previousQuery.isPending && isPreviousPeriodEnabled) ||
+ (allTimeQuery.isPending && isAllTimePeriodEnabled),
+ error: currentQuery.error || previousQuery.error || allTimeQuery.error,
+ refetch: () => {
+ currentQuery.refetch();
+ previousQuery.refetch();
+ allTimeQuery.refetch();
+ },
+ };
+};
- renderLoading() {
- return this.renderBody();
- }
+type Props = {
+ isProjectStabilized: boolean;
+ organization: Organization;
+ selection: PageFilters;
+ query?: string;
+};
- renderMissingFeatureCard() {
- const {organization} = this.props;
+function ProjectVelocityScoreCard(props: Props) {
+ const {organization} = props;
+
+ const {
+ data: currentReleases,
+ previousData: previousReleases,
+ allTimeData: allTimeReleases,
+ isLoading,
+ error,
+ refetch,
+ } = useReleaseCount(props);
+
+ const trend =
+ defined(currentReleases) &&
+ defined(previousReleases) &&
+ currentReleases?.length !== API_LIMIT
+ ? currentReleases.length - previousReleases.length
+ : undefined;
+
+ const shouldRenderTrend =
+ !isLoading && defined(currentReleases) && defined(previousReleases) && defined(trend);
+
+ const noReleaseEver =
+ [...(currentReleases ?? []), ...(previousReleases ?? []), ...(allTimeReleases ?? [])]
+ .length === 0;
+
+ const cardTitle = t('Number of Releases');
+
+ const cardHelp = trend
+ ? t(
+ 'The number of releases for this project and how it has changed since the last period.'
+ )
+ : t('The number of releases for this project.');
+
+ if (noReleaseEver) {
return (
}
/>
);
}
- renderScore() {
- const {currentReleases, loading} = this.state;
-
- if (loading || !defined(currentReleases)) {
- return '\u2014';
- }
-
- return currentReleases.length === API_LIMIT
- ? `${API_LIMIT - 1}+`
- : currentReleases.length;
- }
-
- renderTrend() {
- const {loading, currentReleases} = this.state;
-
- if (loading || !defined(this.trend) || currentReleases?.length === API_LIMIT) {
- return null;
- }
-
- return (
-
- {this.trend >= 0 ? (
-
- ) : (
-
- )}
- {Math.abs(this.trend)}
-
- );
- }
-
- renderBody() {
- const {noReleaseEver} = this.state;
-
- if (noReleaseEver) {
- return this.renderMissingFeatureCard();
- }
-
+ if (error) {
return (
-
);
}
+
+ return (
+
+ {trend >= 0 ? (
+
+ ) : (
+
+ )}
+ {Math.abs(trend)}
+
+ ) : null
+ }
+ trendStatus={!trend ? undefined : trend > 0 ? 'good' : 'bad'}
+ />
+ );
}
export default ProjectVelocityScoreCard;
From 5c626da86750dd16cd713451390d61fbcafe043c Mon Sep 17 00:00:00 2001
From: George Gritsouk <989898+gggritso@users.noreply.github.com>
Date: Wed, 2 Oct 2024 14:55:52 -0400
Subject: [PATCH 026/139] fix(dashboards): BigNumberWidget improvement grab-bag
II (#78288)
A few more nice changes to support using this widget in Project Details pages.
- `maximumValue` prop
- better stories
- better data handling
- remove description tooltip option
---
.../bigNumberWidget/bigNumberWidget.spec.tsx | 57 ++++++++-
.../bigNumberWidget.stories.tsx | 61 ++++++++--
.../bigNumberWidget/bigNumberWidget.tsx | 2 +-
.../bigNumberWidgetVisualization.tsx | 108 ++++++++++++------
.../differenceToPreviousPeriodData.tsx | 4 +-
.../widgets/bigNumberWidget/settings.tsx | 5 +-
.../widgets/common/widgetFrame.spec.tsx | 16 +--
.../dashboards/widgets/common/widgetFrame.tsx | 28 +----
8 files changed, 197 insertions(+), 84 deletions(-)
diff --git a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.spec.tsx b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.spec.tsx
index f32e5ade822838..a6a8f97f9863a3 100644
--- a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.spec.tsx
+++ b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.spec.tsx
@@ -1,4 +1,5 @@
import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
+import {textWithMarkupMatcher} from 'sentry-test/utils';
import {BigNumberWidget} from 'sentry/views/dashboards/widgets/bigNumberWidget/bigNumberWidget';
@@ -9,7 +10,6 @@ describe('BigNumberWidget', () => {
{
});
describe('Visualization', () => {
+ it('Explains missing data', () => {
+ render(
+
+ );
+
+ expect(screen.getByText('No Data')).toBeInTheDocument();
+ });
+
+ it('Explains non-numeric data', () => {
+ render(
+
+ );
+
+ expect(screen.getByText('Value is not a finite number.')).toBeInTheDocument();
+ });
+
it('Formats duration data', () => {
render(
{
expect(screen.getByText('178451214')).toBeInTheDocument();
});
+
+ it('Respects the maximum value', () => {
+ render(
+
+ );
+
+ expect(screen.getByText(textWithMarkupMatcher('>100m'))).toBeInTheDocument();
+ });
});
describe('State', () => {
diff --git a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.stories.tsx b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.stories.tsx
index 7745fac6e9fd97..aff28d5312c085 100644
--- a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.stories.tsx
+++ b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.stories.tsx
@@ -2,6 +2,7 @@ import {Fragment} from 'react';
import styled from '@emotion/styled';
import JSXNode from 'sentry/components/stories/jsxNode';
+import JSXProperty from 'sentry/components/stories/jsxProperty';
import SideBySide from 'sentry/components/stories/sideBySide';
import SizingWindow from 'sentry/components/stories/sizingWindow';
import storyBook from 'sentry/stories/storyBook';
@@ -40,11 +41,6 @@ export default storyBook(BigNumberWidget, story => {
'eps()': 0.01087819860850493,
},
]}
- previousPeriodData={[
- {
- 'eps()': 0.01087819860850493,
- },
- ]}
meta={{
fields: {
'eps()': 'rate',
@@ -93,6 +89,32 @@ export default storyBook(BigNumberWidget, story => {
/>
+
+ The maximumValue prop allows setting the maximum displayable value.
+ e.g., imagine a widget that displays a count. A count of more than a million is
+ too expensive for the API to compute, so the API returns a maximum of 1,000,000.
+ If the API returns exactly 1,000,000, that means the actual number is unknown,
+ something higher than the max. Setting{' '}
+ will show >1m.
+
+
+
+
+
+
);
});
@@ -106,15 +128,24 @@ export default storyBook(BigNumberWidget, story => {
-
-
-
-
+
+
+
+
+
+
+
+
+
+
-
+
);
@@ -129,6 +160,14 @@ export default storyBook(BigNumberWidget, story => {
next to the main value.
+
+ The preferredPolarity prop controls the color of the comparison
+ string. Setting a positive polarity means that a
+ higher number is better and will paint increases in the value green. Vice
+ versa with negative polarity. Omitting a preferred polarity will prevent
+ colorization.
+
+
;
}
- // Big Number widgets only show one number, so we only ever look at the first item in the Discover response
- const datum = data?.[0];
// TODO: Instrument getting more than one data key back as an error
+ // e.g., with data that looks like `[{'apdex()': 0.8}] this pulls out `"apdex()"` or `undefined`
+ const field = Object.keys(data?.[0] ?? {})[0];
+ const value = data?.[0]?.[field];
- if (isLoading || !defined(data) || !defined(datum) || Object.keys(datum).length === 0) {
+ if (isLoading) {
return (
-
-
- {NO_DATA_PLACEHOLDER}
-
-
+
+ {LOADING_PLACEHOLDER}
+
);
}
- const fields = Object.keys(datum);
- const field = fields[0];
+ if (!defined(value)) {
+ return (
+
+ {NO_DATA_PLACEHOLDER}
+
+ );
+ }
+
+ if (!Number.isFinite(value) || error) {
+ return ;
+ }
+
+ const parsedValue = Number(value);
// TODO: meta as MetaType is a white lie. `MetaType` doesn't know that types can be null, but they can!
const fieldRenderer = meta
? getFieldRenderer(field, meta as MetaType, false)
- : value => value.toString();
+ : renderableValue => renderableValue.toString();
+
+ const doesValueHitMaximum = maximumValue ? parsedValue >= maximumValue : false;
+ const clampedValue = Math.min(parsedValue, maximumValue);
+
+ const datum = {
+ [field]: clampedValue,
+ };
const unit = meta?.units?.[field];
+
const baggage = {
location,
organization,
@@ -68,29 +97,42 @@ export function BigNumberWidgetVisualization(props: Props) {
const rendered = fieldRenderer(datum, baggage);
+ return (
+
+
+
+
+ {doesValueHitMaximum ? '>' : ''}
+ {rendered}
+
+
+
+ {data && previousPeriodData && !doesValueHitMaximum && (
+
+ fieldRenderer(previousDatum, baggage)
+ }
+ field={field}
+ />
+ )}
+
+
+ );
+}
+
+function Wrapper({children}) {
return (
-
-
-
-
- {rendered}
-
-
-
- {previousPeriodData && (
-
- fieldRenderer(previousDatum, baggage)
- }
- field={field}
- />
- )}
-
-
+ {children}
);
}
diff --git a/static/app/views/dashboards/widgets/bigNumberWidget/differenceToPreviousPeriodData.tsx b/static/app/views/dashboards/widgets/bigNumberWidget/differenceToPreviousPeriodData.tsx
index d1752ef0350feb..9fe293cdfcd1a3 100644
--- a/static/app/views/dashboards/widgets/bigNumberWidget/differenceToPreviousPeriodData.tsx
+++ b/static/app/views/dashboards/widgets/bigNumberWidget/differenceToPreviousPeriodData.tsx
@@ -11,7 +11,7 @@ import {IconArrow} from 'sentry/icons';
import {space} from 'sentry/styles/space';
import {
DEEMPHASIS_COLOR_NAME,
- NO_DATA_PLACEHOLDER,
+ LOADING_PLACEHOLDER,
} from 'sentry/views/dashboards/widgets/bigNumberWidget/settings';
import type {TableData} from 'sentry/views/dashboards/widgets/common/types';
@@ -34,7 +34,7 @@ export function DifferenceToPreviousPeriodData({
const previousValue = previousPeriodData[0][field];
if (!isNumber(currentValue) || !isNumber(previousValue)) {
- return {NO_DATA_PLACEHOLDER} ;
+ return {LOADING_PLACEHOLDER} ;
}
const difference = currentValue - previousValue;
diff --git a/static/app/views/dashboards/widgets/bigNumberWidget/settings.tsx b/static/app/views/dashboards/widgets/bigNumberWidget/settings.tsx
index 561241d46a0d38..bf248bc32d0545 100644
--- a/static/app/views/dashboards/widgets/bigNumberWidget/settings.tsx
+++ b/static/app/views/dashboards/widgets/bigNumberWidget/settings.tsx
@@ -1,2 +1,5 @@
-export const NO_DATA_PLACEHOLDER = '\u2014';
+import {t} from 'sentry/locale';
+
+export const LOADING_PLACEHOLDER = '\u2014';
+export const NO_DATA_PLACEHOLDER = t('No Data');
export const DEEMPHASIS_COLOR_NAME = 'gray300';
diff --git a/static/app/views/dashboards/widgets/common/widgetFrame.spec.tsx b/static/app/views/dashboards/widgets/common/widgetFrame.spec.tsx
index 23c5a1248915d4..a21f8d146ff85c 100644
--- a/static/app/views/dashboards/widgets/common/widgetFrame.spec.tsx
+++ b/static/app/views/dashboards/widgets/common/widgetFrame.spec.tsx
@@ -4,17 +4,13 @@ import {WidgetFrame} from 'sentry/views/dashboards/widgets/common/widgetFrame';
describe('WidgetFrame', () => {
describe('Layout', () => {
- it('Renders the title and description', () => {
- render(
-
- );
+ it('Renders the title and description', async () => {
+ render( );
expect(screen.getByText('EPS')).toBeInTheDocument();
- expect(screen.getByText('Number of events per second')).toBeInTheDocument();
+
+ await userEvent.hover(screen.getByTestId('more-information'));
+ expect(await screen.findByText('Number of events per second')).toBeInTheDocument();
});
});
@@ -26,7 +22,6 @@ describe('WidgetFrame', () => {
{
-
+
{title}
- {description && showDescriptionInTooltip && (
+ {description && (
@@ -58,17 +57,6 @@ export function WidgetFrame(props: Props) {
)}
-
- {description && !showDescriptionInTooltip && (
-
- {description}
-
- )}
{children}
@@ -95,11 +83,9 @@ const Frame = styled('div')`
background: ${p => p.theme.background};
`;
-const Header = styled('div')<{showDescriptionInTooltip: boolean}>`
+const Header = styled('div')`
display: flex;
flex-direction: column;
-
- min-height: ${p => (p.showDescriptionInTooltip ? '' : '36px')};
`;
const Title = styled('div')`
@@ -108,12 +94,6 @@ const Title = styled('div')`
gap: ${space(0.75)};
`;
-const Description = styled('small')`
- ${p => p.theme.overflowEllipsis}
-
- color: ${p => p.theme.gray300};
-`;
-
const TitleText = styled(HeaderTitle)`
${p => p.theme.overflowEllipsis};
font-weight: ${p => p.theme.fontWeightBold};
From 07dd8abc2a1cc50b3115e62c70b8184875b46f5f Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 14:59:50 -0400
Subject: [PATCH 027/139] feat(uptime): Make interval a dropdown selection
(#78496)
---
.../rules/uptime/uptimeAlertForm.spec.tsx | 4 +++-
.../alerts/rules/uptime/uptimeAlertForm.tsx | 17 +++++++++++++++--
2 files changed, 18 insertions(+), 3 deletions(-)
diff --git a/static/app/views/alerts/rules/uptime/uptimeAlertForm.spec.tsx b/static/app/views/alerts/rules/uptime/uptimeAlertForm.spec.tsx
index 99a6c0bacfe882..db18aa4bd89339 100644
--- a/static/app/views/alerts/rules/uptime/uptimeAlertForm.spec.tsx
+++ b/static/app/views/alerts/rules/uptime/uptimeAlertForm.spec.tsx
@@ -166,6 +166,8 @@ describe('Uptime Alert Form', function () {
);
await screen.findByText('Configure Request');
+ await selectEvent.select(input('Interval'), 'Every 10 minutes');
+
await userEvent.clear(input('URL'));
await userEvent.type(input('URL'), 'http://another-url.com');
@@ -207,7 +209,7 @@ describe('Uptime Alert Form', function () {
['X-Another', 'Second Value'],
],
body: '{"different": "value"}',
- intervalSeconds: 60,
+ intervalSeconds: 60 * 10,
}),
})
);
diff --git a/static/app/views/alerts/rules/uptime/uptimeAlertForm.tsx b/static/app/views/alerts/rules/uptime/uptimeAlertForm.tsx
index 996724b7396e04..59338b0dc8e1bf 100644
--- a/static/app/views/alerts/rules/uptime/uptimeAlertForm.tsx
+++ b/static/app/views/alerts/rules/uptime/uptimeAlertForm.tsx
@@ -6,7 +6,6 @@ import {Observer} from 'mobx-react';
import {Button} from 'sentry/components/button';
import Confirm from 'sentry/components/confirm';
import FieldWrapper from 'sentry/components/forms/fieldGroup/fieldWrapper';
-import HiddenField from 'sentry/components/forms/fields/hiddenField';
import SelectField from 'sentry/components/forms/fields/selectField';
import SentryMemberTeamSelectorField from 'sentry/components/forms/fields/sentryMemberTeamSelectorField';
import SentryProjectSelectorField from 'sentry/components/forms/fields/sentryProjectSelectorField';
@@ -22,6 +21,7 @@ import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import type {Organization} from 'sentry/types/organization';
import type {Project} from 'sentry/types/project';
+import getDuration from 'sentry/utils/duration/getDuration';
import normalizeUrl from 'sentry/utils/url/normalizeUrl';
import {useNavigate} from 'sentry/utils/useNavigate';
import useOrganization from 'sentry/utils/useOrganization';
@@ -40,6 +40,8 @@ interface Props {
const HTTP_METHOD_OPTIONS = ['GET', 'POST', 'HEAD', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'];
+const VALID_INTERVALS_SEC = [60 * 1, 60 * 5, 60 * 10, 60 * 20, 60 * 30, 60 * 60];
+
function getFormDataFromRule(rule: UptimeRule) {
return {
name: rule.name,
@@ -142,6 +144,18 @@ export function UptimeAlertForm({project, handleDelete, rule}: Props) {
+ ({
+ value,
+ label: t('Every %s', getDuration(value)),
+ }))}
+ name="intervalSeconds"
+ label={t('Interval')}
+ defaultValue={60}
+ flexibleControlStateSize
+ required
+ />
+
-
From bed920f31c0ceae347ac7e7a6124ed983fe72cb4 Mon Sep 17 00:00:00 2001
From: Colton Allen
Date: Wed, 2 Oct 2024 14:18:09 -0500
Subject: [PATCH 028/139] feat(flags): Add log endpoints (#78429)
---
src/sentry/api/api_owners.py | 1 +
src/sentry/api/urls.py | 16 +++
src/sentry/flags/docs/api.md | 30 +++--
src/sentry/flags/endpoints/logs.py | 91 +++++++++++++
src/sentry/flags/models.py | 20 +++
tests/sentry/flags/endpoints/test_logs.py | 150 ++++++++++++++++++++++
6 files changed, 294 insertions(+), 14 deletions(-)
create mode 100644 src/sentry/flags/endpoints/logs.py
create mode 100644 tests/sentry/flags/endpoints/test_logs.py
diff --git a/src/sentry/api/api_owners.py b/src/sentry/api/api_owners.py
index da89e1e004e224..ebe2043f8c6e3e 100644
--- a/src/sentry/api/api_owners.py
+++ b/src/sentry/api/api_owners.py
@@ -13,6 +13,7 @@ class ApiOwner(Enum):
ECOSYSTEM = "ecosystem"
ENTERPRISE = "enterprise"
FEEDBACK = "feedback-backend"
+ FLAG = "replay-backend"
HYBRID_CLOUD = "hybrid-cloud"
INTEGRATIONS = "product-owners-settings-integrations"
ISSUES = "issues"
diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py
index 8f5ed98b9e0be5..a99cf0986ea9c3 100644
--- a/src/sentry/api/urls.py
+++ b/src/sentry/api/urls.py
@@ -74,6 +74,10 @@
DiscoverSavedQueryVisitEndpoint,
)
from sentry.flags.endpoints.hooks import OrganizationFlagsHooksEndpoint
+from sentry.flags.endpoints.logs import (
+ OrganizationFlagLogDetailsEndpoint,
+ OrganizationFlagLogIndexEndpoint,
+)
from sentry.incidents.endpoints.organization_alert_rule_activations import (
OrganizationAlertRuleActivationsEndpoint,
)
@@ -2033,11 +2037,23 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
OrganizationRelayUsage.as_view(),
name="sentry-api-0-organization-relay-usage",
),
+ # Flags
+ re_path(
+ r"^(?P[^\/]+)/flags/logs/$",
+ OrganizationFlagLogIndexEndpoint.as_view(),
+ name="sentry-api-0-organization-flag-logs",
+ ),
+ re_path(
+ r"^(?P[^\/]+)/flags/logs/(?P\d+)/$",
+ OrganizationFlagLogDetailsEndpoint.as_view(),
+ name="sentry-api-0-organization-flag-log",
+ ),
re_path(
r"^(?P[^\/]+)/flags/hooks/provider/(?P[\w-]+)/$",
OrganizationFlagsHooksEndpoint.as_view(),
name="sentry-api-0-organization-flag-hooks",
),
+ # Replays
re_path(
r"^(?P[^\/]+)/replays/$",
OrganizationReplayIndexEndpoint.as_view(),
diff --git a/src/sentry/flags/docs/api.md b/src/sentry/flags/docs/api.md
index 7a0487e2938539..5667acfaf81aa7 100644
--- a/src/sentry/flags/docs/api.md
+++ b/src/sentry/flags/docs/api.md
@@ -10,10 +10,9 @@ Host: https://sentry.io/api/0
This document is structured by resource with each resource having actions that can be performed against it. Every action that either accepts a request or returns a response WILL document the full interchange format. Clients may opt to restrict response data or provide a subset of the request data.
-## Flag Logs [/organizations//flag-log/]
+## Flag Logs [/organizations//flags/logs/]
- Parameters
- - query (optional, string) - Search query with space-separated field/value pairs. ie: `?query=environment:prod AND project:3`.
- start (optional, string) - ISO 8601 format (`YYYY-MM-DDTHH:mm:ss.sssZ`)
- end (optional, string) - ISO 8601 format. Required if `start` is set.
- statsPeriod (optional, string) - A positive integer suffixed with a unit type.
@@ -29,14 +28,15 @@ Retrieve a collection of flag logs.
**Attributes**
-| Column | Type | Description |
-| --------------- | ------ | ---------------------------------------------------- |
-| action | string | Enum of `created`, `updated`, or `deleted`. |
-| flag | string | The name of the flag changed. |
-| created_at | string | ISO-8601 timestamp of when the flag was changed. |
-| created_by | string | The user responsible for the change. |
-| created_by_type | string | Enum of `email`, `id`, or `name`. |
-| tags | object | A collection of provider-specified scoping metadata. |
+| Column | Type | Description |
+| --------------- | ------ | ------------------------------------------------------------- |
+| action | string | Enum of `created`, `updated`, or `deleted`. |
+| created_at | string | ISO-8601 timestamp of when the flag was changed. |
+| created_by | string | The user responsible for the change. |
+| created_by_type | string | Enum of `email`, `id`, or `name`. |
+| flag | string | The name of the flag changed. Maps to flag_log_id in the URI. |
+| id | number | A unique identifier for the log entry. |
+| tags | object | A collection of provider-specified scoping metadata. |
- Response 200
@@ -45,10 +45,11 @@ Retrieve a collection of flag logs.
"data": [
{
"action": "created",
- "flag": "my-flag-name",
"created_at": "2024-01-01T05:12:33",
"created_by": "2552",
"created_by_type": "id",
+ "flag": "my-flag-name",
+ "id": 1,
"tags": {
"environment": "production"
}
@@ -57,7 +58,7 @@ Retrieve a collection of flag logs.
}
```
-## Flag Log [/organizations//flag-log//]
+## Flag Log [/organizations//flags/logs//]
### Fetch Flag Log [GET]
@@ -69,10 +70,11 @@ Retrieve a single flag log instance.
{
"data": {
"action": "updated",
- "flag": "new-flag-name",
"created_at": "2024-11-19T19:12:55",
"created_by": "user@site.com",
"created_by_type": "email",
+ "flag": "new-flag-name",
+ "id": 1,
"tags": {
"environment": "development"
}
@@ -97,9 +99,9 @@ Flag pole is Sentry owned. It matches our audit-log resource because it is desig
"data": [
{
"action": "updated",
- "flag": "flag-name",
"created_at": "2024-11-19T19:12:55",
"created_by": "colton.allen@sentry.io",
+ "flag": "flag-name",
"tags": {
"commit_sha": "1f33a107d7cd060ab9c98e11c9e5a62dc1347861"
}
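To make the flag-log endpoints documented above concrete, here is a minimal client sketch using the `requests` library; the auth token, organization slug, and log id are placeholders, not values from this patch:

```python
# Minimal sketch of calling the flag-log endpoints documented above.
# The token, organization slug, and log id below are placeholders.
import requests

BASE_URL = "https://sentry.io/api/0"
HEADERS = {"Authorization": "Bearer <auth-token>"}

# List flag logs for the last 14 days (start/end or statsPeriod are optional).
index_resp = requests.get(
    f"{BASE_URL}/organizations/my-org/flags/logs/",
    headers=HEADERS,
    params={"statsPeriod": "14d"},
)
for entry in index_resp.json()["data"]:
    print(entry["id"], entry["action"], entry["flag"], entry["created_by"])

# Fetch a single flag log by its id.
detail_resp = requests.get(
    f"{BASE_URL}/organizations/my-org/flags/logs/1/",
    headers=HEADERS,
)
print(detail_resp.json()["data"])
```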
diff --git a/src/sentry/flags/endpoints/logs.py b/src/sentry/flags/endpoints/logs.py
new file mode 100644
index 00000000000000..e12633129de44e
--- /dev/null
+++ b/src/sentry/flags/endpoints/logs.py
@@ -0,0 +1,91 @@
+from datetime import datetime
+from typing import Any, TypedDict
+
+from rest_framework.exceptions import ParseError
+from rest_framework.request import Request
+from rest_framework.response import Response
+
+from sentry import features
+from sentry.api.api_owners import ApiOwner
+from sentry.api.api_publish_status import ApiPublishStatus
+from sentry.api.base import region_silo_endpoint
+from sentry.api.bases.organization import OrganizationEndpoint
+from sentry.api.exceptions import ResourceDoesNotExist
+from sentry.api.paginator import OffsetPaginator
+from sentry.api.serializers import Serializer, register, serialize
+from sentry.api.utils import get_date_range_from_params
+from sentry.flags.models import ActionEnum, CreatedByTypeEnum, FlagAuditLogModel
+from sentry.models.organization import Organization
+
+
+class FlagAuditLogModelSerializerResponse(TypedDict):
+ id: int
+ action: str
+ created_at: datetime
+ created_by: str
+ created_by_type: str
+ flag: str
+ tags: dict[str, Any]
+
+
+@register(FlagAuditLogModel)
+class FlagAuditLogModelSerializer(Serializer):
+ def serialize(self, obj, attrs, user, **kwargs) -> FlagAuditLogModelSerializerResponse:
+ return {
+ "id": obj.id,
+ "action": ActionEnum.to_string(obj.action),
+ "created_at": obj.created_at.isoformat(),
+ "created_by": obj.created_by,
+ "created_by_type": CreatedByTypeEnum.to_string(obj.created_by_type),
+ "flag": obj.flag,
+ "tags": obj.tags,
+ }
+
+
+@region_silo_endpoint
+class OrganizationFlagLogIndexEndpoint(OrganizationEndpoint):
+ owner = ApiOwner.FLAG
+ publish_status = {"GET": ApiPublishStatus.PRIVATE}
+
+ def get(self, request: Request, organization: Organization) -> Response:
+ if not features.has("organizations:feature-flag-ui", organization, actor=request.user):
+ raise ResourceDoesNotExist
+
+ start, end = get_date_range_from_params(request.GET)
+ if start is None or end is None:
+ raise ParseError(detail="Invalid date range")
+
+ queryset = FlagAuditLogModel.objects.filter(
+ created_at__gte=start,
+ created_at__lt=end,
+ organization_id=organization.id,
+ )
+
+ return self.paginate(
+ request=request,
+ queryset=queryset,
+ on_results=lambda x: {
+ "data": serialize(x, request.user, FlagAuditLogModelSerializer())
+ },
+ paginator_cls=OffsetPaginator,
+ )
+
+
+@region_silo_endpoint
+class OrganizationFlagLogDetailsEndpoint(OrganizationEndpoint):
+ owner = ApiOwner.FLAG
+ publish_status = {"GET": ApiPublishStatus.PRIVATE}
+
+ def get(self, request: Request, organization: Organization, flag_log_id: int) -> Response:
+ if not features.has("organizations:feature-flag-ui", organization, actor=request.user):
+ raise ResourceDoesNotExist
+
+ try:
+ model = FlagAuditLogModel.objects.get(
+ id=flag_log_id,
+ organization_id=organization.id,
+ )
+ except FlagAuditLogModel.DoesNotExist:
+ raise ResourceDoesNotExist
+
+ return self.respond({"data": serialize(model, request.user, FlagAuditLogModelSerializer())})
diff --git a/src/sentry/flags/models.py b/src/sentry/flags/models.py
index 03baf681bdbb59..80852d7cc89e5d 100644
--- a/src/sentry/flags/models.py
+++ b/src/sentry/flags/models.py
@@ -13,6 +13,16 @@ class ActionEnum(Enum):
DELETED = 1
UPDATED = 2
+ @classmethod
+ def to_string(cls, integer):
+ if integer == 0:
+ return "created"
+ if integer == 1:
+ return "deleted"
+ if integer == 2:
+ return "updated"
+ raise ValueError
+
ACTION_MAP = {
"created": ActionEnum.CREATED.value,
@@ -26,6 +36,16 @@ class CreatedByTypeEnum(Enum):
ID = 1
NAME = 2
+ @classmethod
+ def to_string(cls, integer):
+ if integer == 0:
+ return "email"
+ if integer == 1:
+ return "id"
+ if integer == 2:
+ return "name"
+ raise ValueError
+
CREATED_BY_TYPE_MAP = {
"email": CreatedByTypeEnum.EMAIL.value,
diff --git a/tests/sentry/flags/endpoints/test_logs.py b/tests/sentry/flags/endpoints/test_logs.py
new file mode 100644
index 00000000000000..d2d521fb6d76ef
--- /dev/null
+++ b/tests/sentry/flags/endpoints/test_logs.py
@@ -0,0 +1,150 @@
+from datetime import datetime, timezone
+
+from django.urls import reverse
+
+from sentry.flags.models import FlagAuditLogModel
+from sentry.testutils.cases import APITestCase
+
+
+class OrganizationFlagLogIndexEndpointTestCase(APITestCase):
+ endpoint = "sentry-api-0-organization-flag-logs"
+
+ def setUp(self):
+ super().setUp()
+ self.login_as(user=self.user)
+ self.url = reverse(self.endpoint, args=(self.organization.id,))
+
+ @property
+ def features(self):
+ return {"organizations:feature-flag-ui": True}
+
+ def test_get(self):
+ model = FlagAuditLogModel(
+ action=0,
+ created_at=datetime.now(timezone.utc),
+ created_by="a@b.com",
+ created_by_type=0,
+ flag="hello",
+ organization_id=self.organization.id,
+ tags={"commit_sha": "123"},
+ )
+ model.save()
+
+ with self.feature(self.features):
+ response = self.client.get(self.url)
+ assert response.status_code == 200
+
+ result = response.json()
+ assert len(result["data"]) == 1
+ assert result["data"][0]["id"] == model.id
+ assert result["data"][0]["action"] == "created"
+ assert "created_at" in result["data"][0]
+ assert result["data"][0]["created_by"] == "a@b.com"
+ assert result["data"][0]["created_by_type"] == "email"
+ assert result["data"][0]["flag"] == "hello"
+ assert result["data"][0]["tags"] == {"commit_sha": "123"}
+
+ def test_get_unauthorized_organization(self):
+ org = self.create_organization()
+ url = reverse(self.endpoint, args=(org.id,))
+
+ with self.feature(self.features):
+ response = self.client.get(url)
+ assert response.status_code == 403
+
+ def test_get_feature_disabled(self):
+ response = self.client.get(self.url)
+ assert response.status_code == 404
+
+ def test_get_stats_period(self):
+ model = FlagAuditLogModel(
+ action=0,
+ created_at=datetime.now(timezone.utc),
+ created_by="a@b.com",
+ created_by_type=0,
+ flag="hello",
+ organization_id=self.organization.id,
+ tags={"commit_sha": "123"},
+ )
+ model.save()
+
+ with self.feature(self.features):
+ response = self.client.get(self.url + "?statsPeriod=14d")
+ assert response.status_code == 200
+ assert len(response.json()["data"]) == 1
+
+ def test_get_start_end(self):
+ model = FlagAuditLogModel(
+ action=0,
+ created_at=datetime(2024, 1, 5, tzinfo=timezone.utc),
+ created_by="a@b.com",
+ created_by_type=0,
+ flag="hello",
+ organization_id=self.organization.id,
+ tags={"commit_sha": "123"},
+ )
+ model.save()
+
+ start = datetime(2024, 1, 4, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 6, tzinfo=timezone.utc)
+
+ with self.feature(self.features):
+ response = self.client.get(
+ self.url + f"?start={start.timestamp()}&end={end.timestamp()}"
+ )
+ assert response.status_code == 200
+ assert len(response.json()["data"]) == 1
+
+
+class OrganizationFlagLogDetailsEndpointTestCase(APITestCase):
+ endpoint = "sentry-api-0-organization-flag-log"
+
+ def setUp(self):
+ super().setUp()
+ self.flag = FlagAuditLogModel(
+ action=0,
+ created_at=datetime.now(timezone.utc),
+ created_by="a@b.com",
+ created_by_type=0,
+ flag="hello",
+ organization_id=self.organization.id,
+ tags={"commit_sha": "123"},
+ )
+ self.flag.save()
+
+ self.login_as(user=self.user)
+ self.url = reverse(self.endpoint, args=(self.organization.id, self.flag.id))
+
+ @property
+ def features(self):
+ return {"organizations:feature-flag-ui": True}
+
+ def test_get(self):
+ with self.feature(self.features):
+ response = self.client.get(self.url)
+ assert response.status_code == 200
+
+ result = response.json()
+ assert result["data"]["id"] == self.flag.id
+ assert result["data"]["action"] == "created"
+ assert "created_at" in result["data"]
+ assert result["data"]["created_by"] == "a@b.com"
+ assert result["data"]["created_by_type"] == "email"
+ assert result["data"]["flag"] == "hello"
+ assert result["data"]["tags"] == {"commit_sha": "123"}
+
+ def test_get_unauthorized_organization(self):
+ org = self.create_organization()
+ url = reverse(self.endpoint, args=(org.id, 123))
+
+ with self.feature(self.features):
+ response = self.client.get(url)
+ assert response.status_code == 403
+
+ def test_get_no_flag(self):
+ response = self.client.get(reverse(self.endpoint, args=(self.organization.id, 123)))
+ assert response.status_code == 404
+
+ def test_get_feature_disabled(self):
+ response = self.client.get(self.url)
+ assert response.status_code == 404
From 951da4137a49fd2d11c5e17e9d32b30741228283 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 15:39:50 -0400
Subject: [PATCH 029/139] test(migration): Mark old migration tests as skipped
(#78497)
---
.../test_0645_backfill_add_uuid_to_all_rule_actions.py | 1 +
...test_0729_backfill_groupsearchviews_with_pinned_searches.py | 1 +
.../migrations/test_0730_add_subscription_fk_to_incident.py | 1 +
.../test_0739_backfill_group_info_to_group_attributes.py | 1 +
.../migrations/test_0742_backfill_alertrule_detection_type.py | 3 +++
...0743_backfill_broken_monitor_notification_setting_option.py | 3 +++
.../test_0752_fix_substatus_for_unresolved_groups.py | 2 ++
.../migrations/test_0753_fix_substatus_for_ignored_groups.py | 3 +++
.../migrations/test_0764_migrate_bad_status_substatus_rows.py | 2 ++
.../migrations/test_0766_fix_substatus_for_pending_merge.py | 3 +++
.../migrations/test_0768_fix_old_group_first_seen_dates.py | 3 +++
11 files changed, 23 insertions(+)
diff --git a/tests/sentry/migrations/test_0645_backfill_add_uuid_to_all_rule_actions.py b/tests/sentry/migrations/test_0645_backfill_add_uuid_to_all_rule_actions.py
index 2c6b11e3a43c16..209bcad10d9d6a 100644
--- a/tests/sentry/migrations/test_0645_backfill_add_uuid_to_all_rule_actions.py
+++ b/tests/sentry/migrations/test_0645_backfill_add_uuid_to_all_rule_actions.py
@@ -5,6 +5,7 @@
from sentry.testutils.cases import TestMigrations
+@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.")
class BackfillAddUuidToAllRuleActions(TestMigrations):
migrate_from = "0644_backfill_priority_for_groups"
migrate_to = "0645_backfill_add_uuid_to_all_rule_actions"
diff --git a/tests/sentry/migrations/test_0729_backfill_groupsearchviews_with_pinned_searches.py b/tests/sentry/migrations/test_0729_backfill_groupsearchviews_with_pinned_searches.py
index b444e738cbdb0d..5973022d25a28e 100644
--- a/tests/sentry/migrations/test_0729_backfill_groupsearchviews_with_pinned_searches.py
+++ b/tests/sentry/migrations/test_0729_backfill_groupsearchviews_with_pinned_searches.py
@@ -5,6 +5,7 @@
from sentry.testutils.cases import TestMigrations
+@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.")
class BackfillGroupSearchViewsWithPinnedSearchesTest(TestMigrations):
migrate_from = "0728_incident_subscription_fk"
migrate_to = "0729_backfill_groupsearchviews_with_pinned_searches"
diff --git a/tests/sentry/migrations/test_0730_add_subscription_fk_to_incident.py b/tests/sentry/migrations/test_0730_add_subscription_fk_to_incident.py
index ce9fea5cce7467..caa91ee9fbab17 100644
--- a/tests/sentry/migrations/test_0730_add_subscription_fk_to_incident.py
+++ b/tests/sentry/migrations/test_0730_add_subscription_fk_to_incident.py
@@ -9,6 +9,7 @@
from sentry.testutils.cases import TestMigrations
+@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.")
class AlertRuleProjectBackfillTest(TestMigrations):
migrate_from = "0729_backfill_groupsearchviews_with_pinned_searches"
migrate_to = "0730_add_subscription_fk_to_incident"
diff --git a/tests/sentry/migrations/test_0739_backfill_group_info_to_group_attributes.py b/tests/sentry/migrations/test_0739_backfill_group_info_to_group_attributes.py
index 734bb9d140325a..22d87f857f556f 100644
--- a/tests/sentry/migrations/test_0739_backfill_group_info_to_group_attributes.py
+++ b/tests/sentry/migrations/test_0739_backfill_group_info_to_group_attributes.py
@@ -39,6 +39,7 @@ def run_test(expected_groups):
}
+@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.")
class TestBackfillGroupAttributes(SnubaTestCase, TestMigrations):
migrate_from = "0738_rm_reprocessing_step3"
migrate_to = "0739_backfill_group_info_to_group_attributes"
diff --git a/tests/sentry/migrations/test_0742_backfill_alertrule_detection_type.py b/tests/sentry/migrations/test_0742_backfill_alertrule_detection_type.py
index 4075f3c85a75a6..021f7d80277e85 100644
--- a/tests/sentry/migrations/test_0742_backfill_alertrule_detection_type.py
+++ b/tests/sentry/migrations/test_0742_backfill_alertrule_detection_type.py
@@ -1,5 +1,7 @@
import uuid
+import pytest
+
from sentry.incidents.logic import create_alert_rule
from sentry.incidents.models.alert_rule import AlertRuleDetectionType, AlertRuleThresholdType
from sentry.models.organization import Organization
@@ -7,6 +9,7 @@
from sentry.testutils.cases import TestMigrations
+@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.")
class TestBackfillDetectionType(TestMigrations):
migrate_from = "0741_metric_alert_anomaly_detection"
migrate_to = "0742_backfill_alertrule_detection_type"
diff --git a/tests/sentry/migrations/test_0743_backfill_broken_monitor_notification_setting_option.py b/tests/sentry/migrations/test_0743_backfill_broken_monitor_notification_setting_option.py
index d4c79a35ef881f..680cab38294a76 100644
--- a/tests/sentry/migrations/test_0743_backfill_broken_monitor_notification_setting_option.py
+++ b/tests/sentry/migrations/test_0743_backfill_broken_monitor_notification_setting_option.py
@@ -1,11 +1,14 @@
from uuid import uuid4
+import pytest
+
from sentry.models.notificationsettingoption import NotificationSettingOption
from sentry.testutils.cases import TestMigrations
from sentry.testutils.silo import control_silo_test
@control_silo_test
+@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.")
class BackfillBrokenMonitorNotificationSettingOptionTest(TestMigrations):
migrate_from = "0742_backfill_alertrule_detection_type"
migrate_to = "0743_backfill_broken_monitor_notification_setting_option"
diff --git a/tests/sentry/migrations/test_0752_fix_substatus_for_unresolved_groups.py b/tests/sentry/migrations/test_0752_fix_substatus_for_unresolved_groups.py
index 41cb6f402e3296..dfadbc5d18a6da 100644
--- a/tests/sentry/migrations/test_0752_fix_substatus_for_unresolved_groups.py
+++ b/tests/sentry/migrations/test_0752_fix_substatus_for_unresolved_groups.py
@@ -1,5 +1,6 @@
from datetime import timedelta
+import pytest
from django.utils import timezone
from sentry.models.group import Group, GroupStatus
@@ -9,6 +10,7 @@
from sentry.types.group import GroupSubStatus
+@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.")
class BackfillMissingUnresolvedSubstatusTest(TestMigrations):
migrate_from = "0751_grouphashmetadata_use_one_to_one_field_for_grouphash"
migrate_to = "0752_fix_substatus_for_unresolved_groups"
diff --git a/tests/sentry/migrations/test_0753_fix_substatus_for_ignored_groups.py b/tests/sentry/migrations/test_0753_fix_substatus_for_ignored_groups.py
index d0e0ace5764b48..9ae7b485dde3f4 100644
--- a/tests/sentry/migrations/test_0753_fix_substatus_for_ignored_groups.py
+++ b/tests/sentry/migrations/test_0753_fix_substatus_for_ignored_groups.py
@@ -1,3 +1,5 @@
+import pytest
+
from sentry.models.activity import Activity
from sentry.models.group import Group, GroupStatus
from sentry.models.groupsnooze import GroupSnooze
@@ -7,6 +9,7 @@
from sentry.types.group import GroupSubStatus
+@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.")
class FixSubstatusForIgnoreedGroupsTest(TestMigrations):
migrate_from = "0752_fix_substatus_for_unresolved_groups"
migrate_to = "0753_fix_substatus_for_ignored_groups"
diff --git a/tests/sentry/migrations/test_0764_migrate_bad_status_substatus_rows.py b/tests/sentry/migrations/test_0764_migrate_bad_status_substatus_rows.py
index 06d0a6cf457299..cf3aa03bd28bd5 100644
--- a/tests/sentry/migrations/test_0764_migrate_bad_status_substatus_rows.py
+++ b/tests/sentry/migrations/test_0764_migrate_bad_status_substatus_rows.py
@@ -1,5 +1,6 @@
from datetime import timedelta
+import pytest
from django.utils import timezone
from sentry.models.activity import Activity
@@ -12,6 +13,7 @@
from sentry.types.group import GroupSubStatus
+@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.")
class BackfillMissingUnresolvedSubstatusTest(TestMigrations):
migrate_from = "0763_add_created_by_to_broadcasts"
migrate_to = "0764_migrate_bad_status_substatus_rows"
diff --git a/tests/sentry/migrations/test_0766_fix_substatus_for_pending_merge.py b/tests/sentry/migrations/test_0766_fix_substatus_for_pending_merge.py
index 1ebe7dee293548..453614aa198767 100644
--- a/tests/sentry/migrations/test_0766_fix_substatus_for_pending_merge.py
+++ b/tests/sentry/migrations/test_0766_fix_substatus_for_pending_merge.py
@@ -1,9 +1,12 @@
+import pytest
+
from sentry.models.group import Group, GroupStatus
from sentry.models.organization import Organization
from sentry.testutils.cases import TestMigrations
from sentry.types.group import GroupSubStatus
+@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.")
class FixPendingMergeSubstatusTest(TestMigrations):
migrate_from = "0765_add_org_to_api_auth"
migrate_to = "0766_fix_substatus_for_pending_merge"
diff --git a/tests/sentry/migrations/test_0768_fix_old_group_first_seen_dates.py b/tests/sentry/migrations/test_0768_fix_old_group_first_seen_dates.py
index 1e9ee74806c452..cf965a8f9e03e6 100644
--- a/tests/sentry/migrations/test_0768_fix_old_group_first_seen_dates.py
+++ b/tests/sentry/migrations/test_0768_fix_old_group_first_seen_dates.py
@@ -1,10 +1,13 @@
from datetime import datetime, timezone
+import pytest
+
from sentry.models.group import Group
from sentry.models.organization import Organization
from sentry.testutils.cases import TestMigrations
+@pytest.mark.skip("Migration is no longer runnable. Retain until migration is removed.")
class FixOldGroupFirstSeenDates(TestMigrations):
migrate_from = "0767_add_selected_aggregate_to_dashboards_widget_query"
migrate_to = "0768_fix_old_group_first_seen_dates"
From 7ec9523490187f489d35d374e45d63ec8afb1bc3 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 15:51:34 -0400
Subject: [PATCH 030/139] ref(uptime): Use empty [] over {} as db default for
headers (#78502)
Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com>
---
migrations_lockfile.txt | 2 +-
.../0015_headers_deafult_empty_list.py | 34 +++++++++++++++++++
src/sentry/uptime/models.py | 2 +-
3 files changed, 36 insertions(+), 2 deletions(-)
create mode 100644 src/sentry/uptime/migrations/0015_headers_deafult_empty_list.py
diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt
index 081b83d191e1b0..f43e5ba7cd5eb5 100644
--- a/migrations_lockfile.txt
+++ b/migrations_lockfile.txt
@@ -12,5 +12,5 @@ remote_subscriptions: 0003_drop_remote_subscription
replays: 0004_index_together
sentry: 0770_increase_project_slug_max_length
social_auth: 0002_default_auto_field
-uptime: 0014_add_uptime_enviromnet
+uptime: 0015_headers_deafult_empty_list
workflow_engine: 0008_detector_state
diff --git a/src/sentry/uptime/migrations/0015_headers_deafult_empty_list.py b/src/sentry/uptime/migrations/0015_headers_deafult_empty_list.py
new file mode 100644
index 00000000000000..1226be4d8b055c
--- /dev/null
+++ b/src/sentry/uptime/migrations/0015_headers_deafult_empty_list.py
@@ -0,0 +1,34 @@
+# Generated by Django 5.1.1 on 2024-10-02 19:09
+
+from django.db import migrations
+
+import sentry.db.models.fields.jsonfield
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("uptime", "0014_add_uptime_enviromnet"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="uptimesubscription",
+ name="headers",
+ field=sentry.db.models.fields.jsonfield.JSONField(db_default=[], default=dict),
+ ),
+ ]
diff --git a/src/sentry/uptime/models.py b/src/sentry/uptime/models.py
index 42b0a6a9c12d5c..e8fe893390f882 100644
--- a/src/sentry/uptime/models.py
+++ b/src/sentry/uptime/models.py
@@ -54,7 +54,7 @@ class UptimeSubscription(BaseRemoteSubscription, DefaultFieldsModelExisting):
# HTTP method to perform the check with
method = models.CharField(max_length=20, db_default="GET")
# HTTP headers to send when performing the check
- headers = JSONField(json_dumps=headers_json_encoder, db_default={})
+ headers = JSONField(json_dumps=headers_json_encoder, db_default=[])
# HTTP body to send when performing the check
body = models.TextField(null=True)
From d4d5718f3146500f8ef63a7228fdb73fb019d801 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 15:53:57 -0400
Subject: [PATCH 031/139] ref(uptime): Use `[]` for headers in test fixtures
(#78503)
---
src/sentry/testutils/fixtures.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/sentry/testutils/fixtures.py b/src/sentry/testutils/fixtures.py
index 5956b8afb3c54a..84f5661b2baaeb 100644
--- a/src/sentry/testutils/fixtures.py
+++ b/src/sentry/testutils/fixtures.py
@@ -691,7 +691,7 @@ def create_uptime_subscription(
if date_updated is None:
date_updated = timezone.now()
if headers is None:
- headers = {}
+ headers = []
return Factories.create_uptime_subscription(
type=type,
From 962a33e4e343ce42bd024ae7f98456394b71711a Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 16:03:54 -0400
Subject: [PATCH 032/139] ref(uptime): Better formatting of VALID_INTERVALS_SEC
(#78507)
---
.../app/views/alerts/rules/uptime/uptimeAlertForm.tsx | 11 ++++++++++-
1 file changed, 10 insertions(+), 1 deletion(-)
diff --git a/static/app/views/alerts/rules/uptime/uptimeAlertForm.tsx b/static/app/views/alerts/rules/uptime/uptimeAlertForm.tsx
index 59338b0dc8e1bf..c312e69d37b171 100644
--- a/static/app/views/alerts/rules/uptime/uptimeAlertForm.tsx
+++ b/static/app/views/alerts/rules/uptime/uptimeAlertForm.tsx
@@ -40,7 +40,16 @@ interface Props {
const HTTP_METHOD_OPTIONS = ['GET', 'POST', 'HEAD', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'];
-const VALID_INTERVALS_SEC = [60 * 1, 60 * 5, 60 * 10, 60 * 20, 60 * 30, 60 * 60];
+const MINUTE = 60;
+
+const VALID_INTERVALS_SEC = [
+ MINUTE * 1,
+ MINUTE * 5,
+ MINUTE * 10,
+ MINUTE * 20,
+ MINUTE * 30,
+ MINUTE * 60,
+];
function getFormDataFromRule(rule: UptimeRule) {
return {
From ed7dcd1445101b334d38efcfe44fb9cc6c4893ad Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 16:11:02 -0400
Subject: [PATCH 033/139] test(uptime): Always set env for tests (#78500)
---
src/sentry/testutils/fixtures.py | 2 ++
tests/sentry/uptime/test_issue_platform.py | 2 +-
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/src/sentry/testutils/fixtures.py b/src/sentry/testutils/fixtures.py
index 84f5661b2baaeb..e9226424f2333c 100644
--- a/src/sentry/testutils/fixtures.py
+++ b/src/sentry/testutils/fixtures.py
@@ -721,6 +721,8 @@ def create_project_uptime_subscription(
) -> ProjectUptimeSubscription:
if project is None:
project = self.project
+ if env is None:
+ env = self.environment
if uptime_subscription is None:
uptime_subscription = self.create_uptime_subscription()
diff --git a/tests/sentry/uptime/test_issue_platform.py b/tests/sentry/uptime/test_issue_platform.py
index 1c771911a1615a..757b909368eb99 100644
--- a/tests/sentry/uptime/test_issue_platform.py
+++ b/tests/sentry/uptime/test_issue_platform.py
@@ -94,7 +94,7 @@ def test(self):
level="error",
culprit="",
)
- project_subscription = self.create_project_uptime_subscription(env=self.environment)
+ project_subscription = self.create_project_uptime_subscription()
event_data = build_event_data_for_occurrence(result, project_subscription, occurrence)
assert event_data == {
"environment": "development",
From b572891dc5685268bbb0cb790d9023238af0b754 Mon Sep 17 00:00:00 2001
From: Evan Hicks
Date: Wed, 2 Oct 2024 16:16:41 -0400
Subject: [PATCH 034/139] fix(eap): Separate the RPC code from the other snuba
code (#78476)
Move the RPC endpoint and code into a separate file. Also create a new
threadpool for the RPC to use. This won't result in more work; queries
are sent either to the RPC endpoint or to the SnQL endpoint, but not
both.
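For readers unfamiliar with the pattern the new `snuba_rpc` module keeps, here
is a minimal, self-contained sketch of the `Protocol` + `TypeVar` shape it
uses. Every class below is a stand-in invented for illustration (none are real
Sentry or protobuf types), and the network call is replaced by a byte
round-trip so only the typing contract is shown.

```python
from typing import Protocol, TypeVar


class FakeResponseBase:
    """Stand-in for google.protobuf.message.Message."""

    def ParseFromString(self, data: bytes) -> None:
        self.payload = data


class PingResponse(FakeResponseBase):
    pass


class PingRequest:
    def SerializeToString(self, deterministic: bool = True) -> bytes:
        return b"ping"


class RPCRequestLike(Protocol):
    def SerializeToString(self, deterministic: bool = ...) -> bytes: ...


ResponseT = TypeVar("ResponseT", bound=FakeResponseBase)


def rpc(req: RPCRequestLike, resp_type: type[ResponseT]) -> ResponseT:
    # The real helper POSTs req.SerializeToString() to the snuba HTTP pool;
    # here the bytes are simply round-tripped to show how the bound TypeVar
    # gives callers back the concrete response type they asked for.
    resp = resp_type()
    resp.ParseFromString(req.SerializeToString())
    return resp


response = rpc(PingRequest(), PingResponse)  # statically typed as PingResponse
```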
---
.../endpoints/organization_spans_fields.py | 4 +-
.../querying/eap/mql_eap_bridge.py | 4 +-
src/sentry/utils/snuba.py | 64 +----------------
src/sentry/utils/snuba_rpc.py | 69 +++++++++++++++++++
4 files changed, 74 insertions(+), 67 deletions(-)
create mode 100644 src/sentry/utils/snuba_rpc.py
diff --git a/src/sentry/api/endpoints/organization_spans_fields.py b/src/sentry/api/endpoints/organization_spans_fields.py
index d3697277dbae73..28f7ec8c478823 100644
--- a/src/sentry/api/endpoints/organization_spans_fields.py
+++ b/src/sentry/api/endpoints/organization_spans_fields.py
@@ -29,7 +29,7 @@
from sentry.snuba.dataset import Dataset
from sentry.snuba.referrer import Referrer
from sentry.tagstore.types import TagKey, TagValue
-from sentry.utils import snuba
+from sentry.utils import snuba_rpc
# This causes problems if a user sends an attribute with any of these values
# but the meta table currently can't handle that anyways
@@ -114,7 +114,7 @@ def get(self, request: Request, organization) -> Response:
offset=0,
type=AttributeKey.Type.TYPE_STRING,
)
- rpc_response = snuba.rpc(rpc_request, TraceItemAttributesResponse)
+ rpc_response = snuba_rpc.rpc(rpc_request, TraceItemAttributesResponse)
paginator = ChainPaginator(
[
diff --git a/src/sentry/sentry_metrics/querying/eap/mql_eap_bridge.py b/src/sentry/sentry_metrics/querying/eap/mql_eap_bridge.py
index f6383dc9e27745..347322458c8930 100644
--- a/src/sentry/sentry_metrics/querying/eap/mql_eap_bridge.py
+++ b/src/sentry/sentry_metrics/querying/eap/mql_eap_bridge.py
@@ -24,7 +24,7 @@
from sentry.models.organization import Organization
from sentry.models.project import Project
-from sentry.utils import snuba
+from sentry.utils import snuba_rpc
def parse_mql_filters(group: ConditionGroup) -> Iterable[TraceItemFilter]:
@@ -94,7 +94,7 @@ def make_eap_request(
name=ts.metric.mri.split("/")[1].split("@")[0], type=AttributeKey.TYPE_FLOAT
),
)
- aggregate_resp = snuba.rpc(aggregate_req, AggregateBucketResponse)
+ aggregate_resp = snuba_rpc.rpc(aggregate_req, AggregateBucketResponse)
series_data = list(aggregate_resp.result)
duration = end - start
diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py
index 6e466a9deb1654..bc112c9606a479 100644
--- a/src/sentry/utils/snuba.py
+++ b/src/sentry/utils/snuba.py
@@ -13,17 +13,15 @@
from copy import deepcopy
from datetime import datetime, timedelta, timezone
from hashlib import sha1
-from typing import Any, Protocol, TypeVar
+from typing import Any
from urllib.parse import urlparse
-import sentry_protos.snuba.v1alpha.request_common_pb2
import sentry_sdk
import sentry_sdk.scope
import urllib3
from dateutil.parser import parse as parse_datetime
from django.conf import settings
from django.core.cache import cache
-from google.protobuf.message import Message as ProtobufMessage
from snuba_sdk import DeleteQuery, MetricsQuery, Request
from snuba_sdk.legacy import json_to_snql
@@ -1211,66 +1209,6 @@ def _log_request_query(req: Request) -> None:
)
-RPCResponseType = TypeVar("RPCResponseType", bound=ProtobufMessage)
-
-
-class SnubaRPCRequest(Protocol):
- def SerializeToString(self, deterministic: bool = ...) -> bytes:
- ...
-
- @property
- def meta(self) -> sentry_protos.snuba.v1alpha.request_common_pb2.RequestMeta:
- ...
-
-
-def rpc(req: SnubaRPCRequest, resp_type: type[RPCResponseType]) -> RPCResponseType:
- """
- You want to call a snuba RPC. Here's how you do it:
-
- start_time_proto = ProtobufTimestamp()
- start_time_proto.FromDatetime(start)
- end_time_proto = ProtobufTimestamp()
- end_time_proto.FromDatetime(end)
- aggregate_req = AggregateBucketRequest(
- meta=RequestMeta(
- organization_id=organization.id,
- cogs_category="events_analytics_platform",
- referrer=referrer,
- project_ids=[project.id for project in projects],
- start_timestamp=start_time_proto,
- end_timestamp=end_time_proto,
- ),
- aggregate=AggregateBucketRequest.FUNCTION_SUM,
- filter=TraceItemFilter(
- comparison_filter=ComparisonFilter(
- key=AttributeKey(name="op", type=AttributeKey.Type.TYPE_STRING),
- value=AttributeValue(val_str="ai.run"),
- )
- ),
- granularity_secs=60,
- key=AttributeKey(
- name="duration", type=AttributeKey.TYPE_FLOAT
- ),
- attribute_key_transform_context=AttributeKeyTransformContext(),
- )
- aggregate_resp = snuba.rpc(aggregate_req, AggregateBucketResponse)
- """
- referrer = req.meta.referrer
- with sentry_sdk.start_span(op="snuba_rpc.run", description=req.__class__.__name__) as span:
- span.set_tag("snuba.referrer", referrer)
- http_resp = _snuba_pool.urlopen(
- "POST",
- f"/rpc/{req.__class__.__name__}",
- body=req.SerializeToString(),
- headers={
- "referer": referrer,
- },
- )
- resp = resp_type()
- resp.ParseFromString(http_resp.data)
- return resp
-
-
RawResult = tuple[str, urllib3.response.HTTPResponse, Translator, Translator]
diff --git a/src/sentry/utils/snuba_rpc.py b/src/sentry/utils/snuba_rpc.py
new file mode 100644
index 00000000000000..dc686ae9e78a20
--- /dev/null
+++ b/src/sentry/utils/snuba_rpc.py
@@ -0,0 +1,69 @@
+from __future__ import annotations
+
+from typing import Protocol, TypeVar
+
+import sentry_protos.snuba.v1alpha.request_common_pb2
+import sentry_sdk
+import sentry_sdk.scope
+from google.protobuf.message import Message as ProtobufMessage
+
+from sentry.utils.snuba import _snuba_pool
+
+RPCResponseType = TypeVar("RPCResponseType", bound=ProtobufMessage)
+
+
+class SnubaRPCRequest(Protocol):
+ def SerializeToString(self, deterministic: bool = ...) -> bytes:
+ ...
+
+ @property
+ def meta(self) -> sentry_protos.snuba.v1alpha.request_common_pb2.RequestMeta:
+ ...
+
+
+def rpc(req: SnubaRPCRequest, resp_type: type[RPCResponseType]) -> RPCResponseType:
+ """
+ You want to call a snuba RPC. Here's how you do it:
+
+ start_time_proto = ProtobufTimestamp()
+ start_time_proto.FromDatetime(start)
+ end_time_proto = ProtobufTimestamp()
+ end_time_proto.FromDatetime(end)
+ aggregate_req = AggregateBucketRequest(
+ meta=RequestMeta(
+ organization_id=organization.id,
+ cogs_category="events_analytics_platform",
+ referrer=referrer,
+ project_ids=[project.id for project in projects],
+ start_timestamp=start_time_proto,
+ end_timestamp=end_time_proto,
+ ),
+ aggregate=AggregateBucketRequest.FUNCTION_SUM,
+ filter=TraceItemFilter(
+ comparison_filter=ComparisonFilter(
+ key=AttributeKey(name="op", type=AttributeKey.Type.TYPE_STRING),
+ value=AttributeValue(val_str="ai.run"),
+ )
+ ),
+ granularity_secs=60,
+ key=AttributeKey(
+ name="duration", type=AttributeKey.TYPE_FLOAT
+ ),
+ attribute_key_transform_context=AttributeKeyTransformContext(),
+ )
+ aggregate_resp = snuba.rpc(aggregate_req, AggregateBucketResponse)
+ """
+ referrer = req.meta.referrer
+ with sentry_sdk.start_span(op="snuba_rpc.run", description=req.__class__.__name__) as span:
+ span.set_tag("snuba.referrer", referrer)
+ http_resp = _snuba_pool.urlopen(
+ "POST",
+ f"/rpc/{req.__class__.__name__}/v1alpha",
+ body=req.SerializeToString(),
+ headers={
+ "referer": referrer,
+ },
+ )
+ resp = resp_type()
+ resp.ParseFromString(http_resp.data)
+ return resp
From 67322e3250caad4002d65a4421a64896e2b57a4f Mon Sep 17 00:00:00 2001
From: Ryan Skonnord
Date: Wed, 2 Oct 2024 13:27:20 -0700
Subject: [PATCH 035/139] ref(integrations): Extract dialog helper class from
SlackActionEndpoint (#77454)
Factor out duplication between `open_resolve_dialog` and
`open_archive_dialog` by introducing a private helper class. Group the
related code into the helper class to make `SlackActionEndpoint`
smaller.
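A toy sketch (assumed names, not the classes added below) of the
template-method shape this refactor moves to: the abstract base owns the
shared open/update flow and tags it with `dialog_type`, while each subclass
only supplies its type string and its modal payload.

```python
from abc import ABC, abstractmethod


class ModalDialog(ABC):
    @property
    @abstractmethod
    def dialog_type(self) -> str: ...

    @abstractmethod
    def get_modal_payload(self, callback_id: str) -> dict: ...

    def open_dialog(self, callback_id: str) -> dict:
        # Shared flow: build the payload once and tag it (metrics and log
        # tags in the real endpoint) with the subclass's dialog_type.
        payload = self.get_modal_payload(callback_id)
        payload["metric_tag"] = f"{self.dialog_type}_modal_open"
        return payload


class ResolveDialog(ModalDialog):
    @property
    def dialog_type(self) -> str:
        return "resolve"

    def get_modal_payload(self, callback_id: str) -> dict:
        return {"title": "Resolve", "callback_id": callback_id}


class ArchiveDialog(ModalDialog):
    @property
    def dialog_type(self) -> str:
        return "archive"

    def get_modal_payload(self, callback_id: str) -> dict:
        return {"title": "Archive", "callback_id": callback_id}


print(ResolveDialog().open_dialog("cb-123"))
print(ArchiveDialog().open_dialog("cb-123"))
```

The real classes take a `SlackActionRequest` and a `Group` and return a Slack
`View`; only the inheritance shape is shown here.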
---
.../integrations/slack/webhooks/action.py | 455 ++++++++----------
1 file changed, 205 insertions(+), 250 deletions(-)
diff --git a/src/sentry/integrations/slack/webhooks/action.py b/src/sentry/integrations/slack/webhooks/action.py
index e546eb3b2c5095..afe5e2a896b37e 100644
--- a/src/sentry/integrations/slack/webhooks/action.py
+++ b/src/sentry/integrations/slack/webhooks/action.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import logging
+from abc import ABC, abstractmethod
from collections.abc import Mapping, MutableMapping, Sequence
from typing import Any
@@ -26,7 +27,6 @@
from sentry.exceptions import UnableToAcceptMemberInvitationException
from sentry.integrations.services.integration import integration_service
from sentry.integrations.slack.message_builder.issues import SlackIssuesMessageBuilder
-from sentry.integrations.slack.message_builder.types import SlackBody
from sentry.integrations.slack.metrics import (
SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC,
SLACK_WEBHOOK_GROUP_ACTIONS_SUCCESS_DATADOG_METRIC,
@@ -160,8 +160,7 @@ def get_group(slack_request: SlackActionRequest) -> Group | None:
def _is_message(data: Mapping[str, Any]) -> bool:
"""
- XXX(epurkhiser): Used in coordination with construct_reply.
- Bot posted messages will not have the type at all.
+ Bot posted messages will not have the type at all.
"""
return data.get("original_message", {}).get("type") == "message"
@@ -324,251 +323,6 @@ def on_status(
user_id=user.id,
)
- def build_format_options(self, options: dict[str, str]) -> list[dict[str, Any]]:
- return [
- {
- "text": {
- "type": "plain_text",
- "text": text,
- "emoji": True,
- },
- "value": value,
- }
- for text, value in options.items()
- ]
-
- def build_modal_payload(
- self,
- title: str,
- action_text: str,
- options: dict[str, str],
- initial_option_text: str,
- initial_option_value: str,
- callback_id: str,
- metadata: str,
- ) -> View:
- formatted_options = self.build_format_options(options)
-
- return View(
- type="modal",
- title={"type": "plain_text", "text": f"{title} Issue"},
- blocks=[
- {
- "type": "section",
- "text": {"type": "mrkdwn", "text": action_text},
- "accessory": {
- "type": "static_select",
- "initial_option": {
- "text": {
- "type": "plain_text",
- "text": initial_option_text,
- "emoji": True,
- },
- "value": initial_option_value,
- },
- "options": formatted_options,
- "action_id": "static_select-action",
- },
- }
- ],
- close={"type": "plain_text", "text": "Cancel"},
- submit={"type": "plain_text", "text": title},
- private_metadata=metadata,
- callback_id=callback_id,
- )
-
- def build_resolve_modal_payload(self, callback_id: str, metadata: str) -> View:
- return self.build_modal_payload(
- title="Resolve",
- action_text="Resolve",
- options=RESOLVE_OPTIONS,
- initial_option_text="Immediately",
- initial_option_value="resolved",
- callback_id=callback_id,
- metadata=metadata,
- )
-
- def build_archive_modal_payload(self, callback_id: str, metadata: str) -> View:
- return self.build_modal_payload(
- title="Archive",
- action_text="Archive",
- options=ARCHIVE_OPTIONS,
- initial_option_text="Until escalating",
- initial_option_value="ignored:archived_until_escalating",
- callback_id=callback_id,
- metadata=metadata,
- )
-
- def _update_modal(
- self,
- slack_client: SlackSdkClient,
- external_id: str,
- modal_payload: View,
- slack_request: SlackActionRequest,
- ) -> None:
- try:
- slack_client.views_update(
- external_id=external_id,
- view=modal_payload,
- )
- except SlackApiError as e:
- # If the external_id is not found, Slack we send `not_found` error
- # https://api.slack.com/methods/views.update
- if unpack_slack_api_error(e) == MODAL_NOT_FOUND:
- metrics.incr(
- SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC,
- sample_rate=1.0,
- tags={"type": "update_modal"},
- )
- logging_data = slack_request.get_logging_data()
- _logger.exception(
- "slack.action.update-modal-not-found",
- extra={
- **logging_data,
- "trigger_id": slack_request.data["trigger_id"],
- "dialog": "resolve",
- },
- )
- # The modal was not found, so we need to open a new one
- self._open_modal(slack_client, modal_payload, slack_request)
- else:
- raise
-
- def _open_modal(
- self, slack_client: SlackSdkClient, modal_payload: View, slack_request: SlackActionRequest
- ) -> None:
- # Error handling is done in the calling function
- slack_client.views_open(
- trigger_id=slack_request.data["trigger_id"],
- view=modal_payload,
- )
-
- def open_resolve_dialog(self, slack_request: SlackActionRequest, group: Group) -> None:
- # XXX(epurkhiser): In order to update the original message we have to
- # keep track of the response_url in the callback_id. Definitely hacky,
- # but seems like there's no other solutions [1]:
- #
- # [1]: https://stackoverflow.com/questions/46629852/update-a-bot-message-after-responding-to-a-slack-dialog#comment80795670_46629852
- org = group.project.organization
- callback_id_dict = {
- "issue": group.id,
- "orig_response_url": slack_request.data["response_url"],
- "is_message": _is_message(slack_request.data),
- }
- if slack_request.data.get("channel"):
- callback_id_dict["channel_id"] = slack_request.data["channel"]["id"]
- callback_id_dict["rule"] = slack_request.callback_data.get("rule")
- callback_id = orjson.dumps(callback_id_dict).decode()
-
- # only add tags to metadata
- metadata_dict = callback_id_dict.copy()
- metadata_dict["tags"] = list(slack_request.get_tags())
- metadata = orjson.dumps(metadata_dict).decode()
-
- # XXX(CEO): the second you make a selection (without hitting Submit) it sends a slightly different request
- modal_payload = self.build_resolve_modal_payload(callback_id, metadata=metadata)
- slack_client = SlackSdkClient(integration_id=slack_request.integration.id)
- try:
- # We need to use the action_ts as the external_id to update the modal
- # We passed this in control when we sent the loading modal to beat the 3 second timeout
- external_id = slack_request.get_action_ts()
-
- if not external_id:
- # If we don't have an external_id or option is disabled we need to open a new modal
- self._open_modal(slack_client, modal_payload, slack_request)
- else:
- self._update_modal(slack_client, external_id, modal_payload, slack_request)
-
- metrics.incr(
- SLACK_WEBHOOK_GROUP_ACTIONS_SUCCESS_DATADOG_METRIC,
- sample_rate=1.0,
- tags={"type": "resolve_modal_open"},
- )
- except SlackApiError:
- metrics.incr(
- SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC,
- sample_rate=1.0,
- tags={"type": "resolve_modal_open"},
- )
- _logger.exception(
- "slack.action.response-error",
- extra={
- "organization_id": org.id,
- "integration_id": slack_request.integration.id,
- "trigger_id": slack_request.data["trigger_id"],
- "dialog": "resolve",
- },
- )
-
- def open_archive_dialog(self, slack_request: SlackActionRequest, group: Group) -> None:
- org = group.project.organization
-
- callback_id_dict = {
- "issue": group.id,
- "orig_response_url": slack_request.data["response_url"],
- "is_message": _is_message(slack_request.data),
- "rule": slack_request.callback_data.get("rule"),
- }
-
- if slack_request.data.get("channel"):
- callback_id_dict["channel_id"] = slack_request.data["channel"]["id"]
- callback_id = orjson.dumps(callback_id_dict).decode()
-
- # only add tags to metadata
- metadata_dict = callback_id_dict.copy()
- metadata_dict["tags"] = list(slack_request.get_tags())
- metadata = orjson.dumps(metadata_dict).decode()
-
- modal_payload = self.build_archive_modal_payload(callback_id, metadata=metadata)
- slack_client = SlackSdkClient(integration_id=slack_request.integration.id)
- try:
- # We need to use the action_ts as the external_id to update the modal
- # We passed this in control when we sent the loading modal to beat the 3 second timeout
- external_id = slack_request.get_action_ts()
-
- if not external_id:
- # If we don't have an external_id or option is disabled we need to open a new modal
- self._open_modal(slack_client, modal_payload, slack_request)
- else:
- self._update_modal(slack_client, external_id, modal_payload, slack_request)
-
- metrics.incr(
- SLACK_WEBHOOK_GROUP_ACTIONS_SUCCESS_DATADOG_METRIC,
- sample_rate=1.0,
- tags={"type": "archive_modal_open"},
- )
- except SlackApiError:
- metrics.incr(
- SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC,
- sample_rate=1.0,
- tags={"type": "archive_modal_open"},
- )
- _logger.exception(
- "slack.action.response-error",
- extra={
- "organization_id": org.id,
- "integration_id": slack_request.integration.id,
- "trigger_id": slack_request.data["trigger_id"],
- "dialog": "archive",
- },
- )
-
- def construct_reply(self, attachment: SlackBody, is_message: bool = False) -> SlackBody:
- # XXX(epurkhiser): Slack is inconsistent about it's expected responses
- # for interactive action requests.
- #
- # * For _unfurled_ action responses, slack expects the entire
- # attachment body used to replace the unfurled attachment to be at
- # the top level of the json response body.
- #
- # * For _bot posted message_ action responses, slack expects the
- # attachment body used to replace the attachment to be within an
- # `attachments` array.
- if is_message:
- attachment = {"attachments": [attachment]}
-
- return attachment
-
def _handle_group_actions(
self,
slack_request: SlackActionRequest,
@@ -690,10 +444,10 @@ def _handle_group_actions(
): # TODO: remove this as it is replaced by the options-load endpoint
self.on_assign(request, identity_user, group, action)
elif action.name == "resolve_dialog":
- self.open_resolve_dialog(slack_request, group)
+ _ResolveDialog().open_dialog(slack_request, group)
defer_attachment_update = True
elif action.name == "archive_dialog":
- self.open_archive_dialog(slack_request, group)
+ _ArchiveDialog().open_dialog(slack_request, group)
defer_attachment_update = True
except client.ApiError as error:
return self.api_error(slack_request, group, identity_user, error, action.name)
@@ -970,3 +724,204 @@ def handle_member_approval(self, slack_request: SlackActionRequest, action: str)
)
return self.respond({"text": message})
+
+
+class _ModalDialog(ABC):
+ @property
+ @abstractmethod
+ def dialog_type(self) -> str:
+ raise NotImplementedError
+
+ def _build_format_options(self, options: dict[str, str]) -> list[dict[str, Any]]:
+ return [
+ {
+ "text": {
+ "type": "plain_text",
+ "text": text,
+ "emoji": True,
+ },
+ "value": value,
+ }
+ for text, value in options.items()
+ ]
+
+ def build_modal_payload(
+ self,
+ title: str,
+ action_text: str,
+ options: dict[str, str],
+ initial_option_text: str,
+ initial_option_value: str,
+ callback_id: str,
+ metadata: str,
+ ) -> View:
+ formatted_options = self._build_format_options(options)
+
+ return View(
+ type="modal",
+ title={"type": "plain_text", "text": f"{title} Issue"},
+ blocks=[
+ {
+ "type": "section",
+ "text": {"type": "mrkdwn", "text": action_text},
+ "accessory": {
+ "type": "static_select",
+ "initial_option": {
+ "text": {
+ "type": "plain_text",
+ "text": initial_option_text,
+ "emoji": True,
+ },
+ "value": initial_option_value,
+ },
+ "options": formatted_options,
+ "action_id": "static_select-action",
+ },
+ }
+ ],
+ close={"type": "plain_text", "text": "Cancel"},
+ submit={"type": "plain_text", "text": title},
+ private_metadata=metadata,
+ callback_id=callback_id,
+ )
+
+ @abstractmethod
+ def get_modal_payload(self, callback_id: str, metadata: str) -> View:
+ raise NotImplementedError
+
+ def _update_modal(
+ self,
+ slack_client: SlackSdkClient,
+ external_id: str,
+ modal_payload: View,
+ slack_request: SlackActionRequest,
+ ) -> None:
+ try:
+ slack_client.views_update(
+ external_id=external_id,
+ view=modal_payload,
+ )
+ except SlackApiError as e:
+            # If the external_id is not found, Slack will send a `not_found` error
+ # https://api.slack.com/methods/views.update
+ if unpack_slack_api_error(e) == MODAL_NOT_FOUND:
+ metrics.incr(
+ SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC,
+ sample_rate=1.0,
+ tags={"type": "update_modal"},
+ )
+ logging_data = slack_request.get_logging_data()
+ _logger.exception(
+ "slack.action.update-modal-not-found",
+ extra={
+ **logging_data,
+ "trigger_id": slack_request.data["trigger_id"],
+ "dialog": self.dialog_type,
+ },
+ )
+ # The modal was not found, so we need to open a new one
+ self._open_modal(slack_client, modal_payload, slack_request)
+ else:
+ raise
+
+ def _open_modal(
+ self, slack_client: SlackSdkClient, modal_payload: View, slack_request: SlackActionRequest
+ ) -> None:
+ # Error handling is done in the calling function
+ slack_client.views_open(
+ trigger_id=slack_request.data["trigger_id"],
+ view=modal_payload,
+ )
+
+ def open_dialog(self, slack_request: SlackActionRequest, group: Group) -> None:
+ # XXX(epurkhiser): In order to update the original message we have to
+ # keep track of the response_url in the callback_id. Definitely hacky,
+ # but seems like there's no other solutions [1]:
+ #
+ # [1]: https://stackoverflow.com/questions/46629852/update-a-bot-message-after-responding-to-a-slack-dialog#comment80795670_46629852
+ org = group.project.organization
+
+ callback_id_dict = {
+ "issue": group.id,
+ "orig_response_url": slack_request.data["response_url"],
+ "is_message": _is_message(slack_request.data),
+ "rule": slack_request.callback_data.get("rule"),
+ }
+
+ if slack_request.data.get("channel"):
+ callback_id_dict["channel_id"] = slack_request.data["channel"]["id"]
+ callback_id = orjson.dumps(callback_id_dict).decode()
+
+ # only add tags to metadata
+ metadata_dict = callback_id_dict.copy()
+ metadata_dict["tags"] = list(slack_request.get_tags())
+ metadata = orjson.dumps(metadata_dict).decode()
+
+ # XXX(CEO): the second you make a selection (without hitting Submit) it sends a slightly different request
+ modal_payload = self.get_modal_payload(callback_id, metadata=metadata)
+ slack_client = SlackSdkClient(integration_id=slack_request.integration.id)
+ try:
+ # We need to use the action_ts as the external_id to update the modal
+ # We passed this in control when we sent the loading modal to beat the 3 second timeout
+ external_id = slack_request.get_action_ts()
+
+ if not external_id:
+ # If we don't have an external_id or option is disabled we need to open a new modal
+ self._open_modal(slack_client, modal_payload, slack_request)
+ else:
+ self._update_modal(slack_client, external_id, modal_payload, slack_request)
+
+ metrics.incr(
+ SLACK_WEBHOOK_GROUP_ACTIONS_SUCCESS_DATADOG_METRIC,
+ sample_rate=1.0,
+ tags={"type": f"{self.dialog_type}_modal_open"},
+ )
+ except SlackApiError:
+ metrics.incr(
+ SLACK_WEBHOOK_GROUP_ACTIONS_FAILURE_DATADOG_METRIC,
+ sample_rate=1.0,
+ tags={"type": f"{self.dialog_type}_modal_open"},
+ )
+ _logger.exception(
+ "slack.action.response-error",
+ extra={
+ "organization_id": org.id,
+ "integration_id": slack_request.integration.id,
+ "trigger_id": slack_request.data["trigger_id"],
+ "dialog": self.dialog_type,
+ },
+ )
+
+
+class _ResolveDialog(_ModalDialog):
+ @property
+ def dialog_type(self) -> str:
+ return "resolve"
+
+ def get_modal_payload(self, callback_id: str, metadata: str) -> View:
+ return self.build_modal_payload(
+ title="Resolve",
+ action_text="Resolve",
+ options=RESOLVE_OPTIONS,
+ initial_option_text="Immediately",
+ initial_option_value="resolved",
+ callback_id=callback_id,
+ metadata=metadata,
+ )
+
+
+class _ArchiveDialog(_ModalDialog):
+ @property
+ def dialog_type(self) -> str:
+ return "archive"
+
+ def get_modal_payload(self, callback_id: str, metadata: str) -> View:
+ return self.build_modal_payload(
+ title="Archive",
+ action_text="Archive",
+ options=ARCHIVE_OPTIONS,
+ initial_option_text="Until escalating",
+ initial_option_value="ignored:archived_until_escalating",
+ callback_id=callback_id,
+ metadata=metadata,
+ )
From e5b6885a68eb92bc7223a1d69f08c6d486a35123 Mon Sep 17 00:00:00 2001
From: Mark Story
Date: Wed, 2 Oct 2024 16:31:00 -0400
Subject: [PATCH 036/139] chore(deletions) Remove import shims in deletions
(#78404)
With getsentry updated, these import shims can be removed.
Part of #77479
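For context, the shim being deleted relies on a module-level `__getattr__`
(PEP 562); a minimal single-file illustration of that pattern, using
placeholder names rather than the Sentry module, looks like this:

```python
from typing import Any


def get_manager() -> str:
    # Placeholder for the real accessor the shim forwards to.
    return "default deletion task manager"


def __getattr__(name: str) -> Any:
    # Other modules that still import `default_manager` from here get a value
    # lazily, even though no such global is defined in this module anymore.
    if name == "default_manager":
        return get_manager()
    raise AttributeError(f"module {__name__} has no attribute {name}")
```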
---
src/sentry/deletions/__init__.py | 7 -------
src/sentry/models/__init__.py | 1 -
src/sentry/models/scheduledeletion.py | 6 ------
3 files changed, 14 deletions(-)
delete mode 100644 src/sentry/models/scheduledeletion.py
diff --git a/src/sentry/deletions/__init__.py b/src/sentry/deletions/__init__.py
index e5e7e0a7ec59f5..59e13f34e9a13a 100644
--- a/src/sentry/deletions/__init__.py
+++ b/src/sentry/deletions/__init__.py
@@ -200,13 +200,6 @@ def get_manager() -> DeletionTaskManager:
return _default_manager
-def __getattr__(name: str) -> Any:
- # Shim for getsentry
- if name == "default_manager":
- return get_manager()
- raise AttributeError(f"module {__name__} has no attribute {name}")
-
-
def get(
task: type[BaseDeletionTask[Any]] | None = None,
**kwargs: Any,
diff --git a/src/sentry/models/__init__.py b/src/sentry/models/__init__.py
index 563978898e92b3..0840e6ed19a669 100644
--- a/src/sentry/models/__init__.py
+++ b/src/sentry/models/__init__.py
@@ -104,7 +104,6 @@
from .rulefirehistory import RuleFireHistory # NOQA
from .rulesnooze import RuleSnooze # NOQA
from .savedsearch import * # NOQA
-from .scheduledeletion import * # NOQA
from .search_common import * # NOQA
from .sentryshot import * # NOQA
from .sourcemapprocessingissue import * # NOQA
diff --git a/src/sentry/models/scheduledeletion.py b/src/sentry/models/scheduledeletion.py
deleted file mode 100644
index 02f4d560f42b03..00000000000000
--- a/src/sentry/models/scheduledeletion.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# TODO(mark) Remove getsentry import shim
-from __future__ import annotations
-
-from sentry.deletions.models.scheduleddeletion import RegionScheduledDeletion
-
-__all__ = ("RegionScheduledDeletion",)
From 538de87a379bff6e72c2bd537bfe84f80bfa75d6 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 16:33:58 -0400
Subject: [PATCH 037/139] fix(rr6): Correct inclusion test of historyMethods
(#78516)
---
static/app/utils/browserHistory.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/static/app/utils/browserHistory.tsx b/static/app/utils/browserHistory.tsx
index 6aa4b862920c65..430b6d2432b649 100644
--- a/static/app/utils/browserHistory.tsx
+++ b/static/app/utils/browserHistory.tsx
@@ -30,7 +30,7 @@ const historyMethods: Array = [
*/
const proxyLegacyBrowserHistory: ProxyHandler = {
get(_target, prop, _receiver) {
- if (prop in historyMethods) {
+ if (historyMethods.includes(prop.toString() as keyof History)) {
// eslint-disable-next-line no-console
console.warn('Legacy browserHistory called before patched!');
Sentry.captureException(new Error('legacy browserHistory called!'), {
From 3d0b35bef64016653ead9df8e463c865581785f7 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 16:46:54 -0400
Subject: [PATCH 038/139] ref(uptime): Migrate empty object headers to empty
list (#78478)
---
migrations_lockfile.txt | 2 +-
...ranslate_uptime_object_headers_to_lists.py | 45 +++++++++++++++++++
...ranslate_uptime_object_headers_to_lists.py | 17 +++++++
3 files changed, 63 insertions(+), 1 deletion(-)
create mode 100644 src/sentry/uptime/migrations/0016_translate_uptime_object_headers_to_lists.py
create mode 100644 tests/sentry/uptime/migrations/test_0016_translate_uptime_object_headers_to_lists.py
diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt
index f43e5ba7cd5eb5..2bff062a861269 100644
--- a/migrations_lockfile.txt
+++ b/migrations_lockfile.txt
@@ -12,5 +12,5 @@ remote_subscriptions: 0003_drop_remote_subscription
replays: 0004_index_together
sentry: 0770_increase_project_slug_max_length
social_auth: 0002_default_auto_field
-uptime: 0015_headers_deafult_empty_list
+uptime: 0016_translate_uptime_object_headers_to_lists
workflow_engine: 0008_detector_state
diff --git a/src/sentry/uptime/migrations/0016_translate_uptime_object_headers_to_lists.py b/src/sentry/uptime/migrations/0016_translate_uptime_object_headers_to_lists.py
new file mode 100644
index 00000000000000..7632e1bbef05e6
--- /dev/null
+++ b/src/sentry/uptime/migrations/0016_translate_uptime_object_headers_to_lists.py
@@ -0,0 +1,45 @@
+# Generated by Django 5.1.1 on 2024-10-02 16:06
+from django.db import migrations
+from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+from django.db.migrations.state import StateApps
+
+from sentry.new_migrations.migrations import CheckedMigration
+from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
+
+
+def migrate_header_objects_to_lists(
+ apps: StateApps,
+ schema_editor: BaseDatabaseSchemaEditor,
+) -> None:
+ UptimeSubscription = apps.get_model("uptime", "UptimeSubscription")
+ for sub in RangeQuerySetWrapperWithProgressBar(UptimeSubscription.objects.filter(headers={})):
+ sub.headers = []
+ sub.save()
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("uptime", "0015_headers_deafult_empty_list"),
+ ]
+
+ operations = [
+ migrations.RunPython(
+ migrate_header_objects_to_lists,
+ migrations.RunPython.noop,
+ hints={"tables": ["uptime_uptimesubscription"]},
+ ),
+ ]
diff --git a/tests/sentry/uptime/migrations/test_0016_translate_uptime_object_headers_to_lists.py b/tests/sentry/uptime/migrations/test_0016_translate_uptime_object_headers_to_lists.py
new file mode 100644
index 00000000000000..9b94c207d5295a
--- /dev/null
+++ b/tests/sentry/uptime/migrations/test_0016_translate_uptime_object_headers_to_lists.py
@@ -0,0 +1,17 @@
+from sentry.testutils.cases import TestMigrations
+
+
+class TestTranslateUptimeHeaderObjectsToList(TestMigrations):
+ app = "uptime"
+ migrate_from = "0015_headers_deafult_empty_list"
+ migrate_to = "0016_translate_uptime_object_headers_to_lists"
+
+ def setup_initial_state(self):
+ self.sub = self.create_uptime_subscription(headers={})
+ self.sub2 = self.create_uptime_subscription(headers=[["Accept", "text/html"]])
+
+ def test(self):
+ self.sub.refresh_from_db()
+ self.sub2.refresh_from_db()
+ assert self.sub.headers == []
+ assert self.sub2.headers == [["Accept", "text/html"]]
From 83b4b5dd8e7f0e95067eb4b57c24c58a09782180 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 16:51:20 -0400
Subject: [PATCH 039/139] ref(rr6): Remove react-router 3 from pipelineView
(#78492)
---
.../integrationPipeline/pipelineView.spec.tsx | 18 ++++++--
.../integrationPipeline/pipelineView.tsx | 42 +++++++++----------
2 files changed, 34 insertions(+), 26 deletions(-)
diff --git a/static/app/views/integrationPipeline/pipelineView.spec.tsx b/static/app/views/integrationPipeline/pipelineView.spec.tsx
index 88dec3db626495..1d6ec312865877 100644
--- a/static/app/views/integrationPipeline/pipelineView.spec.tsx
+++ b/static/app/views/integrationPipeline/pipelineView.spec.tsx
@@ -17,7 +17,12 @@ describe('PipelineView', () => {
});
it('renders awsLambdaProjectSelect', () => {
- render( );
+ render( , {
+ // XXX(epurkhiser): The pipeline view renders a Router inside of it. Stop
+        // our test renderer from rendering its Router by setting the wrapper to
+ // undefined.
+ wrapper: undefined,
+ });
expect(screen.getByText('mock_AwsLambdaProjectSelect')).toBeInTheDocument();
@@ -27,8 +32,13 @@ describe('PipelineView', () => {
it('errros on invalid pipelineName', () => {
jest.spyOn(console, 'error').mockImplementation(() => {});
- expect(() => render( )).toThrow(
- 'Invalid pipeline name other'
- );
+ expect(() =>
+ render( , {
+ // XXX(epurkhiser): The pipeline view renders a Router inside of it. Stop
+          // our test renderer from rendering its Router by setting the wrapper to
+ // undefined.
+ wrapper: undefined,
+ })
+ ).toThrow('Invalid pipeline name other');
});
});
diff --git a/static/app/views/integrationPipeline/pipelineView.tsx b/static/app/views/integrationPipeline/pipelineView.tsx
index 9f07055661a801..89f235439909f3 100644
--- a/static/app/views/integrationPipeline/pipelineView.tsx
+++ b/static/app/views/integrationPipeline/pipelineView.tsx
@@ -1,10 +1,9 @@
-import {useEffect} from 'react';
-// biome-ignore lint/nursery/noRestrictedImports: Will be removed with react router 6
-import {createMemoryHistory, Route, Router, RouterContext} from 'react-router';
+import {useEffect, useState} from 'react';
+import {createMemoryRouter, RouterProvider} from 'react-router-dom';
import Indicators from 'sentry/components/indicators';
import {ThemeAndStyleProvider} from 'sentry/components/themeAndStyleProvider';
-import {RouteContext} from 'sentry/views/routeContext';
+import {DANGEROUS_SET_REACT_ROUTER_6_HISTORY} from 'sentry/utils/browserHistory';
import AwsLambdaCloudformation from './awsLambdaCloudformation';
import AwsLambdaFailureDetails from './awsLambdaFailureDetails';
@@ -23,6 +22,18 @@ type Props = {
pipelineName: string;
};
+function buildRouter(Component: React.ComponentType, props: any) {
+ const router = createMemoryRouter([
+ {
+ path: '*',
+ element: ,
+ },
+ ]);
+
+ DANGEROUS_SET_REACT_ROUTER_6_HISTORY(router);
+ return router;
+}
+
/**
* This component is a wrapper for specific pipeline views for integrations
*/
@@ -37,26 +48,13 @@ function PipelineView({pipelineName, ...props}: Props) {
// Set the page title
useEffect(() => void (document.title = title), [title]);
-
- const memoryHistory = createMemoryHistory();
- memoryHistory.push('/');
+ const [router] = useState(() => buildRouter(Component, props));
return (
- {
- return (
-
-
-
-
-
-
- );
- }}
- >
- } props={props} />
-
+
+
+
+
);
}
From 97ec0acf8ea1e430b7ef843753df9fd257e508d4 Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 16:52:45 -0400
Subject: [PATCH 040/139] ref(ui): Bigger next button in aws integration setup
(#78521)
---
.../views/integrationPipeline/components/footerWithButtons.tsx | 1 -
1 file changed, 1 deletion(-)
diff --git a/static/app/views/integrationPipeline/components/footerWithButtons.tsx b/static/app/views/integrationPipeline/components/footerWithButtons.tsx
index 52826b9150125d..91acc03e7e1317 100644
--- a/static/app/views/integrationPipeline/components/footerWithButtons.tsx
+++ b/static/app/views/integrationPipeline/components/footerWithButtons.tsx
@@ -23,7 +23,6 @@ export default function FooterWithButtons({
}: FooterWithButtonsProps) {
const buttonProps = {
priority: 'primary',
- size: 'xs',
disabled,
onClick,
children: buttonText,
From 0938f30809010d74d318ce8b17bb6dbde367a00d Mon Sep 17 00:00:00 2001
From: Colleen O'Rourke
Date: Wed, 2 Oct 2024 13:58:46 -0700
Subject: [PATCH 041/139] ref(alerts): Remove unused sessions code (#78512)
Follow-up to https://github.com/getsentry/sentry/pull/68764, which
removed sessions code but left this function behind even though it is
no longer used anywhere.
---
.../incidents/subscription_processor.py | 53 -------------------
1 file changed, 53 deletions(-)
diff --git a/src/sentry/incidents/subscription_processor.py b/src/sentry/incidents/subscription_processor.py
index 9f25e8814af1b8..2a60c197e6843d 100644
--- a/src/sentry/incidents/subscription_processor.py
+++ b/src/sentry/incidents/subscription_processor.py
@@ -16,7 +16,6 @@
from sentry import features
from sentry.conf.server import SEER_ANOMALY_DETECTION_ENDPOINT_URL
-from sentry.constants import CRASH_RATE_ALERT_AGGREGATE_ALIAS, CRASH_RATE_ALERT_SESSION_COUNT_ALIAS
from sentry.incidents.logic import (
CRITICAL_TRIGGER_LABEL,
WARNING_TRIGGER_LABEL,
@@ -281,58 +280,6 @@ def get_comparison_aggregation_value(
result: float = (aggregation_value / comparison_aggregate) * 100
return result
- def get_crash_rate_alert_aggregation_value(
- self, subscription_update: QuerySubscriptionUpdate
- ) -> float | None:
- """
- Handles validation and extraction of Crash Rate Alerts subscription updates values.
- The subscription update looks like
- {
- '_crash_rate_alert_aggregate': 0.5,
- '_total_count': 34
- }
- - `_crash_rate_alert_aggregate` represents sessions_crashed/sessions or
- users_crashed/users, and so we need to subtract that number from 1 and then multiply by
- 100 to get the crash free percentage
- - `_total_count` represents the total sessions or user counts. This is used when
- CRASH_RATE_ALERT_MINIMUM_THRESHOLD is set in the sense that if the minimum threshold is
- greater than the session count, then the update is dropped. If the minimum threshold is
- not set then the total sessions count is just ignored
- """
- aggregation_value = subscription_update["values"]["data"][0][
- CRASH_RATE_ALERT_AGGREGATE_ALIAS
- ]
- if aggregation_value is None:
- self.reset_trigger_counts()
- metrics.incr("incidents.alert_rules.ignore_update_no_session_data")
- return None
-
- try:
- total_count = subscription_update["values"]["data"][0][
- CRASH_RATE_ALERT_SESSION_COUNT_ALIAS
- ]
- if CRASH_RATE_ALERT_MINIMUM_THRESHOLD is not None:
- min_threshold = int(CRASH_RATE_ALERT_MINIMUM_THRESHOLD)
- if total_count < min_threshold:
- self.reset_trigger_counts()
- metrics.incr(
- "incidents.alert_rules.ignore_update_count_lower_than_min_threshold"
- )
- return None
- except KeyError:
- # If for whatever reason total session count was not sent in the update,
- # ignore the minimum threshold comparison and continue along with processing the
- # update. However, this should not happen.
- logger.exception(
- "Received an update for a crash rate alert subscription, but no total "
- "sessions count was sent"
- )
- # The subscription aggregation for crash rate alerts uses the Discover percentage
- # function, which would technically return a ratio of sessions_crashed/sessions and
- # so we need to calculate the crash free percentage out of that returned value
- aggregation_value_result: int = round((1 - aggregation_value) * 100, 3)
- return aggregation_value_result
-
def get_crash_rate_alert_metrics_aggregation_value(
self, subscription_update: QuerySubscriptionUpdate
) -> float | None:
From c4fe51a9b02d29c8fd4ca40a490bef0c378c167b Mon Sep 17 00:00:00 2001
From: Christinarlong <60594860+Christinarlong@users.noreply.github.com>
Date: Wed, 2 Oct 2024 14:15:40 -0700
Subject: [PATCH 042/139] chore(sentry apps): Create forward shims for sentry
apps tasks (#78344)
---
pyproject.toml | 1 -
src/sentry/conf/server.py | 1 +
src/sentry/sentry_apps/tasks/__init__.py | 23 ++++
src/sentry/sentry_apps/tasks/sentry_apps.py | 120 +++++++++++++++++
src/sentry/sentry_apps/tasks/service_hooks.py | 14 ++
src/sentry/tasks/sentry_apps.py | 123 +++++++++++-------
6 files changed, 235 insertions(+), 47 deletions(-)
create mode 100644 src/sentry/sentry_apps/tasks/__init__.py
create mode 100644 src/sentry/sentry_apps/tasks/sentry_apps.py
create mode 100644 src/sentry/sentry_apps/tasks/service_hooks.py
diff --git a/pyproject.toml b/pyproject.toml
index ce544044fa3f61..9da4cbf52b1779 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -343,7 +343,6 @@ module = [
"sentry.tasks.auth",
"sentry.tasks.base",
"sentry.tasks.process_buffer",
- "sentry.tasks.sentry_apps",
"sentry.templatetags.sentry_assets",
"sentry.templatetags.sentry_helpers",
"sentry.templatetags.sentry_plugins",
diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py
index 0a4adad3ab044b..252ee023fd943b 100644
--- a/src/sentry/conf/server.py
+++ b/src/sentry/conf/server.py
@@ -752,6 +752,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str:
"sentry.integrations.github.tasks.pr_comment",
"sentry.integrations.jira.tasks",
"sentry.integrations.opsgenie.tasks",
+ "sentry.sentry_apps.tasks",
"sentry.snuba.tasks",
"sentry.replays.tasks",
"sentry.monitors.tasks.clock_pulse",
diff --git a/src/sentry/sentry_apps/tasks/__init__.py b/src/sentry/sentry_apps/tasks/__init__.py
new file mode 100644
index 00000000000000..62d7eee5405b7d
--- /dev/null
+++ b/src/sentry/sentry_apps/tasks/__init__.py
@@ -0,0 +1,23 @@
+from .sentry_apps import (
+ build_comment_webhook,
+ clear_region_cache,
+ create_or_update_service_hooks_for_sentry_app,
+ installation_webhook,
+ process_resource_change_bound,
+ send_alert_event,
+ send_resource_change_webhook,
+ workflow_notification,
+)
+from .service_hooks import process_service_hook
+
+__all__ = (
+ "send_alert_event",
+ "build_comment_webhook",
+ "clear_region_cache",
+ "create_or_update_service_hooks_for_sentry_app",
+ "installation_webhook",
+ "process_resource_change_bound",
+ "send_resource_change_webhook",
+ "workflow_notification",
+ "process_service_hook",
+)
diff --git a/src/sentry/sentry_apps/tasks/sentry_apps.py b/src/sentry/sentry_apps/tasks/sentry_apps.py
new file mode 100644
index 00000000000000..59b63336b29b75
--- /dev/null
+++ b/src/sentry/sentry_apps/tasks/sentry_apps.py
@@ -0,0 +1,120 @@
+from collections.abc import Mapping
+from typing import Any
+
+from celery import Task
+
+from sentry.eventstore.models import Event
+from sentry.tasks.base import instrumented_task
+from sentry.tasks.sentry_apps import CONTROL_TASK_OPTIONS, TASK_OPTIONS
+from sentry.tasks.sentry_apps import build_comment_webhook as old_build_comment_webhook
+from sentry.tasks.sentry_apps import clear_region_cache as old_clear_region_cache
+from sentry.tasks.sentry_apps import (
+ create_or_update_service_hooks_for_sentry_app as old_create_or_update_service_hooks_for_sentry_app,
+)
+from sentry.tasks.sentry_apps import installation_webhook as old_installation_webhook
+from sentry.tasks.sentry_apps import (
+ process_resource_change_bound as old_process_resource_change_bound,
+)
+from sentry.tasks.sentry_apps import retry_decorator
+from sentry.tasks.sentry_apps import send_alert_event as old_send_alert_event
+from sentry.tasks.sentry_apps import (
+ send_resource_change_webhook as old_send_resource_change_webhook,
+)
+from sentry.tasks.sentry_apps import workflow_notification as old_workflow_notification
+
+
+@instrumented_task(name="sentry.sentry_apps.tasks.sentry_apps.send_alert_event", **TASK_OPTIONS)
+@retry_decorator
+def send_alert_event(
+ event: Event,
+ rule: str,
+ sentry_app_id: int,
+ additional_payload_key: str | None = None,
+ additional_payload: Mapping[str, Any] | None = None,
+) -> None:
+ old_send_alert_event(
+ event=event,
+ rule=rule,
+ sentry_app_id=sentry_app_id,
+ additional_payload_key=additional_payload_key,
+ additional_payload=additional_payload,
+ )
+
+
+@instrumented_task(
+ "sentry.sentry_apps.tasks.sentry_apps.process_resource_change_bound", bind=True, **TASK_OPTIONS
+)
+@retry_decorator
+def process_resource_change_bound(
+ self: Task, action: str, sender: str, instance_id: int, **kwargs: Any
+) -> None:
+ old_process_resource_change_bound(
+ self=self, action=action, sender=sender, instance_id=instance_id, **kwargs
+ )
+
+
+@instrumented_task(
+ name="sentry.sentry_apps.tasks.sentry_apps.installation_webhook", **CONTROL_TASK_OPTIONS
+)
+@retry_decorator
+def installation_webhook(installation_id, user_id, *args, **kwargs):
+ old_installation_webhook(installation_id=installation_id, user_id=user_id, *args, **kwargs)
+
+
+@instrumented_task(
+ name="sentry.sentry_apps.tasks.sentry_apps.clear_region_cache", **CONTROL_TASK_OPTIONS
+)
+def clear_region_cache(sentry_app_id: int, region_name: str) -> None:
+ old_clear_region_cache(sentry_app_id=sentry_app_id, region_name=region_name)
+
+
+@instrumented_task(
+ name="sentry.sentry_apps.tasks.sentry_apps.workflow_notification", **TASK_OPTIONS
+)
+@retry_decorator
+def workflow_notification(installation_id, issue_id, type, user_id, *args, **kwargs):
+ old_workflow_notification(
+ installation_id=installation_id,
+ issue_id=issue_id,
+ type=type,
+ user_id=user_id,
+ *args,
+ **kwargs,
+ )
+
+
+@instrumented_task(
+ name="sentry.sentry_apps.tasks.sentry_apps.build_comment_webhook", **TASK_OPTIONS
+)
+@retry_decorator
+def build_comment_webhook(installation_id, issue_id, type, user_id, *args, **kwargs):
+ old_build_comment_webhook(
+ installation_id=installation_id,
+ issue_id=issue_id,
+ type=type,
+ user_id=user_id,
+ *args,
+ **kwargs,
+ )
+
+
+@instrumented_task(
+ "sentry.sentry_apps.tasks.sentry_apps.send_resource_change_webhook", **TASK_OPTIONS
+)
+@retry_decorator
+def send_resource_change_webhook(installation_id, event, data, *args, **kwargs):
+ old_send_resource_change_webhook(
+ installation_id=installation_id, event=event, data=data, *args, **kwargs
+ )
+
+
+@instrumented_task(
+ "sentry.sentry_apps.tasks.sentry_apps.create_or_update_service_hooks_for_sentry_app",
+ **CONTROL_TASK_OPTIONS,
+)
+def create_or_update_service_hooks_for_sentry_app(
+ sentry_app_id: int, webhook_url: str, events: list[str], **kwargs: dict
+) -> None:
+ old_create_or_update_service_hooks_for_sentry_app(
+ sentry_app_id=sentry_app_id, webhook_url=webhook_url, events=events, **kwargs
+ )
diff --git a/src/sentry/sentry_apps/tasks/service_hooks.py b/src/sentry/sentry_apps/tasks/service_hooks.py
new file mode 100644
index 00000000000000..cd9def6898102c
--- /dev/null
+++ b/src/sentry/sentry_apps/tasks/service_hooks.py
@@ -0,0 +1,14 @@
+from sentry.silo.base import SiloMode
+from sentry.tasks.base import instrumented_task, retry
+from sentry.tasks.servicehooks import process_service_hook as old_process_service_hook
+
+
+@instrumented_task(
+ name="sentry.sentry_apps.tasks.service_hooks.process_service_hook",
+ default_retry_delay=60 * 5,
+ max_retries=5,
+ silo_mode=SiloMode.REGION,
+)
+@retry
+def process_service_hook(servicehook_id, event, **kwargs):
+ old_process_service_hook(servicehook_id=servicehook_id, event=event, **kwargs)
diff --git a/src/sentry/tasks/sentry_apps.py b/src/sentry/tasks/sentry_apps.py
index 8becf3e2f66b65..f92b043a57b8b0 100644
--- a/src/sentry/tasks/sentry_apps.py
+++ b/src/sentry/tasks/sentry_apps.py
@@ -5,14 +5,15 @@
from collections.abc import Mapping
from typing import Any
-from celery import current_task
+from celery import Task, current_task
from django.urls import reverse
from requests.exceptions import RequestException
from sentry import analytics
from sentry.api.serializers import serialize
from sentry.constants import SentryAppInstallationStatus
-from sentry.eventstore.models import Event, GroupEvent
+from sentry.db.models.base import Model
+from sentry.eventstore.models import BaseEvent, Event, GroupEvent
from sentry.hybridcloud.rpc.caching import region_caching_service
from sentry.models.activity import Activity
from sentry.models.group import Group
@@ -23,6 +24,7 @@
from sentry.sentry_apps.models.sentry_app import VALID_EVENTS, SentryApp
from sentry.sentry_apps.models.sentry_app_installation import SentryAppInstallation
from sentry.sentry_apps.models.servicehook import ServiceHook, ServiceHookProject
+from sentry.sentry_apps.services.app.model import RpcSentryAppInstallation
from sentry.sentry_apps.services.app.service import (
app_service,
get_by_application_id,
@@ -31,6 +33,7 @@
from sentry.shared_integrations.exceptions import ApiHostError, ApiTimeoutError, ClientError
from sentry.silo.base import SiloMode
from sentry.tasks.base import instrumented_task, retry
+from sentry.users.services.user.model import RpcUser
from sentry.users.services.user.service import user_service
from sentry.utils import metrics
from sentry.utils.http import absolute_uri
@@ -68,7 +71,9 @@
TYPES = {"Group": Group, "Error": Event, "Comment": Activity}
-def _webhook_event_data(event, group_id, project_id):
+def _webhook_event_data(
+ event: Event | GroupEvent, group_id: int, project_id: int
+) -> dict[str, Any]:
project = Project.objects.get_from_cache(id=project_id)
organization = Organization.objects.get_from_cache(id=project.organization_id)
@@ -113,6 +118,7 @@ def send_alert_event(
:return:
"""
group = event.group
+ assert group, "Group must exist to get related attributes"
project = Project.objects.get_from_cache(id=group.project_id)
organization = Organization.objects.get_from_cache(id=project.organization_id)
@@ -164,9 +170,17 @@ def send_alert_event(
)
-def _process_resource_change(action, sender, instance_id, retryer=None, *args, **kwargs):
+def _process_resource_change(
+ *,
+ action: str,
+ sender: str,
+ instance_id: int,
+ retryer: Task | None = None,
+ **kwargs: Any,
+) -> None:
# The class is serialized as a string when enqueueing the class.
- model = TYPES[sender]
+ model: type[Event] | type[Model] = TYPES[sender]
+ instance: Event | Model | None = None
# The Event model has different hooks for the different event types. The sender
# determines which type eg. Error and therefore the 'name' eg. error
if issubclass(model, Event):
@@ -186,19 +200,19 @@ def _process_resource_change(action, sender, instance_id, retryer=None, *args, *
# We may run into a race condition where this task executes before the
# transaction that creates the Group has committed.
- try:
- if issubclass(model, Event):
- # XXX:(Meredith): Passing through the entire event was an intentional choice
- # to avoid having to query NodeStore again for data we had previously in
- # post_process. While this is not ideal, changing this will most likely involve
- # an overhaul of how we do things in post_process, not just this task alone.
- instance = kwargs.get("instance")
- else:
+ if issubclass(model, Event):
+ # XXX:(Meredith): Passing through the entire event was an intentional choice
+ # to avoid having to query NodeStore again for data we had previously in
+ # post_process. While this is not ideal, changing this will most likely involve
+ # an overhaul of how we do things in post_process, not just this task alone.
+ instance = kwargs.get("instance")
+ else:
+ try:
instance = model.objects.get(id=instance_id)
- except model.DoesNotExist as e:
- # Explicitly requeue the task, so we don't report this to Sentry until
- # we hit the max number of retries.
- return retryer.retry(exc=e)
+ except model.DoesNotExist as e:
+ # Explicitly requeue the task, so we don't report this to Sentry until
+ # we hit the max number of retries.
+ return retryer.retry(exc=e)
event = f"{name}.{action}"
@@ -211,32 +225,40 @@ def _process_resource_change(action, sender, instance_id, retryer=None, *args, *
org = Organization.objects.get_from_cache(
id=Project.objects.get_from_cache(id=instance.project_id).organization_id
)
-
- installations = filter(
- lambda i: event in i.sentry_app.events,
- app_service.get_installed_for_organization(organization_id=org.id),
- )
-
- for installation in installations:
- data = {}
- if isinstance(instance, Event) or isinstance(instance, GroupEvent):
- data[name] = _webhook_event_data(instance, instance.group_id, instance.project_id)
- else:
- data[name] = serialize(instance)
-
- # Trigger a new task for each webhook
- send_resource_change_webhook.delay(installation_id=installation.id, event=event, data=data)
+ assert org, "organization must exist to get related sentry app installations"
+ installations: list[RpcSentryAppInstallation] = [
+ installation
+ for installation in app_service.get_installed_for_organization(organization_id=org.id)
+ if event in installation.sentry_app.events
+ ]
+
+ for installation in installations:
+ data = {}
+ if isinstance(instance, (Event, GroupEvent)):
+ assert instance.group_id, "group id is required to create webhook event data"
+ data[name] = _webhook_event_data(instance, instance.group_id, instance.project_id)
+ else:
+ data[name] = serialize(instance)
+
+ # Trigger a new task for each webhook
+ send_resource_change_webhook.delay(
+ installation_id=installation.id, event=event, data=data
+ )
@instrumented_task("sentry.tasks.process_resource_change_bound", bind=True, **TASK_OPTIONS)
@retry_decorator
-def process_resource_change_bound(self, action, sender, instance_id, *args, **kwargs):
- _process_resource_change(action, sender, instance_id, retryer=self, *args, **kwargs)
+def process_resource_change_bound(
+ self: Task, action: str, sender: str, instance_id: int, **kwargs: Any
+) -> None:
+ _process_resource_change(
+ action=action, sender=sender, instance_id=instance_id, retryer=self, **kwargs
+ )
@instrumented_task(name="sentry.tasks.sentry_apps.installation_webhook", **CONTROL_TASK_OPTIONS)
@retry_decorator
-def installation_webhook(installation_id, user_id, *args, **kwargs):
+def installation_webhook(installation_id: int, user_id: int, *args: Any, **kwargs: Any) -> None:
from sentry.mediators.sentry_app_installations.installation_notifier import InstallationNotifier
extra = {"installation_id": installation_id, "user_id": user_id}
@@ -295,7 +317,9 @@ def clear_region_cache(sentry_app_id: int, region_name: str) -> None:
@instrumented_task(name="sentry.tasks.sentry_apps.workflow_notification", **TASK_OPTIONS)
@retry_decorator
-def workflow_notification(installation_id, issue_id, type, user_id, *args, **kwargs):
+def workflow_notification(
+ installation_id: int, issue_id: int, type: str, user_id: int, *args: Any, **kwargs: Any
+) -> None:
webhook_data = get_webhook_data(installation_id, issue_id, user_id)
if not webhook_data:
return
@@ -313,10 +337,12 @@ def workflow_notification(installation_id, issue_id, type, user_id, *args, **kwa
@instrumented_task(name="sentry.tasks.sentry_apps.build_comment_webhook", **TASK_OPTIONS)
@retry_decorator
-def build_comment_webhook(installation_id, issue_id, type, user_id, *args, **kwargs):
+def build_comment_webhook(
+ installation_id: int, issue_id: int, type: str, user_id: int, *args: Any, **kwargs: Any
+) -> None:
webhook_data = get_webhook_data(installation_id, issue_id, user_id)
if not webhook_data:
- return
+ return None
install, _, user = webhook_data
data = kwargs.get("data", {})
project_slug = data.get("project_slug")
@@ -340,18 +366,20 @@ def build_comment_webhook(installation_id, issue_id, type, user_id, *args, **kwa
)
-def get_webhook_data(installation_id, issue_id, user_id):
+def get_webhook_data(
+ installation_id: int, issue_id: int, user_id: int
+) -> tuple[RpcSentryAppInstallation, Group, RpcUser | None] | None:
extra = {"installation_id": installation_id, "issue_id": issue_id}
install = app_service.installation_by_id(id=installation_id)
if not install:
logger.info("workflow_notification.missing_installation", extra=extra)
- return
+ return None
try:
issue = Group.objects.get(id=issue_id)
except Group.DoesNotExist:
logger.info("workflow_notification.missing_issue", extra=extra)
- return
+ return None
user = None
if user_id:
@@ -364,7 +392,9 @@ def get_webhook_data(installation_id, issue_id, user_id):
@instrumented_task("sentry.tasks.send_process_resource_change_webhook", **TASK_OPTIONS)
@retry_decorator
-def send_resource_change_webhook(installation_id, event, data, *args, **kwargs):
+def send_resource_change_webhook(
+ installation_id: int, event: str, data: dict[str, Any], *args: Any, **kwargs: Any
+) -> None:
installation = app_service.installation_by_id(id=installation_id)
if not installation:
logger.info(
@@ -378,12 +408,12 @@ def send_resource_change_webhook(installation_id, event, data, *args, **kwargs):
metrics.incr("resource_change.processed", sample_rate=1.0, tags={"change_event": event})
-def notify_sentry_app(event, futures):
+def notify_sentry_app(event: BaseEvent, futures):
for f in futures:
if not f.kwargs.get("sentry_app"):
continue
- extra_kwargs = {
+ extra_kwargs: dict[str, Any] = {
"additional_payload_key": None,
"additional_payload": None,
}
@@ -406,7 +436,8 @@ def notify_sentry_app(event, futures):
)
-def send_webhooks(installation, event, **kwargs):
+def send_webhooks(installation: RpcSentryAppInstallation, event: str, **kwargs: Any) -> None:
+ servicehook: ServiceHook
try:
servicehook = ServiceHook.objects.get(
organization_id=installation.organization_id, actor_id=installation.id
@@ -452,7 +483,7 @@ def send_webhooks(installation, event, **kwargs):
send_and_save_webhook_request(
installation.sentry_app,
request_data,
- servicehook.sentry_app.webhook_url,
+ installation.sentry_app.webhook_url,
)
From c9f64d5b3202c40c6eaab02505e1d9ac5179d4ba Mon Sep 17 00:00:00 2001
From: mia hsu <55610339+ameliahsu@users.noreply.github.com>
Date: Wed, 2 Oct 2024 14:25:45 -0700
Subject: [PATCH 043/139] feat(onboarding): add feature flag for messaging
integration onboarding (#78486)
Add a feature flag for the messaging integration onboarding flow on the
project creation page.
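A minimal sketch, assuming the usual `features.has` helper, of how backend
code could gate the new flow on this flag; `show_messaging_onboarding` and its
`organization` argument are illustrative names, not part of this change.

```python
from sentry import features


def show_messaging_onboarding(organization) -> bool:
    # Only surface the messaging-integration step during project creation
    # when the organization has the new flag enabled.
    return features.has(
        "organizations:messaging-integration-onboarding-project-creation",
        organization,
    )
```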
---
src/sentry/features/temporary.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py
index 52c177d5984b88..cbe0a8621a1def 100644
--- a/src/sentry/features/temporary.py
+++ b/src/sentry/features/temporary.py
@@ -183,6 +183,8 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:mep-use-default-tags", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
# Enable messaging integration onboarding when setting up alerts
manager.add("organizations:messaging-integration-onboarding", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable messaging-integration onboarding when creating a new project
+ manager.add("organizations:messaging-integration-onboarding-project-creation", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable metric alert charts in email/slack
manager.add("organizations:metric-alert-chartcuterie", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=True)
# Enable threshold period in metric alert rule builder
From b73219f3c72a7706dcec306e12ac0439bd62ab7e Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Wed, 2 Oct 2024 14:38:03 -0700
Subject: [PATCH 044/139] fix(issues): Remove border radius when header stuck
(#78524)
---
static/app/views/issueDetails/streamline/eventDetails.tsx | 7 +++++++
.../app/views/issueDetails/streamline/eventNavigation.tsx | 4 ++++
2 files changed, 11 insertions(+)
diff --git a/static/app/views/issueDetails/streamline/eventDetails.tsx b/static/app/views/issueDetails/streamline/eventDetails.tsx
index 771b9a408f4007..499e0c71574547 100644
--- a/static/app/views/issueDetails/streamline/eventDetails.tsx
+++ b/static/app/views/issueDetails/streamline/eventDetails.tsx
@@ -13,6 +13,7 @@ import {EnvironmentPageFilter} from 'sentry/components/organizations/environment
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import type {MultiSeriesEventsStats} from 'sentry/types/organization';
+import {useIsStuck} from 'sentry/utils/useIsStuck';
import {useLocation} from 'sentry/utils/useLocation';
import useMedia from 'sentry/utils/useMedia';
import {useNavigate} from 'sentry/utils/useNavigate';
@@ -55,6 +56,7 @@ export function EventDetails({
const isScreenMedium = useMedia(`(max-width: ${theme.breakpoints.medium})`);
const {environments} = selection;
const [nav, setNav] = useState(null);
+ const isStuck = useIsStuck(nav);
const {eventDetails, dispatch} = useEventDetailsReducer();
const searchQuery = useEventQuery({group});
@@ -160,6 +162,7 @@ export function EventDetails({
ref={setNav}
query={searchQuery}
onViewAllEvents={() => setPageContent(EventPageContent.LIST)}
+ data-stuck={isStuck}
/>
@@ -197,6 +200,10 @@ const FloatingEventNavigation = styled(EventNavigation)`
background: ${p => p.theme.background};
z-index: 500;
border-radius: ${p => p.theme.borderRadiusTop};
+
+ &[data-stuck='true'] {
+ border-radius: 0;
+ }
`;
const ExtraContent = styled('div')`
diff --git a/static/app/views/issueDetails/streamline/eventNavigation.tsx b/static/app/views/issueDetails/streamline/eventNavigation.tsx
index a4ce2f4dab9dcf..46011297b95f34 100644
--- a/static/app/views/issueDetails/streamline/eventNavigation.tsx
+++ b/static/app/views/issueDetails/streamline/eventNavigation.tsx
@@ -46,6 +46,10 @@ type EventNavigationProps = {
group: Group;
onViewAllEvents: (e: React.MouseEvent) => void;
className?: string;
+ /**
+ * Data property to help style the component when it's sticky
+ */
+ 'data-stuck'?: boolean;
query?: string;
style?: CSSProperties;
};
From cb3e5cf130defedb6cb5ff4172456e4fae51f64c Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 17:39:47 -0400
Subject: [PATCH 045/139] feat(uptime): Include environment in serialized
response (#78504)
---
src/sentry/uptime/endpoints/serializers.py | 4 +++-
tests/sentry/uptime/endpoints/test_serializers.py | 3 +++
2 files changed, 6 insertions(+), 1 deletion(-)
diff --git a/src/sentry/uptime/endpoints/serializers.py b/src/sentry/uptime/endpoints/serializers.py
index 6d1e4e66d92320..96bfc573f411b8 100644
--- a/src/sentry/uptime/endpoints/serializers.py
+++ b/src/sentry/uptime/endpoints/serializers.py
@@ -12,6 +12,7 @@
class ProjectUptimeSubscriptionSerializerResponse(TypedDict):
id: str
projectSlug: str
+ environment: str | None
name: str
status: int
mode: int
@@ -32,7 +33,7 @@ def __init__(self, expand=None):
def get_attrs(
self, item_list: Sequence[ProjectUptimeSubscription], user: Any, **kwargs: Any
) -> MutableMapping[Any, Any]:
- prefetch_related_objects(item_list, "uptime_subscription", "project")
+ prefetch_related_objects(item_list, "uptime_subscription", "project", "environment")
owners = list(filter(None, [item.owner for item in item_list]))
owners_serialized = serialize(
Actor.resolve_many(owners, filter_none=False), user, ActorSerializer()
@@ -58,6 +59,7 @@ def serialize(
return {
"id": str(obj.id),
"projectSlug": obj.project.slug,
+ "environment": obj.environment.name if obj.environment else None,
"name": obj.name or f"Uptime Monitoring for {obj.uptime_subscription.url}",
"status": obj.uptime_status,
"mode": obj.mode,
diff --git a/tests/sentry/uptime/endpoints/test_serializers.py b/tests/sentry/uptime/endpoints/test_serializers.py
index 4526fac75b1685..75eaf444428bdc 100644
--- a/tests/sentry/uptime/endpoints/test_serializers.py
+++ b/tests/sentry/uptime/endpoints/test_serializers.py
@@ -11,6 +11,7 @@ def test(self):
"id": str(uptime_monitor.id),
"projectSlug": self.project.slug,
"name": uptime_monitor.name,
+ "environment": uptime_monitor.environment.name if uptime_monitor.environment else None,
"status": uptime_monitor.uptime_status,
"mode": uptime_monitor.mode,
"url": uptime_monitor.uptime_subscription.url,
@@ -33,6 +34,7 @@ def test_default_name(self):
"id": str(uptime_monitor.id),
"projectSlug": self.project.slug,
"name": f"Uptime Monitoring for {uptime_monitor.uptime_subscription.url}",
+ "environment": uptime_monitor.environment.name if uptime_monitor.environment else None,
"status": uptime_monitor.uptime_status,
"mode": uptime_monitor.mode,
"url": uptime_monitor.uptime_subscription.url,
@@ -52,6 +54,7 @@ def test_owner(self):
"id": str(uptime_monitor.id),
"projectSlug": self.project.slug,
"name": uptime_monitor.name,
+ "environment": uptime_monitor.environment.name if uptime_monitor.environment else None,
"status": uptime_monitor.uptime_status,
"mode": uptime_monitor.mode,
"url": uptime_monitor.uptime_subscription.url,
From 41decd550ae9e127666d4c75b28889a66c21370f Mon Sep 17 00:00:00 2001
From: Ryan Skonnord
Date: Wed, 2 Oct 2024 14:41:34 -0700
Subject: [PATCH 046/139] ref(integrations): Introduce common dispatcher for
webhook commands (#77169)
Introduce `MessagingIntegrationCommand`, an abstraction for the global
set of supported chat commands with consistent strings to invoke them.
Consolidate the command-text parsing that previously lived separately in the
Discord, MS Teams, and Slack webhooks into `MessagingIntegrationCommandDispatcher`.
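As an illustration (not part of the patch), a minimal dispatcher built on the new abstraction might look like the sketch below; `EchoCommandDispatcher` and its reply strings are hypothetical, while the imported names come from the new `commands` module:

```python
from collections.abc import Callable, Iterable
from dataclasses import dataclass

from sentry.integrations.messaging import commands
from sentry.integrations.messaging.commands import (
    CommandInput,
    CommandNotMatchedError,
    MessagingIntegrationCommand,
    MessagingIntegrationCommandDispatcher,
)


@dataclass(frozen=True)
class EchoCommandDispatcher(MessagingIntegrationCommandDispatcher[str]):
    """Hypothetical dispatcher that replies with plain strings."""

    @property
    def command_handlers(
        self,
    ) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], str]]]:
        yield commands.HELP, (lambda _: "Here is the help text.")
        yield commands.LINK_IDENTITY, (lambda _: "Click here to link your account.")


dispatcher = EchoCommandDispatcher()
try:
    reply = dispatcher.dispatch(CommandInput("frobnicate"))
except CommandNotMatchedError:
    # Unknown commands fall back to help, mirroring what the webhook handlers do.
    reply = dispatcher.dispatch(CommandInput("help"))
```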
---
.../integrations/discord/webhooks/command.py | 74 ++++++++-----
src/sentry/integrations/messaging/commands.py | 103 ++++++++++++++++++
src/sentry/integrations/msteams/webhook.py | 73 +++++++++----
.../integrations/slack/requests/base.py | 5 +
.../integrations/slack/webhooks/base.py | 64 ++++++-----
5 files changed, 242 insertions(+), 77 deletions(-)
create mode 100644 src/sentry/integrations/messaging/commands.py
diff --git a/src/sentry/integrations/discord/webhooks/command.py b/src/sentry/integrations/discord/webhooks/command.py
index 5f411659987100..b5a5dcc16ae381 100644
--- a/src/sentry/integrations/discord/webhooks/command.py
+++ b/src/sentry/integrations/discord/webhooks/command.py
@@ -1,10 +1,20 @@
+from collections.abc import Callable, Iterable
+from dataclasses import dataclass
+
from rest_framework.response import Response
+from sentry.integrations.discord.requests.base import DiscordRequest
+from sentry.integrations.discord.utils import logger
from sentry.integrations.discord.views.link_identity import build_linking_url
from sentry.integrations.discord.views.unlink_identity import build_unlinking_url
from sentry.integrations.discord.webhooks.handler import DiscordInteractionHandler
-
-from ..utils import logger
+from sentry.integrations.messaging import commands
+from sentry.integrations.messaging.commands import (
+ CommandInput,
+ CommandNotMatchedError,
+ MessagingIntegrationCommand,
+ MessagingIntegrationCommandDispatcher,
+)
LINK_USER_MESSAGE = "[Click here]({url}) to link your Discord account to your Sentry account."
ALREADY_LINKED_MESSAGE = "You are already linked to the Sentry account with email: `{email}`."
@@ -22,12 +32,6 @@
"""
-class DiscordCommandNames:
- LINK = "link"
- UNLINK = "unlink"
- HELP = "help"
-
-
class DiscordCommandHandler(DiscordInteractionHandler):
"""
Handles logic for Discord Command interactions.
@@ -37,25 +41,35 @@ class DiscordCommandHandler(DiscordInteractionHandler):
def handle(self) -> Response:
command_name = self.request.get_command_name()
- logging_data = self.request.logging_data
+ cmd_input = CommandInput(command_name)
+ dispatcher = DiscordCommandDispatcher(self.request)
+ try:
+ message = dispatcher.dispatch(cmd_input)
+ except CommandNotMatchedError:
+ logger.warning(
+ "discord.interaction.command.unknown",
+ extra={"command": command_name, **self.request.logging_data},
+ )
+ message = dispatcher.help(cmd_input)
- if command_name == DiscordCommandNames.LINK:
- return self.link_user()
- elif command_name == DiscordCommandNames.UNLINK:
- return self.unlink_user()
- elif command_name == DiscordCommandNames.HELP:
- return self.help()
+ return self.send_message(message)
- logger.warning(
- "discord.interaction.command.unknown", extra={"command": command_name, **logging_data}
- )
- return self.help()
- def link_user(self) -> Response:
+@dataclass(frozen=True)
+class DiscordCommandDispatcher(MessagingIntegrationCommandDispatcher[str]):
+ request: DiscordRequest
+
+ @property
+ def command_handlers(
+ self,
+ ) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], str]]]:
+ yield commands.HELP, self.help
+ yield commands.LINK_IDENTITY, self.link_user
+ yield commands.UNLINK_IDENTITY, self.unlink_user
+
+ def link_user(self, _: CommandInput) -> str:
if self.request.has_identity():
- return self.send_message(
- ALREADY_LINKED_MESSAGE.format(email=self.request.get_identity_str())
- )
+ return ALREADY_LINKED_MESSAGE.format(email=self.request.get_identity_str())
if not self.request.integration or not self.request.user_id:
logger.warning(
@@ -65,18 +79,18 @@ def link_user(self) -> Response:
"hasUserId": self.request.user_id,
},
)
- return self.send_message(MISSING_DATA_MESSAGE)
+ return MISSING_DATA_MESSAGE
link_url = build_linking_url(
integration=self.request.integration,
discord_id=self.request.user_id,
)
- return self.send_message(LINK_USER_MESSAGE.format(url=link_url))
+ return LINK_USER_MESSAGE.format(url=link_url)
- def unlink_user(self) -> Response:
+ def unlink_user(self, _: CommandInput) -> str:
if not self.request.has_identity():
- return self.send_message(NOT_LINKED_MESSAGE)
+ return NOT_LINKED_MESSAGE
# if self.request.has_identity() then these must not be None
assert self.request.integration is not None
@@ -87,7 +101,7 @@ def unlink_user(self) -> Response:
discord_id=self.request.user_id,
)
- return self.send_message(UNLINK_USER_MESSAGE.format(url=unlink_url))
+ return UNLINK_USER_MESSAGE.format(url=unlink_url)
- def help(self) -> Response:
- return self.send_message(HELP_MESSAGE)
+ def help(self, _: CommandInput) -> str:
+ return HELP_MESSAGE
diff --git a/src/sentry/integrations/messaging/commands.py b/src/sentry/integrations/messaging/commands.py
new file mode 100644
index 00000000000000..767ceadd59a0c9
--- /dev/null
+++ b/src/sentry/integrations/messaging/commands.py
@@ -0,0 +1,103 @@
+import itertools
+from abc import ABC, abstractmethod
+from collections.abc import Callable, Iterable
+from dataclasses import dataclass
+from typing import Generic, TypeVar
+
+
+@dataclass(frozen=True, eq=True)
+class CommandInput:
+ cmd_value: str
+ arg_values: tuple[str, ...] = ()
+
+ def get_all_tokens(self) -> Iterable[str]:
+ yield self.cmd_value
+ yield from self.arg_values
+
+ def adjust(self, slug: "CommandSlug") -> "CommandInput":
+ """Remove the args that are part of a slug."""
+ token_count = len(slug.tokens) - 1
+ slug_part = [self.cmd_value] + list(self.arg_values)[:token_count]
+ remaining_args = self.arg_values[token_count:]
+ return CommandInput(" ".join(slug_part), remaining_args)
+
+
+class CommandNotMatchedError(Exception):
+ def __init__(self, message: str, unmatched_input: CommandInput) -> None:
+ super().__init__(message)
+ self.unmatched_input = unmatched_input
+
+
+class CommandSlug:
+ def __init__(self, text: str) -> None:
+ self.tokens = tuple(token.casefold() for token in text.strip().split())
+
+ def does_match(self, cmd_input: CommandInput) -> bool:
+ if not self.tokens:
+ return cmd_input.cmd_value == "" and not cmd_input.arg_values
+ cmd_prefix = itertools.islice(cmd_input.get_all_tokens(), 0, len(self.tokens))
+ cmd_tokens = tuple(token.casefold() for token in cmd_prefix)
+ return self.tokens == cmd_tokens
+
+ def __repr__(self):
+ joined_tokens = " ".join(self.tokens)
+ return f"{type(self).__name__}({joined_tokens!r})"
+
+
+class MessagingIntegrationCommand:
+ def __init__(self, name: str, command_text: str, aliases: Iterable[str] = ()) -> None:
+ super().__init__()
+ self.name = name
+ self.command_slug = CommandSlug(command_text)
+ self.aliases = frozenset(CommandSlug(alias) for alias in aliases)
+
+ @staticmethod
+ def _to_tokens(text: str) -> tuple[str, ...]:
+ return tuple(token.casefold() for token in text.strip().split())
+
+ def get_all_command_slugs(self) -> Iterable[CommandSlug]:
+ yield self.command_slug
+ yield from self.aliases
+
+
+MESSAGING_INTEGRATION_COMMANDS = (
+ HELP := MessagingIntegrationCommand("HELP", "help", aliases=("", "support", "docs")),
+ LINK_IDENTITY := MessagingIntegrationCommand("LINK_IDENTITY", "link"),
+ UNLINK_IDENTITY := MessagingIntegrationCommand("UNLINK_IDENTITY", "unlink"),
+ LINK_TEAM := MessagingIntegrationCommand("LINK_TEAM", "link team"),
+ UNLINK_TEAM := MessagingIntegrationCommand("UNLINK_TEAM", "unlink team"),
+)
+
+R = TypeVar("R") # response
+
+
+class MessagingIntegrationCommandDispatcher(Generic[R], ABC):
+ """The set of commands handled by one messaging integration."""
+
+ @property
+ @abstractmethod
+ def command_handlers(
+ self,
+ ) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], R]]]:
+ raise NotImplementedError
+
+ def dispatch(self, cmd_input: CommandInput) -> R:
+ candidate_handlers = [
+ (slug, callback)
+ for (command, callback) in self.command_handlers
+ for slug in command.get_all_command_slugs()
+ ]
+
+ def parsing_order(handler: tuple[CommandSlug, Callable[[CommandInput], R]]) -> int:
+ # Sort by descending length of arg tokens. If one slug is a prefix of
+ # another (e.g., "link" and "link team"), we must check for the longer
+ # one first.
+ slug, _ = handler
+ return -len(slug.tokens)
+
+ candidate_handlers.sort(key=parsing_order)
+ for (slug, callback) in candidate_handlers:
+ if slug.does_match(cmd_input):
+ arg_input = cmd_input.adjust(slug)
+ return callback(arg_input)
+ raise CommandNotMatchedError(f"{cmd_input=!r}", cmd_input)
diff --git a/src/sentry/integrations/msteams/webhook.py b/src/sentry/integrations/msteams/webhook.py
index dbfe7fd00f3184..2fac342351a211 100644
--- a/src/sentry/integrations/msteams/webhook.py
+++ b/src/sentry/integrations/msteams/webhook.py
@@ -2,7 +2,8 @@
import logging
import time
-from collections.abc import Callable, Mapping
+from collections.abc import Callable, Iterable, Mapping
+from dataclasses import dataclass
from enum import Enum
from typing import Any, cast
@@ -20,6 +21,13 @@
from sentry.api.base import Endpoint, all_silo_endpoint
from sentry.identity.services.identity import identity_service
from sentry.identity.services.identity.model import RpcIdentity
+from sentry.integrations.messaging import commands
+from sentry.integrations.messaging.commands import (
+ CommandInput,
+ CommandNotMatchedError,
+ MessagingIntegrationCommand,
+ MessagingIntegrationCommandDispatcher,
+)
from sentry.integrations.msteams import parsing
from sentry.integrations.msteams.spec import PROVIDER
from sentry.integrations.services.integration import integration_service
@@ -602,27 +610,50 @@ def _handle_channel_message(self, request: Request) -> Response:
def _handle_personal_message(self, request: Request) -> Response:
data = request.data
command_text = data.get("text", "").strip()
- lowercase_command = command_text.lower()
- conversation_id = data["conversation"]["id"]
- teams_user_id = data["from"]["id"]
-
- # only supporting unlink for now
- if "unlink" in lowercase_command:
- unlink_url = build_unlinking_url(conversation_id, data["serviceUrl"], teams_user_id)
- card = build_unlink_identity_card(unlink_url)
- elif "help" in lowercase_command:
- card = build_help_command_card()
- elif "link" == lowercase_command: # don't to match other types of link commands
- has_linked_identity = (
- identity_service.get_identity(filter={"identity_ext_id": teams_user_id}) is not None
- )
- if has_linked_identity:
- card = build_already_linked_identity_command_card()
- else:
- card = build_link_identity_command_card()
- else:
+
+ dispatcher = MsTeamsCommandDispatcher(data)
+ try:
+ card = dispatcher.dispatch(CommandInput(command_text))
+ except CommandNotMatchedError:
card = build_unrecognized_command_card(command_text)
client = get_preinstall_client(data["serviceUrl"])
- client.send_card(conversation_id, card)
+ client.send_card(dispatcher.conversation_id, card)
return self.respond(status=204)
+
+
+@dataclass(frozen=True)
+class MsTeamsCommandDispatcher(MessagingIntegrationCommandDispatcher[AdaptiveCard]):
+ data: dict[str, Any]
+
+ @property
+ def conversation_id(self) -> str:
+ return self.data["conversation"]["id"]
+
+ @property
+ def teams_user_id(self) -> str:
+ return self.data["from"]["id"]
+
+ @property
+ def command_handlers(
+ self,
+ ) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], AdaptiveCard]]]:
+ yield commands.HELP, (lambda _: build_help_command_card())
+ yield commands.LINK_IDENTITY, self.link_identity
+ yield commands.UNLINK_IDENTITY, self.unlink_identity
+
+ def link_identity(self, _: CommandInput) -> AdaptiveCard:
+ linked_identity = identity_service.get_identity(
+ filter={"identity_ext_id": self.teams_user_id}
+ )
+ has_linked_identity = linked_identity is not None
+ if has_linked_identity:
+ return build_already_linked_identity_command_card()
+ else:
+ return build_link_identity_command_card()
+
+ def unlink_identity(self, _: CommandInput) -> AdaptiveCard:
+ unlink_url = build_unlinking_url(
+ self.conversation_id, self.data["serviceUrl"], self.teams_user_id
+ )
+ return build_unlink_identity_card(unlink_url)
diff --git a/src/sentry/integrations/slack/requests/base.py b/src/sentry/integrations/slack/requests/base.py
index 5c4e75fe1db975..0a1f753f4ebc97 100644
--- a/src/sentry/integrations/slack/requests/base.py
+++ b/src/sentry/integrations/slack/requests/base.py
@@ -12,6 +12,7 @@
from sentry import options
from sentry.identity.services.identity import RpcIdentity, identity_service
from sentry.identity.services.identity.model import RpcIdentityProvider
+from sentry.integrations.messaging.commands import CommandInput
from sentry.integrations.services.integration import RpcIntegration, integration_service
from sentry.users.services.user import RpcUser
from sentry.users.services.user.service import user_service
@@ -276,5 +277,9 @@ def get_command_and_args(self) -> tuple[str, Sequence[str]]:
return "", []
return command[0], command[1:]
+ def get_command_input(self) -> CommandInput:
+ cmd, args = self.get_command_and_args()
+ return CommandInput(cmd, tuple(args))
+
def _validate_identity(self) -> None:
self.user = self.get_identity_user()
diff --git a/src/sentry/integrations/slack/webhooks/base.py b/src/sentry/integrations/slack/webhooks/base.py
index 1d2eba49c6ba1b..f5a4c16a56cc0e 100644
--- a/src/sentry/integrations/slack/webhooks/base.py
+++ b/src/sentry/integrations/slack/webhooks/base.py
@@ -1,17 +1,28 @@
from __future__ import annotations
import abc
+import logging
+from collections.abc import Callable, Iterable
+from dataclasses import dataclass
from rest_framework import status
from rest_framework.response import Response
from sentry.api.base import Endpoint
+from sentry.integrations.messaging import commands
+from sentry.integrations.messaging.commands import (
+ CommandInput,
+ CommandNotMatchedError,
+ MessagingIntegrationCommand,
+ MessagingIntegrationCommandDispatcher,
+)
from sentry.integrations.slack.message_builder.help import SlackHelpMessageBuilder
from sentry.integrations.slack.metrics import (
SLACK_WEBHOOK_DM_ENDPOINT_FAILURE_DATADOG_METRIC,
SLACK_WEBHOOK_DM_ENDPOINT_SUCCESS_DATADOG_METRIC,
)
from sentry.integrations.slack.requests.base import SlackDMRequest, SlackRequestError
+from sentry.utils import metrics
LINK_USER_MESSAGE = (
"<{associate_url}|Link your Slack identity> to your Sentry account to receive notifications. "
@@ -24,9 +35,6 @@
NOT_LINKED_MESSAGE = "You do not have a linked identity to unlink."
ALREADY_LINKED_MESSAGE = "You are already linked as `{username}`."
-import logging
-
-from sentry.utils import metrics
logger = logging.getLogger(__name__)
@@ -42,33 +50,21 @@ def post_dispatcher(self, request: SlackDMRequest) -> Response:
All Slack commands are handled by this endpoint. This block just
validates the request and dispatches it to the right handler.
"""
- command, args = request.get_command_and_args()
-
- if command in ["help", "", "support", "docs"]:
- return self.respond(SlackHelpMessageBuilder(command=command).build())
-
- if command == "link":
- if not args:
- return self.link_user(request)
-
- if args[0] == "team":
- return self.link_team(request)
-
- if command == "unlink":
- if not args:
- return self.unlink_user(request)
-
- if args[0] == "team":
- return self.unlink_team(request)
-
- # If we cannot interpret the command, print help text.
- request_data = request.data
- unknown_command = request_data.get("text", "").lower()
- return self.respond(SlackHelpMessageBuilder(unknown_command).build())
+ cmd_input = request.get_command_input()
+ try:
+ return SlackCommandDispatcher(self, request).dispatch(cmd_input)
+ except CommandNotMatchedError:
+ # If we cannot interpret the command, print help text.
+ request_data = request.data
+ unknown_command = request_data.get("text", "").lower()
+ return self.help(unknown_command)
def reply(self, slack_request: SlackDMRequest, message: str) -> Response:
raise NotImplementedError
+ def help(self, command: str) -> Response:
+ return self.respond(SlackHelpMessageBuilder(command).build())
+
def link_user(self, slack_request: SlackDMRequest) -> Response:
from sentry.integrations.slack.views.link_identity import build_linking_url
@@ -124,3 +120,19 @@ def link_team(self, slack_request: SlackDMRequest) -> Response:
def unlink_team(self, slack_request: SlackDMRequest) -> Response:
raise NotImplementedError
+
+
+@dataclass(frozen=True)
+class SlackCommandDispatcher(MessagingIntegrationCommandDispatcher[Response]):
+ endpoint: SlackDMEndpoint
+ request: SlackDMRequest
+
+ @property
+ def command_handlers(
+ self,
+ ) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], Response]]]:
+ yield commands.HELP, (lambda i: self.endpoint.help(i.cmd_value))
+ yield commands.LINK_IDENTITY, (lambda i: self.endpoint.link_user(self.request))
+ yield commands.UNLINK_IDENTITY, (lambda i: self.endpoint.unlink_user(self.request))
+ yield commands.LINK_TEAM, (lambda i: self.endpoint.link_team(self.request))
+ yield commands.UNLINK_TEAM, (lambda i: self.endpoint.unlink_team(self.request))
From dc3fc08495dd3d96e2d244d0b7e9c63f46f35ea2 Mon Sep 17 00:00:00 2001
From: Michelle Fu <83109586+mifu67@users.noreply.github.com>
Date: Wed, 2 Oct 2024 15:10:11 -0700
Subject: [PATCH 047/139] feat(alerts): require trigger actions to save metric
alerts (#78446)
Prevent saving a metric alert unless every trigger has an associated action.
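Roughly, a trigger in the request payload now has to carry at least one action, as in the placeholder sketch below (values are illustrative, drawn from the updated test); a trigger without an `actions` list is rejected with a 400:

```python
# Placeholder payload fragment; the team id is illustrative.
valid_triggers = [
    {
        "label": "critical",
        "alertThreshold": 75,
        "actions": [
            {"type": "email", "targetType": "team", "targetIdentifier": 123},
        ],
    },
]
```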
---
.../organization_alert_rule_index.py | 7 ++++++
.../test_organization_alert_rule_index.py | 23 ++++++++++++++-----
2 files changed, 24 insertions(+), 6 deletions(-)
diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_index.py b/src/sentry/incidents/endpoints/organization_alert_rule_index.py
index 91178a4eeed082..866654869b310e 100644
--- a/src/sentry/incidents/endpoints/organization_alert_rule_index.py
+++ b/src/sentry/incidents/endpoints/organization_alert_rule_index.py
@@ -121,6 +121,13 @@ def create_metric_alert(
if not serializer.is_valid():
raise ValidationError(serializer.errors)
+ # if there are no triggers, then the serializer will raise an error
+ for trigger in data["triggers"]:
+ if not trigger.get("actions", []):
+ raise ValidationError(
+ "Each trigger must have an associated action for this alert to fire."
+ )
+
trigger_sentry_app_action_creators_for_incidents(serializer.validated_data)
if get_slack_actions_with_async_lookups(organization, request.user, request.data):
# need to kick off an async job for Slack
diff --git a/tests/sentry/incidents/endpoints/test_organization_alert_rule_index.py b/tests/sentry/incidents/endpoints/test_organization_alert_rule_index.py
index 1b9a657794ce99..bf124c85aa2204 100644
--- a/tests/sentry/incidents/endpoints/test_organization_alert_rule_index.py
+++ b/tests/sentry/incidents/endpoints/test_organization_alert_rule_index.py
@@ -921,12 +921,15 @@ def test_critical_trigger_no_action(self):
}
with self.feature("organizations:incidents"):
- resp = self.get_success_response(
- self.organization.slug, status_code=201, **rule_one_trigger_only_critical_no_action
+ resp = self.get_error_response(
+ self.organization.slug, status_code=400, **rule_one_trigger_only_critical_no_action
)
- assert "id" in resp.data
- alert_rule = AlertRule.objects.get(id=resp.data["id"])
- assert resp.data == serialize(alert_rule, self.user)
+ assert resp.data == [
+ ErrorDetail(
+ string="Each trigger must have an associated action for this alert to fire.",
+ code="invalid",
+ )
+ ]
def test_invalid_projects(self):
with self.feature("organizations:incidents"):
@@ -1009,7 +1012,15 @@ def test_no_owner(self):
"name": "JustATestRule",
"resolveThreshold": 100,
"thresholdType": 1,
- "triggers": [{"label": "critical", "alertThreshold": 75}],
+ "triggers": [
+ {
+ "label": "critical",
+ "alertThreshold": 75,
+ "actions": [
+ {"type": "email", "targetType": "team", "targetIdentifier": self.team.id}
+ ],
+ }
+ ],
}
with self.feature("organizations:incidents"):
From 11413294ccc6649634a17eda0b3bc6a44fc118d8 Mon Sep 17 00:00:00 2001
From: Katie Byers
Date: Wed, 2 Oct 2024 15:23:59 -0700
Subject: [PATCH 048/139] ref(grouping): Fix and refactor `save_aggregate` test
(#78528)
In an upcoming PR, `_save_aggregate_new` is going to be absorbed into its caller, `assign_event_to_group`. While working on that PR, one of the things I did was to rename (and move) the `test_save_aggregate.py` test module (mostly because `_save_aggregate_new` is going away, but also to better reflect what it's actually testing, which is the locking behavior around new group creation).
It seemed a fairly benign change, but alas, a whole bunch of totally unrelated tests suddenly started failing in CI (but only in CI - not locally). It turns out the move/rename caused the `save_aggregate` tests to run in a different shard, which revealed that the tests aren't actually thread-safe. (Indeed, they never have been, but until my change they hadn't been running alongside any tests which were sensitive to that.) More specifically, the problem is that the mocking in these tests is done in each thread individually, rather than before the threads are split off. As far as I understand, this allows the mocking to interact with threads running other tests, thereby breaking them.
The fix, therefore, was to move the mocking out to the main level of the test function. As a bonus, this allowed for some further simplification: because the mocking wasn't thread-safe, it sometimes simply didn't happen, so the code run in the threads needed a `try-finally`, and we also had to manually close the transaction because those errors would prevent it from closing automatically. With the threading fix, we no longer need either of those things.
So this PR makes those changes, and also pulls in a few refactors which had originally been in that PR. Mostly they're cosmetic - moving things around, clarifying some comments, etc. The only substantive change pulled from that PR was switching from testing whether or not things are working (`is_race_free`) to whether or not they aren't (`lock_disabled`).
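In miniature, the pattern the tests move to looks like the self-contained sketch below (a stand-in, not the real test: it patches `time.sleep` rather than the event-manager internals). The patches are applied once in the main thread, before the workers are spawned, so no per-thread `try-finally` or manual transaction cleanup is needed:

```python
import time
from threading import Thread
from unittest.mock import patch


def worker(results: list[str]) -> None:
    # No patching in here; the patch below is already active for every thread.
    time.sleep(0.01)
    results.append("saved")


def run_concurrently(concurrency: int = 2) -> list[str]:
    # Patch once, up front, then spawn the workers inside the patch context.
    with patch("time.sleep"):
        results: list[str] = []
        threads = [Thread(target=worker, args=[results]) for _ in range(concurrency)]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        return results
```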
---
.../grouping/test_group_creation_lock.py | 98 +++++++++++++++++
.../event_manager/test_save_aggregate.py | 103 ------------------
2 files changed, 98 insertions(+), 103 deletions(-)
create mode 100644 tests/sentry/event_manager/grouping/test_group_creation_lock.py
delete mode 100644 tests/sentry/event_manager/test_save_aggregate.py
diff --git a/tests/sentry/event_manager/grouping/test_group_creation_lock.py b/tests/sentry/event_manager/grouping/test_group_creation_lock.py
new file mode 100644
index 00000000000000..988d5e51ef4ed1
--- /dev/null
+++ b/tests/sentry/event_manager/grouping/test_group_creation_lock.py
@@ -0,0 +1,98 @@
+import contextlib
+import time
+from threading import Thread
+from unittest.mock import patch
+
+import pytest
+
+from sentry.event_manager import GroupInfo, _save_aggregate_new
+from sentry.eventstore.models import Event
+from sentry.testutils.pytest.fixtures import django_db_all
+
+CONCURRENCY = 2
+
+
+class FakeTransactionModule:
+ @staticmethod
+ @contextlib.contextmanager
+ def atomic(*args, **kwds):
+ yield
+
+
+def save_event(project_id: int, return_values: list[GroupInfo]) -> None:
+ event = Event(
+ project_id,
+ "11212012123120120415201309082013",
+ data={"timestamp": time.time()},
+ )
+
+ group_info = _save_aggregate_new(
+ event=event,
+ job={"event_metadata": {}, "release": "dogpark", "event": event, "data": {}},
+ metric_tags={},
+ )
+
+ assert group_info is not None
+ return_values.append(group_info)
+
+
+@django_db_all(transaction=True)
+@pytest.mark.parametrize(
+ "lock_disabled",
+ [
+ # Group creation with transaction isolation (which is what powers the lock) disabled, to
+ # show that without it, multiple groups are created when there's a race condition while
+ # ingesting events with the same data. This variant exists so that we can ensure the test
+ # would detect a malfunctioning lock in principle, and does not just always pass because of
+ # low parallelism. In a sense this variant tests the efficacy of this test, not actual
+ # business logic.
+ #
+ # If this variant fails, CONCURRENCY needs to be increased or e.g. thread barriers need to
+ # be used to ensure data races. This does not seem to be necessary so far.
+ True,
+ # Regular group creation, in which the lock should be working
+ False,
+ ],
+ ids=(" lock_disabled: True ", " lock_disabled: False "),
+)
+def test_group_creation_race(monkeypatch, default_project, lock_disabled):
+ if lock_disabled:
+ # Disable transaction isolation just within event manager, but not in
+ # GroupHash.objects.create_or_update
+ monkeypatch.setattr("sentry.event_manager.transaction", FakeTransactionModule)
+
+ # `select_for_update` cannot be used outside of transactions
+ monkeypatch.setattr("django.db.models.QuerySet.select_for_update", lambda self: self)
+
+ with (
+ patch(
+ "sentry.grouping.ingest.hashing._calculate_event_grouping",
+ return_value=["pound sign", "octothorpe"],
+ ),
+ patch(
+ "sentry.event_manager._get_group_processing_kwargs",
+ return_value={"level": 10, "culprit": "", "data": {}},
+ ),
+ patch("sentry.event_manager._materialize_metadata_many"),
+ ):
+ return_values: list[GroupInfo] = []
+ threads = []
+
+ # Save the same event data in multiple threads. If the lock is working, only one new group
+ # should be created
+ for _ in range(CONCURRENCY):
+ thread = Thread(target=save_event, args=[default_project.id, return_values])
+ thread.start()
+ threads.append(thread)
+
+ for thread in threads:
+ thread.join()
+
+ if not lock_disabled:
+ # assert only one new group was created
+ assert len({group_info.group.id for group_info in return_values}) == 1
+ assert sum(group_info.is_new for group_info in return_values) == 1
+ else:
+ # assert multiple new groups were created
+ assert 1 < len({group_info.group.id for group_info in return_values}) <= CONCURRENCY
+ assert 1 < sum(group_info.is_new for group_info in return_values) <= CONCURRENCY
diff --git a/tests/sentry/event_manager/test_save_aggregate.py b/tests/sentry/event_manager/test_save_aggregate.py
deleted file mode 100644
index 828a191d818145..00000000000000
--- a/tests/sentry/event_manager/test_save_aggregate.py
+++ /dev/null
@@ -1,103 +0,0 @@
-import contextlib
-import time
-from threading import Thread
-from typing import Any
-from unittest.mock import patch
-
-import pytest
-from django.db import router, transaction
-
-from sentry.event_manager import _save_aggregate_new
-from sentry.eventstore.models import Event
-from sentry.models.grouphash import GroupHash
-from sentry.testutils.pytest.fixtures import django_db_all
-
-
-@django_db_all(transaction=True)
-@pytest.mark.parametrize(
- "is_race_free",
- [
- # regular group creation code, which is supposed to not have races
- True,
- # group creation code with removed transaction isolation, which is then
- # supposed to create multiple groups. This variant exists such that we can
- # ensure the test would find race conditions in principle, and does not
- # just always pass because of low parallelism. In a sense this variant
- # tests the efficacy of this test, not actual business logic.
- #
- # If this variant fails, CONCURRENCY needs to be increased or e.g. thread
- # barriers need to be used to ensure data races. This does not seem to be
- # necessary so far.
- False,
- ],
- ids=(" is_race_free: True ", " is_race_free: False "),
-)
-def test_group_creation_race_new(monkeypatch, default_project, is_race_free):
- CONCURRENCY = 2
-
- if not is_race_free:
-
- class FakeTransactionModule:
- @staticmethod
- @contextlib.contextmanager
- def atomic(*args, **kwds):
- yield
-
- # Disable transaction isolation just within event manager, but not in
- # GroupHash.objects.create_or_update
- monkeypatch.setattr("sentry.event_manager.transaction", FakeTransactionModule)
-
- # select_for_update cannot be used outside of transactions
- monkeypatch.setattr("django.db.models.QuerySet.select_for_update", lambda self: self)
-
- return_values = []
-
- event = Event(
- default_project.id,
- "11212012123120120415201309082013",
- data={"timestamp": time.time()},
- )
- hashes = ["pound sign", "octothorpe"]
-
- group_processing_kwargs = {"level": 10, "culprit": "", "data": {}}
- save_aggregate_kwargs: Any = {
- "event": event,
- "job": {"event_metadata": {}, "release": "dogpark", "event": event, "data": {}},
- "metric_tags": {},
- }
-
- def save_event():
- try:
- with patch(
- "sentry.grouping.ingest.hashing._calculate_event_grouping",
- return_value=hashes,
- ):
- with patch(
- "sentry.event_manager._get_group_processing_kwargs",
- return_value=group_processing_kwargs,
- ):
- with patch("sentry.event_manager._materialize_metadata_many"):
- group_info = _save_aggregate_new(**save_aggregate_kwargs)
-
- assert group_info is not None
- return_values.append(group_info)
- finally:
- transaction.get_connection(router.db_for_write(GroupHash)).close()
-
- threads = []
- for _ in range(CONCURRENCY):
- thread = Thread(target=save_event)
- thread.start()
- threads.append(thread)
-
- for thread in threads:
- thread.join()
-
- if is_race_free:
- # assert only one new group was created
- assert len({group_info.group.id for group_info in return_values}) == 1
- assert sum(group_info.is_new for group_info in return_values) == 1
- else:
- # assert multiple new groups were created
- assert 1 < len({group_info.group.id for group_info in return_values}) <= CONCURRENCY
- assert 1 < sum(group_info.is_new for group_info in return_values) <= CONCURRENCY
From a69e81baf2d6af029bb544ddb2f21f9aeede9d06 Mon Sep 17 00:00:00 2001
From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com>
Date: Wed, 2 Oct 2024 15:24:34 -0700
Subject: [PATCH 049/139] ref(tabs): Replace browserhistory.push with navigate
in tablist components (#78498)
`browserHistory` is deprecated; these tab list components now navigate via the `useNavigate` hook instead.
---
.../draggableTabs/draggableTabList.tsx | 5 ++-
static/app/components/tabs/tabList.tsx | 5 ++-
static/app/views/issueDetails/header.spec.tsx | 37 +++++++++++--------
.../streamline/eventNavigation.spec.tsx | 26 ++++++++-----
4 files changed, 44 insertions(+), 29 deletions(-)
diff --git a/static/app/components/draggableTabs/draggableTabList.tsx b/static/app/components/draggableTabs/draggableTabList.tsx
index b2bec19953f332..9426524ad4001f 100644
--- a/static/app/components/draggableTabs/draggableTabList.tsx
+++ b/static/app/components/draggableTabs/draggableTabList.tsx
@@ -29,9 +29,9 @@ import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {defined} from 'sentry/utils';
import {trackAnalytics} from 'sentry/utils/analytics';
-import {browserHistory} from 'sentry/utils/browserHistory';
import {useDimensions} from 'sentry/utils/useDimensions';
import {useDimensionsMultiple} from 'sentry/utils/useDimensionsMultiple';
+import {useNavigate} from 'sentry/utils/useNavigate';
import useOrganization from 'sentry/utils/useOrganization';
import type {DraggableTabListItemProps} from './item';
@@ -265,6 +265,7 @@ function BaseDraggableTabList({
tabVariant = 'filled',
...props
}: BaseDraggableTabListProps) {
+ const navigate = useNavigate();
const [hoveringKey, setHoveringKey] = useState(null);
const {rootProps, setTabListState} = useContext(TabsContext);
const organization = useOrganization();
@@ -295,7 +296,7 @@ function BaseDraggableTabList({
organization,
});
- browserHistory.push(linkTo);
+ navigate(linkTo);
},
isDisabled: disabled,
keyboardActivation,
diff --git a/static/app/components/tabs/tabList.tsx b/static/app/components/tabs/tabList.tsx
index 68cdb69b877a3b..eca812f1d53a08 100644
--- a/static/app/components/tabs/tabList.tsx
+++ b/static/app/components/tabs/tabList.tsx
@@ -14,7 +14,7 @@ import DropdownButton from 'sentry/components/dropdownButton';
import {IconEllipsis} from 'sentry/icons';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
-import {browserHistory} from 'sentry/utils/browserHistory';
+import {useNavigate} from 'sentry/utils/useNavigate';
import {TabsContext} from './index';
import type {TabListItemProps} from './item';
@@ -132,6 +132,7 @@ function BaseTabList({
variant = 'flat',
...props
}: BaseTabListProps) {
+ const navigate = useNavigate();
const tabListRef = useRef(null);
const {rootProps, setTabListState} = useContext(TabsContext);
const {
@@ -156,7 +157,7 @@ function BaseTabList({
if (!linkTo) {
return;
}
- browserHistory.push(linkTo);
+ navigate(linkTo);
},
isDisabled: disabled,
keyboardActivation,
diff --git a/static/app/views/issueDetails/header.spec.tsx b/static/app/views/issueDetails/header.spec.tsx
index c26b75560e95cc..61ad9f95a2b923 100644
--- a/static/app/views/issueDetails/header.spec.tsx
+++ b/static/app/views/issueDetails/header.spec.tsx
@@ -3,16 +3,17 @@ import {OrganizationFixture} from 'sentry-fixture/organization';
import {ProjectFixture} from 'sentry-fixture/project';
import {TeamFixture} from 'sentry-fixture/team';
+import {initializeOrg} from 'sentry-test/initializeOrg';
import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
import {IssueCategory, PriorityLevel} from 'sentry/types/group';
-import {browserHistory} from 'sentry/utils/browserHistory';
import GroupHeader from 'sentry/views/issueDetails/header';
import {ReprocessingStatus} from 'sentry/views/issueDetails/utils';
describe('GroupHeader', () => {
const baseUrl = 'BASE_URL/';
const organization = OrganizationFixture();
+ const {router} = initializeOrg();
const project = ProjectFixture({
teams: [TeamFixture()],
});
@@ -51,50 +52,52 @@ describe('GroupHeader', () => {
organization={orgWithFeatures}
project={jsProjectWithSimilarityView}
/>,
- {organization: orgWithFeatures}
+ {organization: orgWithFeatures, router}
);
await userEvent.click(screen.getByRole('tab', {name: /details/i}));
- expect(browserHistory.push).toHaveBeenLastCalledWith('BASE_URL/');
+ expect(router.push).toHaveBeenLastCalledWith(
+ expect.objectContaining({pathname: 'BASE_URL/'})
+ );
await userEvent.click(screen.getByRole('tab', {name: /activity/i}));
- expect(browserHistory.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
pathname: 'BASE_URL/activity/',
query: {},
});
await userEvent.click(screen.getByRole('tab', {name: /user feedback/i}));
- expect(browserHistory.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
pathname: 'BASE_URL/feedback/',
query: {},
});
await userEvent.click(screen.getByRole('tab', {name: /attachments/i}));
- expect(browserHistory.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
pathname: 'BASE_URL/attachments/',
query: {},
});
await userEvent.click(screen.getByRole('tab', {name: /tags/i}));
- expect(browserHistory.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
pathname: 'BASE_URL/tags/',
query: {},
});
await userEvent.click(screen.getByRole('tab', {name: /all events/i}));
- expect(browserHistory.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
pathname: 'BASE_URL/events/',
query: {},
});
await userEvent.click(screen.getByRole('tab', {name: /merged issues/i}));
- expect(browserHistory.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
pathname: 'BASE_URL/merged/',
query: {},
});
await userEvent.click(screen.getByRole('tab', {name: /replays/i}));
- expect(browserHistory.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
pathname: 'BASE_URL/replays/',
query: {},
});
@@ -137,11 +140,11 @@ describe('GroupHeader', () => {
organization={orgWithFeatures}
project={mobileProjectWithSimilarityView}
/>,
- {organization: orgWithFeatures}
+ {organization: orgWithFeatures, router}
);
await userEvent.click(screen.getByRole('tab', {name: /similar issues/i}));
- expect(browserHistory.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
pathname: 'BASE_URL/similar/',
query: {},
});
@@ -184,20 +187,22 @@ describe('GroupHeader', () => {
organization={orgWithFeatures}
project={projectWithSimilarityView}
/>,
- {organization: orgWithFeatures}
+ {organization: orgWithFeatures, router}
);
await userEvent.click(screen.getByRole('tab', {name: /details/i}));
- expect(browserHistory.push).toHaveBeenLastCalledWith('BASE_URL/');
+ expect(router.push).toHaveBeenLastCalledWith(
+ expect.objectContaining({pathname: 'BASE_URL/'})
+ );
await userEvent.click(screen.getByRole('tab', {name: /tags/i}));
- expect(browserHistory.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
pathname: 'BASE_URL/tags/',
query: {},
});
await userEvent.click(screen.getByRole('tab', {name: /sampled events/i}));
- expect(browserHistory.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
pathname: 'BASE_URL/events/',
query: {},
});
diff --git a/static/app/views/issueDetails/streamline/eventNavigation.spec.tsx b/static/app/views/issueDetails/streamline/eventNavigation.spec.tsx
index 804e637d8e88a7..487044d6417b3b 100644
--- a/static/app/views/issueDetails/streamline/eventNavigation.spec.tsx
+++ b/static/app/views/issueDetails/streamline/eventNavigation.spec.tsx
@@ -1,9 +1,11 @@
import {EventFixture} from 'sentry-fixture/event';
import {GroupFixture} from 'sentry-fixture/group';
+import {LocationFixture} from 'sentry-fixture/locationFixture';
+import {RouterFixture} from 'sentry-fixture/routerFixture';
+import {initializeOrg} from 'sentry-test/initializeOrg';
import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
-import {browserHistory} from 'sentry/utils/browserHistory';
import * as useMedia from 'sentry/utils/useMedia';
import {SectionKey, useEventDetails} from 'sentry/views/issueDetails/streamline/context';
import {EventNavigation} from 'sentry/views/issueDetails/streamline/eventNavigation';
@@ -11,6 +13,7 @@ import {EventNavigation} from 'sentry/views/issueDetails/streamline/eventNavigat
jest.mock('sentry/views/issueDetails/streamline/context');
describe('EventNavigation', () => {
+ const {router} = initializeOrg();
const testEvent = EventFixture({
id: 'event-id',
size: 7,
@@ -58,11 +61,11 @@ describe('EventNavigation', () => {
it('can navigate to the oldest event', async () => {
jest.spyOn(useMedia, 'default').mockReturnValue(true);
- render( );
+ render( , {router});
await userEvent.click(screen.getByRole('tab', {name: 'First'}));
- expect(browserHistory.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
pathname: '/organizations/org-slug/issues/group-id/events/oldest/',
query: {referrer: 'oldest-event'},
});
@@ -71,11 +74,11 @@ describe('EventNavigation', () => {
it('can navigate to the latest event', async () => {
jest.spyOn(useMedia, 'default').mockReturnValue(true);
- render( );
+ render( , {router});
await userEvent.click(screen.getByRole('tab', {name: 'Last'}));
- expect(browserHistory.push).toHaveBeenCalledWith({
+ expect(router.push).toHaveBeenCalledWith({
pathname: '/organizations/org-slug/issues/group-id/events/latest/',
query: {referrer: 'latest-event'},
});
@@ -84,15 +87,20 @@ describe('EventNavigation', () => {
it('can navigate to the recommended event', async () => {
jest.spyOn(useMedia, 'default').mockReturnValue(true);
+ const recommendedEventRouter = RouterFixture({
+ params: {eventId: 'latest'},
+ location: LocationFixture({
+ pathname: `/organizations/org-slug/issues/group-id/events/latest/`,
+ }),
+ });
+
render( , {
- router: {
- params: {eventId: 'latest'},
- },
+ router: recommendedEventRouter,
});
await userEvent.click(screen.getByRole('tab', {name: 'Recommended'}));
- expect(browserHistory.push).toHaveBeenCalledWith({
+ expect(recommendedEventRouter.push).toHaveBeenCalledWith({
pathname: '/organizations/org-slug/issues/group-id/events/recommended/',
query: {referrer: 'recommended-event'},
});
From 29ba4b6a72035c7fc260fa2764d13bc1e7df467b Mon Sep 17 00:00:00 2001
From: Evan Purkhiser
Date: Wed, 2 Oct 2024 18:42:27 -0400
Subject: [PATCH 050/139] ref(rr6): better name for router building function
(#78522)
---
static/app/main.tsx | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/static/app/main.tsx b/static/app/main.tsx
index 3c163d08a0f272..ea1fc5a9da54f4 100644
--- a/static/app/main.tsx
+++ b/static/app/main.tsx
@@ -20,7 +20,7 @@ import {buildReactRouter6Routes} from './utils/reactRouter6Compat/router';
const queryClient = new QueryClient(DEFAULT_QUERY_CLIENT_CONFIG);
-function createReactRouter6Routes() {
+function buildRouter() {
const sentryCreateBrowserRouter = wrapCreateBrowserRouter(createBrowserRouter);
const router = sentryCreateBrowserRouter(buildReactRouter6Routes(routes()));
DANGEROUS_SET_REACT_ROUTER_6_HISTORY(router);
@@ -29,7 +29,7 @@ function createReactRouter6Routes() {
}
function Main() {
- const [router] = useState(createReactRouter6Routes);
+ const [router] = useState(buildRouter);
return (
From bb5a6837cb5b3d8d3b174e17d42ec14486ef8738 Mon Sep 17 00:00:00 2001
From: Dan Fuller
Date: Wed, 2 Oct 2024 16:15:36 -0700
Subject: [PATCH 051/139] fix(uptime): No-op bugged backfill (#78535)
This backfill failed due to duplicate rows; we'll fix it and re-run it later.
---
.../migrations/0016_translate_uptime_object_headers_to_lists.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/sentry/uptime/migrations/0016_translate_uptime_object_headers_to_lists.py b/src/sentry/uptime/migrations/0016_translate_uptime_object_headers_to_lists.py
index 7632e1bbef05e6..278170b716b14c 100644
--- a/src/sentry/uptime/migrations/0016_translate_uptime_object_headers_to_lists.py
+++ b/src/sentry/uptime/migrations/0016_translate_uptime_object_headers_to_lists.py
@@ -30,7 +30,7 @@ class Migration(CheckedMigration):
# is a schema change, it's completely safe to run the operation after the code has deployed.
# Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
- is_post_deployment = False
+ is_post_deployment = True
dependencies = [
("uptime", "0015_headers_deafult_empty_list"),
From 5b2f28ddbc09563644ff405f87a2b494672d70e7 Mon Sep 17 00:00:00 2001
From: Andrew Liu <159852527+aliu39@users.noreply.github.com>
Date: Wed, 2 Oct 2024 18:09:26 -0700
Subject: [PATCH 052/139] ref(replay): capture parse and value errors in search
entrypoint (#78523)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Similar to https://github.com/getsentry/sentry/pull/76611/files. The
user.ip bug reported in
https://github.com/getsentry/sentry/issues/78286#issuecomment-2387065115
has no Sentry data, because of our API exception handlers. Sending these
to Sentry gives valuable debugging info for invalid searches.
HTTP response message (status 400):
![Screenshot 2024-10-02 at 1 56 52 PM](https://github.com/user-attachments/assets/846c66d4-2678-4768-907a-df853ae9c9cf)
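In condensed form, the pattern is: tag the org, report the exception to Sentry, then re-raise so the API exception handler still produces the 400 response. The sketch below assumes a wrapper name (`run_search`) and that `ParseError` is DRF's; the real change wraps the strategy selection shown in the diff:

```python
import sentry_sdk
from rest_framework.exceptions import ParseError  # assumption: stands in for the module's own ParseError


def run_search(organization, build_query):
    try:
        return build_query()
    except (ParseError, ValueError) as exc:
        # Tag and capture before re-raising, so the error reaches Sentry with org context.
        sentry_sdk.set_tag("org_id", organization.id if organization else None)
        sentry_sdk.capture_exception(exc)
        raise
```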
---
src/sentry/replays/usecases/query/__init__.py | 59 ++++++++++---------
1 file changed, 32 insertions(+), 27 deletions(-)
diff --git a/src/sentry/replays/usecases/query/__init__.py b/src/sentry/replays/usecases/query/__init__.py
index 5b1bbe2edaf5a5..1068a39fceb856 100644
--- a/src/sentry/replays/usecases/query/__init__.py
+++ b/src/sentry/replays/usecases/query/__init__.py
@@ -219,33 +219,38 @@ def query_using_optimized_search(
SearchFilter(SearchKey("environment"), "IN", SearchValue(environments)),
]
- # Translate "viewed_by_me" filters, which are aliases for "viewed_by_id"
- search_filters = handle_viewed_by_me_filters(search_filters, request_user_id)
-
- if preferred_source == "materialized-view":
- query, referrer, source = _query_using_materialized_view_strategy(
- search_filters,
- sort,
- project_ids,
- period_start,
- period_stop,
- )
- elif preferred_source == "aggregated":
- query, referrer, source = _query_using_aggregated_strategy(
- search_filters,
- sort,
- project_ids,
- period_start,
- period_stop,
- )
- else:
- query, referrer, source = _query_using_scalar_strategy(
- search_filters,
- sort,
- project_ids,
- period_start,
- period_stop,
- )
+ try:
+ # Translate "viewed_by_me" filters, which are aliases for "viewed_by_id"
+ search_filters = handle_viewed_by_me_filters(search_filters, request_user_id)
+
+ if preferred_source == "materialized-view":
+ query, referrer, source = _query_using_materialized_view_strategy(
+ search_filters,
+ sort,
+ project_ids,
+ period_start,
+ period_stop,
+ )
+ elif preferred_source == "aggregated":
+ query, referrer, source = _query_using_aggregated_strategy(
+ search_filters,
+ sort,
+ project_ids,
+ period_start,
+ period_stop,
+ )
+ else:
+ query, referrer, source = _query_using_scalar_strategy(
+ search_filters,
+ sort,
+ project_ids,
+ period_start,
+ period_stop,
+ )
+ except (ParseError, ValueError) as exc:
+ sentry_sdk.set_tag("org_id", organization.id if organization else None)
+ sentry_sdk.capture_exception(exc)
+ raise
query = query.set_limit(pagination.limit)
query = query.set_offset(pagination.offset)
From 40414eb3ea9d75108a5f80d6a9646be36076198a Mon Sep 17 00:00:00 2001
From: Alexander Tarasov
Date: Thu, 3 Oct 2024 08:59:46 +0200
Subject: [PATCH 053/139] feat(security): move GitHub Secret Scanning from
getsentry (#78386)
Merely moving the code from
https://github.com/getsentry/getsentry/pull/14624 (and follow-up fixes)
to the sentry repo.
---
.../api/endpoints/secret_scanning/github.py | 176 ++++++++++++++++
src/sentry/api/urls.py | 7 +
src/sentry/options/defaults.py | 8 +
.../sentry/emails/secret-scanning/body.html | 17 ++
.../sentry/emails/secret-scanning/body.txt | 15 ++
src/sentry/utils/github.py | 45 ++++
src/sentry/utils/github_client.py | 80 +++++++
static/app/data/controlsiloUrlPatterns.ts | 1 +
.../endpoints/secret_scanning/test_github.py | 196 ++++++++++++++++++
tests/sentry/utils/test_github.py | 68 ++++++
10 files changed, 613 insertions(+)
create mode 100644 src/sentry/api/endpoints/secret_scanning/github.py
create mode 100644 src/sentry/templates/sentry/emails/secret-scanning/body.html
create mode 100644 src/sentry/templates/sentry/emails/secret-scanning/body.txt
create mode 100644 src/sentry/utils/github.py
create mode 100644 src/sentry/utils/github_client.py
create mode 100644 tests/sentry/api/endpoints/secret_scanning/test_github.py
create mode 100644 tests/sentry/utils/test_github.py
diff --git a/src/sentry/api/endpoints/secret_scanning/github.py b/src/sentry/api/endpoints/secret_scanning/github.py
new file mode 100644
index 00000000000000..c96362be3b7140
--- /dev/null
+++ b/src/sentry/api/endpoints/secret_scanning/github.py
@@ -0,0 +1,176 @@
+import hashlib
+import logging
+
+import sentry_sdk
+from django.http import HttpResponse
+from django.utils import timezone
+from django.utils.decorators import method_decorator
+from django.views.decorators.csrf import csrf_exempt
+from django.views.generic.base import View
+
+from sentry import options
+from sentry.hybridcloud.models import ApiTokenReplica, OrgAuthTokenReplica
+from sentry.models.apitoken import ApiToken
+from sentry.models.orgauthtoken import OrgAuthToken
+from sentry.organizations.absolute_url import generate_organization_url
+from sentry.organizations.services.organization import organization_service
+from sentry.types.token import AuthTokenType
+from sentry.users.models.user import User
+from sentry.utils import json, metrics
+from sentry.utils.email import MessageBuilder
+from sentry.utils.github import verify_signature
+from sentry.utils.http import absolute_uri
+from sentry.web.frontend.base import control_silo_view
+
+logger = logging.getLogger(__name__)
+
+TOKEN_TYPE_HUMAN_READABLE = {
+ AuthTokenType.USER: "User Auth Token",
+ AuthTokenType.ORG: "Organization Auth Token",
+}
+
+REVOKE_URLS = {
+ AuthTokenType.USER: "/settings/account/api/auth-tokens/",
+ AuthTokenType.ORG: "/settings/auth-tokens/",
+}
+
+
+@control_silo_view
+class SecretScanningGitHubEndpoint(View):
+ @method_decorator(csrf_exempt)
+ def dispatch(self, request, *args, **kwargs):
+ if request.method != "POST":
+ return HttpResponse(status=405)
+
+ response = super().dispatch(request, *args, **kwargs)
+ metrics.incr(
+ "secret-scanning.github.webhooks",
+ 1,
+ tags={"status": response.status_code},
+ skip_internal=False,
+ )
+ return response
+
+ def post(self, request):
+ if request.headers.get("Content-Type") != "application/json":
+ return HttpResponse(
+ json.dumps({"details": "invalid content type specified"}), status=400
+ )
+
+ payload = request.body.decode("utf-8")
+ signature = request.headers.get("Github-Public-Key-Signature")
+ key_id = request.headers.get("Github-Public-Key-Identifier")
+
+ try:
+ if options.get("secret-scanning.github.enable-signature-verification"):
+ verify_signature(
+ payload,
+ signature,
+ key_id,
+ "secret_scanning",
+ )
+ except ValueError as e:
+ sentry_sdk.capture_exception(e)
+ return HttpResponse(json.dumps({"details": "invalid signature"}), status=400)
+
+ secret_alerts = json.loads(payload)
+ response = []
+ for secret_alert in secret_alerts:
+ alerted_token_str = secret_alert["token"]
+ hashed_alerted_token = hashlib.sha256(alerted_token_str.encode()).hexdigest()
+
+ # no prefix tokens could indicate old user auth tokens with no prefixes
+ token_type = AuthTokenType.USER
+ if alerted_token_str.startswith(AuthTokenType.ORG):
+ token_type = AuthTokenType.ORG
+ elif alerted_token_str.startswith((AuthTokenType.USER_APP, AuthTokenType.INTEGRATION)):
+ # TODO: add support for other token types
+ return HttpResponse(
+ json.dumps({"details": "auth token type is not implemented"}), status=501
+ )
+
+ try:
+ token: ApiToken | OrgAuthToken
+
+ if token_type == AuthTokenType.USER:
+ token = ApiToken.objects.get(hashed_token=hashed_alerted_token)
+
+ if token_type == AuthTokenType.ORG:
+ token = OrgAuthToken.objects.get(
+ token_hashed=hashed_alerted_token, date_deactivated=None
+ )
+
+ extra = {
+ "exposed_source": secret_alert["source"],
+ "exposed_url": secret_alert["url"],
+ "hashed_token": hashed_alerted_token,
+ "token_type": token_type,
+ }
+ logger.info("found an exposed auth token", extra=extra)
+
+ # TODO: mark an API token as exposed in the database
+
+ # TODO: expose this option in the UI
+ revoke_action_enabled = False
+ if revoke_action_enabled:
+ # TODO: revoke token
+ pass
+
+ # Send an email
+ url_prefix = options.get("system.url-prefix")
+ if isinstance(token, ApiToken):
+ # for user token, send an alert to the token owner
+ users = User.objects.filter(id=token.user_id)
+ elif isinstance(token, OrgAuthToken):
+ # for org token, send an alert to all organization owners
+ organization = organization_service.get(id=token.organization_id)
+ if organization is None:
+ continue
+
+ owner_members = organization_service.get_organization_owner_members(
+ organization_id=organization.id
+ )
+ user_ids = [om.user_id for om in owner_members]
+ users = User.objects.filter(id__in=user_ids)
+
+ url_prefix = generate_organization_url(organization.slug)
+
+ token_type_human_readable = TOKEN_TYPE_HUMAN_READABLE.get(token_type, "Auth Token")
+
+ revoke_url = absolute_uri(REVOKE_URLS.get(token_type, "/"), url_prefix=url_prefix)
+
+ context = {
+ "datetime": timezone.now(),
+ "token_name": token.name,
+ "token_type": token_type_human_readable,
+ "token_redacted": f"{token_type}...{token.token_last_characters}",
+ "hashed_token": hashed_alerted_token,
+ "exposed_source": secret_alert["source"],
+ "exposed_url": secret_alert["url"],
+ "revoke_url": revoke_url,
+ }
+
+ subject = f"Action Required: {token_type_human_readable} Exposed"
+ msg = MessageBuilder(
+ subject="{}{}".format(options.get("mail.subject-prefix"), subject),
+ template="sentry/emails/secret-scanning/body.txt",
+ html_template="sentry/emails/secret-scanning/body.html",
+ type="user.secret-scanning-alert",
+ context=context,
+ )
+ msg.send_async([u.username for u in users])
+ except (
+ ApiToken.DoesNotExist,
+ ApiTokenReplica.DoesNotExist,
+ OrgAuthToken.DoesNotExist,
+ OrgAuthTokenReplica.DoesNotExist,
+ ):
+ response.append(
+ {
+ "token_hash": hashed_alerted_token,
+ "token_type": secret_alert["type"],
+ "label": "false_positive",
+ }
+ )
+
+ return HttpResponse(json.dumps(response), status=200)
diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py
index a99cf0986ea9c3..1110a8d33ec45f 100644
--- a/src/sentry/api/urls.py
+++ b/src/sentry/api/urls.py
@@ -55,6 +55,7 @@
from sentry.api.endpoints.relocations.recover import RelocationRecoverEndpoint
from sentry.api.endpoints.relocations.retry import RelocationRetryEndpoint
from sentry.api.endpoints.relocations.unpause import RelocationUnpauseEndpoint
+from sentry.api.endpoints.secret_scanning.github import SecretScanningGitHubEndpoint
from sentry.api.endpoints.seer_rpc import SeerRpcServiceEndpoint
from sentry.api.endpoints.source_map_debug_blue_thunder_edition import (
SourceMapDebugBlueThunderEditionEndpoint,
@@ -3320,6 +3321,12 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
RelocationPublicKeyEndpoint.as_view(),
name="sentry-api-0-relocations-public-key",
),
+ # Secret Scanning
+ re_path(
+ r"^secret-scanning/github/$",
+ SecretScanningGitHubEndpoint.as_view(),
+ name="sentry-api-0-secret-scanning-github",
+ ),
# Catch all
re_path(
r"^$",
diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py
index 743c84b486ec48..83572f3dc0471f 100644
--- a/src/sentry/options/defaults.py
+++ b/src/sentry/options/defaults.py
@@ -2747,3 +2747,11 @@
default=False,
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
+
+# Secret Scanning. Allows signature verification to be temporarily disabled.
+register(
+ "secret-scanning.github.enable-signature-verification",
+ type=Bool,
+ default=True,
+ flags=FLAG_AUTOMATOR_MODIFIABLE,
+)
diff --git a/src/sentry/templates/sentry/emails/secret-scanning/body.html b/src/sentry/templates/sentry/emails/secret-scanning/body.html
new file mode 100644
index 00000000000000..6d878b97bad191
--- /dev/null
+++ b/src/sentry/templates/sentry/emails/secret-scanning/body.html
@@ -0,0 +1,17 @@
+{% extends "sentry/emails/base.html" %}
+
+{% load i18n %}
+
+{% block main %}
+ {{ token_type }} exposed
+ Your Sentry {{ token_type }} was found publicly on the internet. We recommend revoking this token immediately, as exposed tokens pose a security risk to your account.
+
+Name: {{ token_name }}
+Token: {{ token_redacted }}
+SHA256: {{ hashed_token }}
+
+Source: {{ exposed_source }}
+URL: {{ exposed_url }}
+Date: {{ datetime|date:"N j, Y, P e" }}
+ Read more about <a href="https://docs.sentry.io/account/auth-tokens/">Sentry Auth Tokens</a>.
+{% endblock %}
diff --git a/src/sentry/templates/sentry/emails/secret-scanning/body.txt b/src/sentry/templates/sentry/emails/secret-scanning/body.txt
new file mode 100644
index 00000000000000..4f0c01d488ac75
--- /dev/null
+++ b/src/sentry/templates/sentry/emails/secret-scanning/body.txt
@@ -0,0 +1,15 @@
+{{ token_type }} exposed
+
+Your Sentry {{ token_type }} was found publicly on the internet. We recommend revoking this token immediately, as exposed tokens pose a security risk to your account:
+{{ revoke_url }}
+
+Name: {{ token_name }}
+Token: {{ token_redacted }}
+SHA256: {{ hashed_token }}
+
+Source: {{ exposed_source }}
+URL: {{ exposed_url }}
+Date: {{ datetime|date:"N j, Y, P e" }}
+
+Read more about Sentry Auth Tokens:
+https://docs.sentry.io/account/auth-tokens/
diff --git a/src/sentry/utils/github.py b/src/sentry/utils/github.py
new file mode 100644
index 00000000000000..9c8eab15f11717
--- /dev/null
+++ b/src/sentry/utils/github.py
@@ -0,0 +1,45 @@
+import base64
+import binascii
+from typing import Any
+
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import ec
+from pydantic import BaseModel
+
+from sentry import options
+
+from .github_client import GitHubClient
+
+
+class GitHubKeysPayload(BaseModel):
+ public_keys: list[dict[str, Any]]
+
+
+def verify_signature(payload: str, signature: str, key_id: str, subpath: str) -> None:
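+ """Verify a GitHub webhook signature against the public keys GitHub publishes
+ at /meta/public_keys/<subpath>. Raises ValueError if verification fails.
+ """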
+ if not payload or not signature or not key_id:
+ raise ValueError("Invalid payload, signature, or key_id")
+
+ client_id = options.get("github-login.client-id")
+ client_secret = options.get("github-login.client-secret")
+ client = GitHubClient(client_id=client_id, client_secret=client_secret)
+ response = client.get(f"/meta/public_keys/{subpath}")
+ keys = GitHubKeysPayload.parse_obj(response)
+
+ public_key = next((k for k in keys.public_keys if k["key_identifier"] == key_id), None)
+ if not public_key:
+ raise ValueError("No public key found matching key identifier")
+
+ key = serialization.load_pem_public_key(public_key["key"].encode())
+
+ if not isinstance(key, ec.EllipticCurvePublicKey):
+ raise ValueError("Invalid public key type")
+
+ try:
+ # Decode the base64 signature to bytes
+ signature_bytes = base64.b64decode(signature)
+ key.verify(signature_bytes, payload.encode(), ec.ECDSA(hashes.SHA256()))
+ except InvalidSignature:
+ raise ValueError("Signature does not match payload")
+ except binascii.Error:
+ raise ValueError("Invalid signature encoding")
diff --git a/src/sentry/utils/github_client.py b/src/sentry/utils/github_client.py
new file mode 100644
index 00000000000000..37a079c545338f
--- /dev/null
+++ b/src/sentry/utils/github_client.py
@@ -0,0 +1,80 @@
+from requests.exceptions import HTTPError
+
+from sentry.http import build_session
+from sentry.utils import json
+
+
+class ApiError(Exception):
+ code = None
+ json = None
+ xml = None
+
+ def __init__(self, text, code=None):
+ if code is not None:
+ self.code = code
+ self.text = text
+ # TODO(dcramer): pull in XML support from Jira
+ if text:
+ try:
+ self.json = json.loads(text)
+ except (json.JSONDecodeError, ValueError):
+ self.json = None
+ else:
+ self.json = None
+ super().__init__(text[:128])
+
+ @classmethod
+ def from_response(cls, response):
+ if response.status_code == 401:
+ return ApiUnauthorized(response.text)
+ return cls(response.text, response.status_code)
+
+
+class ApiUnauthorized(ApiError):
+ code = 401
+
+
+class GitHubClient:
+ ApiError = ApiError
+
+ url = "https://api.github.com"
+
+ def __init__(self, url=None, token=None, client_id=None, client_secret=None):
+ if url is not None:
+ self.url = url.rstrip("/")
+ self.token = token
+ self.client_id = client_id
+ self.client_secret = client_secret
+
+ def _request(self, method, path, headers=None, data=None, params=None, auth=None):
+ with build_session() as session:
+ try:
+ resp = getattr(session, method.lower())(
+ url=f"{self.url}{path}",
+ headers=headers,
+ json=data,
+ params=params,
+ allow_redirects=True,
+ auth=auth,
+ )
+ resp.raise_for_status()
+ except HTTPError as e:
+ raise ApiError.from_response(e.response)
+ return resp.json()
+
+ def request(self, method, path, data=None, params=None, auth=None):
+ headers = {"Accept": "application/vnd.github.valkyrie-preview+json"}
+
+ if self.token:
+ headers.setdefault("Authorization", f"token {self.token}")
+
+ elif auth is None and self.client_id and self.client_secret:
+ auth = (self.client_id, self.client_secret)
+
+ return self._request(method, path, headers=headers, data=data, params=params, auth=auth)
+
+ def get(self, *args, **kwargs):
+ return self.request("GET", *args, **kwargs)
+
+ def post(self, *args, **kwargs):
+ return self.request("POST", *args, **kwargs)
diff --git a/static/app/data/controlsiloUrlPatterns.ts b/static/app/data/controlsiloUrlPatterns.ts
index a1d35367955068..f0e1959803be80 100644
--- a/static/app/data/controlsiloUrlPatterns.ts
+++ b/static/app/data/controlsiloUrlPatterns.ts
@@ -136,6 +136,7 @@ const patterns: RegExp[] = [
new RegExp('^api/0/internal/integration-proxy/$'),
new RegExp('^api/0/internal/rpc/[^/]+/[^/]+/$'),
new RegExp('^api/0/internal/feature-flags/$'),
+ new RegExp('^api/0/secret-scanning/github/$'),
new RegExp('^api/hooks/mailgun/inbound/'),
new RegExp('^oauth/authorize/$'),
new RegExp('^oauth/token/$'),
diff --git a/tests/sentry/api/endpoints/secret_scanning/test_github.py b/tests/sentry/api/endpoints/secret_scanning/test_github.py
new file mode 100644
index 00000000000000..902e61e4eb7299
--- /dev/null
+++ b/tests/sentry/api/endpoints/secret_scanning/test_github.py
@@ -0,0 +1,196 @@
+from unittest.mock import patch
+
+from django.core import mail
+from django.urls import reverse
+from django.utils import timezone
+
+from sentry.models.apitoken import ApiToken
+from sentry.models.orgauthtoken import OrgAuthToken
+from sentry.testutils.cases import TestCase
+from sentry.testutils.helpers import override_options
+from sentry.testutils.silo import control_silo_test
+from sentry.types.token import AuthTokenType
+from sentry.utils import json
+from sentry.utils.security.orgauthtoken_token import generate_token, hash_token
+
+
+@control_silo_test
+class SecretScanningGitHubTest(TestCase):
+ path = reverse("sentry-api-0-secret-scanning-github")
+
+ def test_invalid_content_type(self):
+ response = self.client.post(self.path, content_type="application/x-www-form-urlencoded")
+ assert response.status_code == 400
+ assert response.content == b'{"details":"invalid content type specified"}'
+
+ def test_invalid_signature(self):
+ response = self.client.post(self.path, content_type="application/json")
+ assert response.status_code == 400
+ assert response.content == b'{"details":"invalid signature"}'
+
+ @override_options({"secret-scanning.github.enable-signature-verification": False})
+ def test_false_positive(self):
+ payload = [
+ {
+ "source": "commit",
+ "token": "some_token",
+ "type": "some_type",
+ "url": "https://example.com/base-repo-url/",
+ }
+ ]
+ response = self.client.post(self.path, content_type="application/json", data=payload)
+ assert response.status_code == 200
+ assert (
+ response.content
+ == b'[{"token_hash":"9a45520a1213f15016d2d768b5fb3d904492a44ee274b44d4de8803e00fb536a","token_type":"some_type","label":"false_positive"}]'
+ )
+
+ @override_options({"secret-scanning.github.enable-signature-verification": False})
+ def test_false_positive_deactivated_user_token(self):
+ user = self.create_user()
+ token = ApiToken.objects.create(user=user, name="test user token", scope_list=[])
+
+ # revoke token
+ token.delete()
+
+ payload = [
+ {
+ "source": "commit",
+ "token": str(token),
+ "type": "sentry_user_auth_token",
+ "url": "https://example.com/base-repo-url/",
+ }
+ ]
+
+ with self.tasks():
+ response = self.client.post(self.path, content_type="application/json", data=payload)
+ assert response.status_code == 200
+ expected = [
+ {
+ "token_hash": hash_token(str(token)),
+ "token_type": "sentry_user_auth_token",
+ "label": "false_positive",
+ }
+ ]
+ assert json.loads(response.content.decode("utf-8")) == expected
+
+ assert len(mail.outbox) == 0
+
+ @override_options({"secret-scanning.github.enable-signature-verification": False})
+ def test_false_positive_deactivated_org_token(self):
+ token_str = generate_token("test-org", "https://test-region.sentry.io")
+ hash_digest = hash_token(token_str)
+ token = OrgAuthToken.objects.create(
+ organization_id=self.organization.id,
+ name="test org token",
+ scope_list=["org:ci"],
+ token_hashed=hash_digest,
+ )
+
+ # revoke token
+ token.update(date_deactivated=timezone.now())
+
+ payload = [
+ {
+ "source": "commit",
+ "token": token_str,
+ "type": "sentry_org_auth_token",
+ "url": "https://example.com/base-repo-url/",
+ }
+ ]
+
+ with self.tasks():
+ response = self.client.post(self.path, content_type="application/json", data=payload)
+ assert response.status_code == 200
+ expected = [
+ {
+ "token_hash": hash_digest,
+ "token_type": "sentry_org_auth_token",
+ "label": "false_positive",
+ }
+ ]
+ assert json.loads(response.content.decode("utf-8")) == expected
+
+ assert len(mail.outbox) == 0
+
+ @override_options({"secret-scanning.github.enable-signature-verification": False})
+ @patch("sentry.api.endpoints.secret_scanning.github.logger")
+ def test_true_positive_user_token(self, mock_logger):
+ user = self.create_user()
+ token = ApiToken.objects.create(user=user, name="test user token", scope_list=[])
+
+ payload = [
+ {
+ "source": "commit",
+ "token": str(token),
+ "type": "sentry_user_auth_token",
+ "url": "https://example.com/base-repo-url/",
+ }
+ ]
+
+ with self.tasks():
+ response = self.client.post(self.path, content_type="application/json", data=payload)
+ assert response.status_code == 200
+ assert response.content == b"[]"
+
+ extra = {
+ "exposed_source": "commit",
+ "exposed_url": "https://example.com/base-repo-url/",
+ "hashed_token": token.hashed_token,
+ "token_type": AuthTokenType.USER,
+ }
+ mock_logger.info.assert_called_with("found an exposed auth token", extra=extra)
+
+ assert len(mail.outbox) == 1
+ assert mail.outbox[0].to == [user.username]
+ assert mail.outbox[0].subject == "[Sentry]Action Required: User Auth Token Exposed"
+ assert (
+ "Your Sentry User Auth Token was found publicly on the internet" in mail.outbox[0].body
+ )
+ assert "http://testserver/settings/account/api/auth-tokens" in mail.outbox[0].body
+ assert "test user token" in mail.outbox[0].body
+ assert token.hashed_token in mail.outbox[0].body
+
+ @override_options({"secret-scanning.github.enable-signature-verification": False})
+ @patch("sentry.api.endpoints.secret_scanning.github.logger")
+ def test_true_positive_org_token(self, mock_logger):
+ token_str = generate_token("test-org", "https://test-region.sentry.io")
+ token = OrgAuthToken.objects.create(
+ organization_id=self.organization.id,
+ name="test org token",
+ scope_list=["org:ci"],
+ token_hashed=hash_token(token_str),
+ )
+
+ payload = [
+ {
+ "source": "commit",
+ "token": token_str,
+ "type": "sentry_org_auth_token",
+ "url": "https://example.com/base-repo-url/",
+ }
+ ]
+
+ with self.tasks():
+ response = self.client.post(self.path, content_type="application/json", data=payload)
+ assert response.status_code == 200
+ assert response.content == b"[]"
+
+ extra = {
+ "exposed_source": "commit",
+ "exposed_url": "https://example.com/base-repo-url/",
+ "hashed_token": token.token_hashed,
+ "token_type": AuthTokenType.ORG,
+ }
+ mock_logger.info.assert_called_with("found an exposed auth token", extra=extra)
+
+ assert len(mail.outbox) == 1
+ assert mail.outbox[0].to == [self.user.username]
+ assert mail.outbox[0].subject == "[Sentry]Action Required: Organization Auth Token Exposed"
+ assert (
+ "Your Sentry Organization Auth Token was found publicly on the internet"
+ in mail.outbox[0].body
+ )
+ assert "http://baz.testserver/settings/auth-tokens/" in mail.outbox[0].body
+ assert "test org token" in mail.outbox[0].body
+ assert token.token_hashed in mail.outbox[0].body
diff --git a/tests/sentry/utils/test_github.py b/tests/sentry/utils/test_github.py
new file mode 100644
index 00000000000000..79212823ab51ea
--- /dev/null
+++ b/tests/sentry/utils/test_github.py
@@ -0,0 +1,68 @@
+from unittest import TestCase
+
+import pytest
+import responses
+
+from sentry.utils.github import verify_signature
+
+GITHUB_META_PUBLIC_KEYS_RESPONSE = {
+ "public_keys": [
+ {
+ "key_identifier": "90a421169f0a406205f1563a953312f0be898d3c7b6c06b681aa86a874555f4a",
+ "key": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE9MJJHnMfn2+H4xL4YaPDA4RpJqUq\nkCmRCBnYERxZanmcpzQSXs1X/AljlKkbJ8qpVIW4clayyef9gWhFbNHWAA==\n-----END PUBLIC KEY-----\n",
+ "is_current": False,
+ },
+ {
+ "key_identifier": "bcb53661c06b4728e59d897fb6165d5c9cda0fd9cdf9d09ead458168deb7518c",
+ "key": "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEYAGMWO8XgCamYKMJS6jc/qgvSlAd\nAjPuDPRcXU22YxgBrz+zoN19MzuRyW87qEt9/AmtoNP5GrobzUvQSyJFVw==\n-----END PUBLIC KEY-----\n",
+ "is_current": True,
+ },
+ ]
+}
+
+
+class TestGitHub(TestCase):
+ def setUp(self):
+ # https://docs.github.com/en/code-security/secret-scanning/secret-scanning-partner-program#implement-signature-verification-in-your-secret-alert-service
+ self.payload = """[{"source":"commit","token":"some_token","type":"some_type","url":"https://example.com/base-repo-url/"}]"""
+ self.signature = "MEQCIQDaMKqrGnE27S0kgMrEK0eYBmyG0LeZismAEz/BgZyt7AIfXt9fErtRS4XaeSt/AO1RtBY66YcAdjxji410VQV4xg=="
+ self.key_id = "bcb53661c06b4728e59d897fb6165d5c9cda0fd9cdf9d09ead458168deb7518c"
+ self.subpath = "secret_scanning"
+
+ @responses.activate
+ def _verify(self):
+ responses.add(
+ responses.GET,
+ "https://api.github.com/meta/public_keys/secret_scanning",
+ json=GITHUB_META_PUBLIC_KEYS_RESPONSE,
+ status=200,
+ )
+
+ verify_signature(self.payload, self.signature, self.key_id, self.subpath)
+
+ def test_verify_signature_success(self):
+ self._verify()
+
+ def test_verify_signature_missing_key(self):
+ self.key_id = ""
+ with pytest.raises(ValueError) as excinfo:
+ self._verify()
+ assert "Invalid payload, signature, or key_id" in str(excinfo.value)
+
+ def test_verify_signature_invalid_key(self):
+ self.key_id = "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"
+ with pytest.raises(ValueError) as excinfo:
+ self._verify()
+ assert "No public key found matching key identifier" in str(excinfo.value)
+
+ def test_verify_signature_invalid_signature(self):
+ self.payload = "[]"
+ with pytest.raises(ValueError) as excinfo:
+ self._verify()
+ assert "Signature does not match payload" in str(excinfo.value)
+
+ def test_verify_signature_invalid_encoding(self):
+ self.signature = "fakesignature"
+ with pytest.raises(ValueError) as excinfo:
+ self._verify()
+ assert "Invalid signature encoding" in str(excinfo.value)
From 15fe55530b5c8a4e505293ce1a9200979e9ed711 Mon Sep 17 00:00:00 2001
From: Matej Minar
Date: Thu, 3 Oct 2024 09:24:57 +0200
Subject: [PATCH 054/139] feat(onboarding): Remove unused next steps (#78461)
These next steps had inconsistent conditional logic based on the product
selection.
It was confusing and, at that point in the onboarding flow, just a distraction.
We hooked up analytics, and the click rate was around 1%.
This PR removes the docs links for additional products and keeps only the
relevant information (such as the React Router integration).
## Before
![CleanShot 2024-10-02 at 13 31
12](https://github.com/user-attachments/assets/99b18b09-f37f-4a2a-847c-4d773c55e9d0)
## After
![CleanShot 2024-10-02 at 13 31
39](https://github.com/user-attachments/assets/2bf9107e-8821-42d0-90bf-ad486bb14b1e)
Closes https://github.com/getsentry/sentry/issues/78322
---
.../gettingStartedDoc/onboardingLayout.tsx | 2 +-
static/app/gettingStartedDocs/apple/ios.tsx | 8 ---
static/app/gettingStartedDocs/apple/macos.tsx | 8 ---
.../app/gettingStartedDocs/bun/bun.spec.tsx | 3 -
static/app/gettingStartedDocs/bun/bun.tsx | 14 +----
.../capacitor/capacitor.tsx | 22 +------
.../app/gettingStartedDocs/deno/deno.spec.tsx | 3 -
static/app/gettingStartedDocs/deno/deno.tsx | 14 +----
static/app/gettingStartedDocs/java/java.tsx | 8 ---
.../gettingStartedDocs/java/spring-boot.tsx | 8 ---
static/app/gettingStartedDocs/java/spring.tsx | 8 ---
.../gettingStartedDocs/javascript/angular.tsx | 61 +++----------------
.../gettingStartedDocs/javascript/astro.tsx | 16 -----
.../gettingStartedDocs/javascript/ember.tsx | 19 +-----
.../gettingStartedDocs/javascript/gatsby.tsx | 19 +-----
.../javascript/javascript.tsx | 43 +------------
.../gettingStartedDocs/javascript/react.tsx | 16 -----
.../gettingStartedDocs/javascript/solid.tsx | 16 -----
.../javascript/solidstart.tsx | 16 -----
.../gettingStartedDocs/javascript/svelte.tsx | 16 -----
.../app/gettingStartedDocs/javascript/vue.tsx | 59 +++---------------
.../app/gettingStartedDocs/kotlin/kotlin.tsx | 8 ---
22 files changed, 25 insertions(+), 362 deletions(-)
diff --git a/static/app/components/onboarding/gettingStartedDoc/onboardingLayout.tsx b/static/app/components/onboarding/gettingStartedDoc/onboardingLayout.tsx
index 568a1e5a79d986..aa9a84de629617 100644
--- a/static/app/components/onboarding/gettingStartedDoc/onboardingLayout.tsx
+++ b/static/app/components/onboarding/gettingStartedDoc/onboardingLayout.tsx
@@ -170,7 +170,7 @@ export function OnboardingLayout({
{nextSteps.length > 0 && (
- {t('Next Steps')}
+ {t('Additional Information')}
{nextSteps
.filter((step): step is Exclude => step !== null)
diff --git a/static/app/gettingStartedDocs/apple/ios.tsx b/static/app/gettingStartedDocs/apple/ios.tsx
index 6873dd9c5a9973..670a318b64880e 100644
--- a/static/app/gettingStartedDocs/apple/ios.tsx
+++ b/static/app/gettingStartedDocs/apple/ios.tsx
@@ -521,14 +521,6 @@ const onboarding: OnboardingConfig = {
description: t('Learn about our first class integration with SwiftUI.'),
link: 'https://docs.sentry.io/platforms/apple/tracing/instrumentation/swiftui-instrumentation/',
},
- {
- id: 'profiling',
- name: t('Profiling'),
- description: t(
- 'Collect and analyze performance profiles from real user devices in production.'
- ),
- link: 'https://docs.sentry.io/platforms/apple/profiling/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/apple/macos.tsx b/static/app/gettingStartedDocs/apple/macos.tsx
index 1cfcc4871a346a..399ee10e391a25 100644
--- a/static/app/gettingStartedDocs/apple/macos.tsx
+++ b/static/app/gettingStartedDocs/apple/macos.tsx
@@ -215,14 +215,6 @@ const onboarding: OnboardingConfig = {
description: t('Learn about our first class integration with SwiftUI.'),
link: 'https://docs.sentry.io/platforms/apple/tracing/instrumentation/swiftui-instrumentation/',
},
- {
- id: 'profiling',
- name: t('Profiling'),
- description: t(
- 'Collect and analyze performance profiles from real user devices in production.'
- ),
- link: 'https://docs.sentry.io/platforms/apple/profiling/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/bun/bun.spec.tsx b/static/app/gettingStartedDocs/bun/bun.spec.tsx
index b7293806881ad3..9b4b615690575f 100644
--- a/static/app/gettingStartedDocs/bun/bun.spec.tsx
+++ b/static/app/gettingStartedDocs/bun/bun.spec.tsx
@@ -28,8 +28,5 @@ describe('bun onboarding docs', function () {
expect(
screen.queryByText(textWithMarkupMatcher(/tracesSampleRate: 1\.0,/))
).not.toBeInTheDocument();
-
- // Renders next steps
- expect(screen.getByRole('link', {name: 'Tracing'})).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/bun/bun.tsx b/static/app/gettingStartedDocs/bun/bun.tsx
index aa60f58c391627..57e8a22dc10e1c 100644
--- a/static/app/gettingStartedDocs/bun/bun.tsx
+++ b/static/app/gettingStartedDocs/bun/bun.tsx
@@ -92,19 +92,7 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: params =>
- params.isPerformanceSelected
- ? []
- : [
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/bun/tracing/',
- },
- ],
+ nextSteps: () => [],
};
const customMetricsOnboarding: OnboardingConfig = {
diff --git a/static/app/gettingStartedDocs/capacitor/capacitor.tsx b/static/app/gettingStartedDocs/capacitor/capacitor.tsx
index 36df3359bc49e5..b268bb760ab73d 100644
--- a/static/app/gettingStartedDocs/capacitor/capacitor.tsx
+++ b/static/app/gettingStartedDocs/capacitor/capacitor.tsx
@@ -242,7 +242,7 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: params => [
+ nextSteps: () => [
{
id: 'capacitor-android-setup',
name: t('Capacitor 2 Setup'),
@@ -251,26 +251,6 @@ const onboarding: OnboardingConfig = {
),
link: 'https://docs.sentry.io/platforms/javascript/guides/capacitor/?#capacitor-2---android-specifics',
},
- params.isPerformanceSelected
- ? null
- : {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/capacitor/tracing/',
- },
- params.isReplaySelected
- ? null
- : {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/capacitor/session-replay/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/deno/deno.spec.tsx b/static/app/gettingStartedDocs/deno/deno.spec.tsx
index f1ef4b757143e8..7da88a5de9945b 100644
--- a/static/app/gettingStartedDocs/deno/deno.spec.tsx
+++ b/static/app/gettingStartedDocs/deno/deno.spec.tsx
@@ -28,8 +28,5 @@ describe('deno onboarding docs', function () {
expect(
screen.queryByText(textWithMarkupMatcher(/tracesSampleRate: 1\.0,/))
).not.toBeInTheDocument();
-
- // Renders next steps
- expect(screen.getByRole('link', {name: 'Tracing'})).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/deno/deno.tsx b/static/app/gettingStartedDocs/deno/deno.tsx
index 347b92ae1e3cea..2303cf885e5a75 100644
--- a/static/app/gettingStartedDocs/deno/deno.tsx
+++ b/static/app/gettingStartedDocs/deno/deno.tsx
@@ -101,19 +101,7 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: params =>
- params.isPerformanceSelected
- ? []
- : [
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/bun/tracing/',
- },
- ],
+ nextSteps: () => [],
};
const customMetricsOnboarding: OnboardingConfig = {
diff --git a/static/app/gettingStartedDocs/java/java.tsx b/static/app/gettingStartedDocs/java/java.tsx
index 8b78d6cadc16d7..f236144f27b656 100644
--- a/static/app/gettingStartedDocs/java/java.tsx
+++ b/static/app/gettingStartedDocs/java/java.tsx
@@ -286,14 +286,6 @@ const onboarding: OnboardingConfig = {
description: t('Check out our sample applications.'),
link: 'https://github.com/getsentry/sentry-java/tree/main/sentry-samples',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Stay ahead of latency issues and trace every slow transaction to a poor-performing API call or database query.'
- ),
- link: 'https://docs.sentry.io/platforms/java/tracing/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/java/spring-boot.tsx b/static/app/gettingStartedDocs/java/spring-boot.tsx
index dedbbfafe68200..7acd0784ea000b 100644
--- a/static/app/gettingStartedDocs/java/spring-boot.tsx
+++ b/static/app/gettingStartedDocs/java/spring-boot.tsx
@@ -288,14 +288,6 @@ const onboarding: OnboardingConfig = {
description: t('Check out our sample applications.'),
link: 'https://github.com/getsentry/sentry-java/tree/main/sentry-samples',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Stay ahead of latency issues and trace every slow transaction to a poor-performing API call or database query.'
- ),
- link: 'https://docs.sentry.io/platforms/java/guides/spring-boot/tracing/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/java/spring.tsx b/static/app/gettingStartedDocs/java/spring.tsx
index ef8641dd486336..024aab936db9d8 100644
--- a/static/app/gettingStartedDocs/java/spring.tsx
+++ b/static/app/gettingStartedDocs/java/spring.tsx
@@ -361,14 +361,6 @@ const onboarding: OnboardingConfig = {
description: t('Check out our sample applications.'),
link: 'https://github.com/getsentry/sentry-java/tree/main/sentry-samples',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Stay ahead of latency issues and trace every slow transaction to a poor-performing API call or database query.'
- ),
- link: 'https://docs.sentry.io/platforms/java/guides/spring/tracing/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/javascript/angular.tsx b/static/app/gettingStartedDocs/javascript/angular.tsx
index ac15d61ea18fbd..a79d23c9743cd8 100644
--- a/static/app/gettingStartedDocs/javascript/angular.tsx
+++ b/static/app/gettingStartedDocs/javascript/angular.tsx
@@ -31,7 +31,6 @@ import {
getReplayConfigureDescription,
getReplayVerifyStep,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/replayOnboarding';
-import {ProductSolution} from 'sentry/components/onboarding/productSelection';
import {t, tct} from 'sentry/locale';
export enum AngularConfigType {
@@ -248,30 +247,6 @@ function getVerifyConfiguration(): Configuration {
};
}
-const getNextStep = (
- params: Params
-): {
- description: string;
- id: string;
- link: string;
- name: string;
-}[] => {
- let nextStepDocs = [...nextSteps];
-
- if (params.isPerformanceSelected) {
- nextStepDocs = nextStepDocs.filter(
- step => step.id !== ProductSolution.PERFORMANCE_MONITORING
- );
- }
-
- if (params.isReplaySelected) {
- nextStepDocs = nextStepDocs.filter(
- step => step.id !== ProductSolution.SESSION_REPLAY
- );
- }
- return nextStepDocs;
-};
-
const getInstallConfig = () => [
{
language: 'bash',
@@ -374,34 +349,18 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: (params: Params) => getNextStep(params),
+ nextSteps: () => [
+ {
+ id: 'angular-features',
+ name: t('Angular Features'),
+ description: t(
+ 'Learn about our first class integration with the Angular framework.'
+ ),
+ link: 'https://docs.sentry.io/platforms/javascript/guides/angular/features/',
+ },
+ ],
};
-export const nextSteps = [
- {
- id: 'angular-features',
- name: t('Angular Features'),
- description: t('Learn about our first class integration with the Angular framework.'),
- link: 'https://docs.sentry.io/platforms/javascript/guides/angular/features/',
- },
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/angular/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/angular/session-replay/',
- },
-];
-
const replayOnboarding: OnboardingConfig = {
install: () => [
{
diff --git a/static/app/gettingStartedDocs/javascript/astro.tsx b/static/app/gettingStartedDocs/javascript/astro.tsx
index e25cf161d1ee75..abc538d2595b07 100644
--- a/static/app/gettingStartedDocs/javascript/astro.tsx
+++ b/static/app/gettingStartedDocs/javascript/astro.tsx
@@ -205,22 +205,6 @@ const onboarding: OnboardingConfig = {
),
link: 'https://docs.sentry.io/platforms/javascript/guides/astro/manual-setup/',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/astro/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/astro/session-replay/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/javascript/ember.tsx b/static/app/gettingStartedDocs/javascript/ember.tsx
index fb85120886720a..61174ee899e454 100644
--- a/static/app/gettingStartedDocs/javascript/ember.tsx
+++ b/static/app/gettingStartedDocs/javascript/ember.tsx
@@ -178,24 +178,7 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: () => [
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/ember/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/ember/session-replay/',
- },
- ],
+ nextSteps: () => [],
};
const replayOnboarding: OnboardingConfig = {
diff --git a/static/app/gettingStartedDocs/javascript/gatsby.tsx b/static/app/gettingStartedDocs/javascript/gatsby.tsx
index d964be73e75b1d..30643c39fb6a16 100644
--- a/static/app/gettingStartedDocs/javascript/gatsby.tsx
+++ b/static/app/gettingStartedDocs/javascript/gatsby.tsx
@@ -208,24 +208,7 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: () => [
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/gatsby/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/gatsby/session-replay/',
- },
- ],
+ nextSteps: () => [],
};
const replayOnboarding: OnboardingConfig = {
diff --git a/static/app/gettingStartedDocs/javascript/javascript.tsx b/static/app/gettingStartedDocs/javascript/javascript.tsx
index 4b3b2bc5a6d5c7..524309f46648ec 100644
--- a/static/app/gettingStartedDocs/javascript/javascript.tsx
+++ b/static/app/gettingStartedDocs/javascript/javascript.tsx
@@ -252,36 +252,12 @@ const loaderScriptOnboarding: OnboardingConfig = {
],
verify: getVerifyConfig,
nextSteps: () => [
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/session-replay/',
- },
{
id: 'source-maps',
name: t('Source Maps'),
description: t('Learn how to enable readable stack traces in your Sentry errors.'),
link: 'https://docs.sentry.io/platforms/javascript/sourcemaps/',
},
- {
- id: 'sdk-configuration',
- name: t('SDK Configuration'),
- description: t(
- 'Learn about additional configuration options for the Javascript SDK.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/configuration/',
- },
],
onPageLoad: params => {
return () => {
@@ -367,24 +343,7 @@ const packageManagerOnboarding: OnboardingConfig = {
}),
],
verify: getVerifyConfig,
- nextSteps: () => [
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/session-replay/',
- },
- ],
+ nextSteps: () => [],
onPageLoad: params => {
return () => {
trackAnalytics('onboarding.js_loader_npm_docs_shown', {
diff --git a/static/app/gettingStartedDocs/javascript/react.tsx b/static/app/gettingStartedDocs/javascript/react.tsx
index 81ce0c83c0e7f2..782ae725d48da5 100644
--- a/static/app/gettingStartedDocs/javascript/react.tsx
+++ b/static/app/gettingStartedDocs/javascript/react.tsx
@@ -202,22 +202,6 @@ const onboarding: OnboardingConfig = {
),
link: 'https://docs.sentry.io/platforms/javascript/guides/react/configuration/integrations/react-router/',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/react/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/react/session-replay/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/javascript/solid.tsx b/static/app/gettingStartedDocs/javascript/solid.tsx
index d478f539020ab7..9b00730166e3ef 100644
--- a/static/app/gettingStartedDocs/javascript/solid.tsx
+++ b/static/app/gettingStartedDocs/javascript/solid.tsx
@@ -209,22 +209,6 @@ const onboarding: OnboardingConfig = {
description: t('Learn about our first class integration with the Solid framework.'),
link: 'https://docs.sentry.io/platforms/javascript/guides/solid/features/',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/solid/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/solid/session-replay/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/javascript/solidstart.tsx b/static/app/gettingStartedDocs/javascript/solidstart.tsx
index 9bd550a53d5308..643afe3747330f 100644
--- a/static/app/gettingStartedDocs/javascript/solidstart.tsx
+++ b/static/app/gettingStartedDocs/javascript/solidstart.tsx
@@ -388,22 +388,6 @@ const onboarding: OnboardingConfig = {
description: t('Learn about our first class integration with the Solid framework.'),
link: 'https://docs.sentry.io/platforms/javascript/guides/solid/features/',
},
- {
- id: 'performance-monitoring',
- name: t('Performance Monitoring'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/solid/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/solid/session-replay/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/javascript/svelte.tsx b/static/app/gettingStartedDocs/javascript/svelte.tsx
index ca1001a208989e..55b1f1545575c9 100644
--- a/static/app/gettingStartedDocs/javascript/svelte.tsx
+++ b/static/app/gettingStartedDocs/javascript/svelte.tsx
@@ -204,22 +204,6 @@ const onboarding: OnboardingConfig = {
),
link: 'https://docs.sentry.io/platforms/javascript/guides/svelte/features/',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/svelte/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/svelte/session-replay/',
- },
],
};
diff --git a/static/app/gettingStartedDocs/javascript/vue.tsx b/static/app/gettingStartedDocs/javascript/vue.tsx
index 9c78a2e5a14114..95eb396a45ce9a 100644
--- a/static/app/gettingStartedDocs/javascript/vue.tsx
+++ b/static/app/gettingStartedDocs/javascript/vue.tsx
@@ -28,7 +28,6 @@ import {
getReplayConfigureDescription,
getReplayVerifyStep,
} from 'sentry/components/onboarding/gettingStartedDoc/utils/replayOnboarding';
-import {ProductSolution} from 'sentry/components/onboarding/productSelection';
import {t, tct} from 'sentry/locale';
export enum VueVersion {
@@ -122,30 +121,6 @@ const getInstallConfig = () => [
},
];
-const getNextStep = (
- params: Params
-): {
- description: string;
- id: string;
- link: string;
- name: string;
-}[] => {
- let nextStepDocs = [...nextSteps];
-
- if (params.isPerformanceSelected) {
- nextStepDocs = nextStepDocs.filter(
- step => step.id !== ProductSolution.PERFORMANCE_MONITORING
- );
- }
-
- if (params.isReplaySelected) {
- nextStepDocs = nextStepDocs.filter(
- step => step.id !== ProductSolution.SESSION_REPLAY
- );
- }
- return nextStepDocs;
-};
-
const onboarding: OnboardingConfig = {
introduction: params => (
@@ -201,34 +176,16 @@ const onboarding: OnboardingConfig = {
],
},
],
- nextSteps: params => getNextStep(params),
+ nextSteps: () => [
+ {
+ id: 'vue-features',
+ name: t('Vue Features'),
+ description: t('Learn about our first class integration with the Vue framework.'),
+ link: 'https://docs.sentry.io/platforms/javascript/guides/vue/features/',
+ },
+ ],
};
-export const nextSteps = [
- {
- id: 'vue-features',
- name: t('Vue Features'),
- description: t('Learn about our first class integration with the Vue framework.'),
- link: 'https://docs.sentry.io/platforms/javascript/guides/vue/features/',
- },
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Track down transactions to connect the dots between 10-second page loads and poor-performing API calls or slow database queries.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/vue/tracing/',
- },
- {
- id: 'session-replay',
- name: t('Session Replay'),
- description: t(
- 'Get to the root cause of an error or latency issue faster by seeing all the technical details related to that issue in one visual replay on your web application.'
- ),
- link: 'https://docs.sentry.io/platforms/javascript/guides/vue/session-replay/',
- },
-];
-
function getSiblingImportsSetupConfiguration(siblingOption: string): string {
switch (siblingOption) {
case VueVersion.VUE3:
diff --git a/static/app/gettingStartedDocs/kotlin/kotlin.tsx b/static/app/gettingStartedDocs/kotlin/kotlin.tsx
index 16b048b5021f7f..94fafbfeb1351e 100644
--- a/static/app/gettingStartedDocs/kotlin/kotlin.tsx
+++ b/static/app/gettingStartedDocs/kotlin/kotlin.tsx
@@ -263,14 +263,6 @@ const onboarding: OnboardingConfig = {
description: t('Check out our sample applications.'),
link: 'https://github.com/getsentry/sentry-java/tree/main/sentry-samples',
},
- {
- id: 'performance-monitoring',
- name: t('Tracing'),
- description: t(
- 'Stay ahead of latency issues and trace every slow transaction to a poor-performing API call or database query.'
- ),
- link: 'https://docs.sentry.io/platforms/java/tracing/',
- },
],
};
From 31e8f1aa28e2295343920911f3a2c4baa247787a Mon Sep 17 00:00:00 2001
From: Priscila Oliveira
Date: Thu, 3 Oct 2024 09:44:32 +0200
Subject: [PATCH 055/139] ref(onboarding): Improve Flutter Getting Started Doc
(#78453)
---
.../gettingStartedDocs/flutter/flutter.tsx | 122 ++++++++++++------
1 file changed, 80 insertions(+), 42 deletions(-)
diff --git a/static/app/gettingStartedDocs/flutter/flutter.tsx b/static/app/gettingStartedDocs/flutter/flutter.tsx
index 29d07078bdec05..28defb7312558e 100644
--- a/static/app/gettingStartedDocs/flutter/flutter.tsx
+++ b/static/app/gettingStartedDocs/flutter/flutter.tsx
@@ -1,3 +1,7 @@
+import {Fragment} from 'react';
+import styled from '@emotion/styled';
+
+import {Alert} from 'sentry/components/alert';
import ExternalLink from 'sentry/components/links/externalLink';
import Link from 'sentry/components/links/link';
import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step';
@@ -19,7 +23,6 @@ dependencies:
sentry_flutter: ^${getPackageVersion(params, 'sentry.dart.flutter', '7.8.0')}`;
const getConfigureSnippet = (params: Params) => `
-import 'package:flutter/widgets.dart';
import 'package:sentry_flutter/sentry_flutter.dart';
Future main() async {
@@ -41,37 +44,42 @@ Future main() async {
: ''
}
},
- appRunner: () => runApp(MyApp()),
+ appRunner: () => runApp(const MyApp()),
);
// or define SENTRY_DSN via Dart environment variable (--dart-define)
}`;
-const getVerifySnippet = () => `
-import 'package:sentry/sentry.dart';
+const configureAdditionalInfo = tct(
+ 'You can configure the [code: SENTRY_DSN], [code: SENTRY_RELEASE], [code: SENTRY_DIST], and [code: SENTRY_ENVIRONMENT] via the Dart environment variables passing the [code: --dart-define] flag to the compiler, as noted in the code sample.',
+ {
+ code:
,
+ }
+);
-try {
- aMethodThatMightFail();
-} catch (exception, stackTrace) {
- await Sentry.captureException(
- exception,
- stackTrace: stackTrace,
- );
-}`;
+const getVerifySnippet = () => `
+child: ElevatedButton(
+ onPressed: () {
+ throw Exception('This is a test exception');
+ },
+ child: const Text('Verify Sentry Setup'),
+)
+`;
const getPerformanceSnippet = () => `
import 'package:sentry/sentry.dart';
-import { getPackageVersion } from 'sentry/utils/gettingStartedDocs/getPackageVersion';
-final transaction = Sentry.startTransaction('processOrderBatch()', 'task');
+void execute() async {
+ final transaction = Sentry.startTransaction('processOrderBatch()', 'task');
-try {
- await processOrderBatch(transaction);
-} catch (exception) {
- transaction.throwable = exception;
- transaction.status = SpanStatus.internalError();
-} finally {
- await transaction.finish();
+ try {
+ await processOrderBatch(transaction);
+ } catch (exception) {
+ transaction.throwable = exception;
+ transaction.status = const SpanStatus.internalError();
+ } finally {
+ await transaction.finish();
+ }
}
Future processOrderBatch(ISentrySpan span) async {
@@ -82,7 +90,7 @@ Future processOrderBatch(ISentrySpan span) async {
// omitted code
} catch (exception) {
innerSpan.throwable = exception;
- innerSpan.status = SpanStatus.notFound();
+ innerSpan.status = const SpanStatus.notFound();
} finally {
await innerSpan.finish();
}
@@ -215,9 +223,16 @@ const onboarding: OnboardingConfig = {
),
configurations: [
{
- language: 'yml',
- partialLoading: params.sourcePackageRegistries?.isLoading,
- code: getInstallSnippet(params),
+ code: [
+ {
+ label: 'YAML',
+ value: 'yaml',
+ language: 'yaml',
+ filename: 'pubspec.yaml',
+ partialLoading: params.sourcePackageRegistries?.isLoading,
+ code: getInstallSnippet(params),
+ },
+ ],
},
],
},
@@ -239,13 +254,26 @@ const onboarding: OnboardingConfig = {
]
: []),
{
- language: 'dart',
- code: getConfigureSnippet(params),
- additionalInfo: tct(
- 'You can configure the [code: SENTRY_DSN], [code: SENTRY_RELEASE], [code: SENTRY_DIST], and [code: SENTRY_ENVIRONMENT] via the Dart environment variables passing the [code: --dart-define] flag to the compiler, as noted in the code sample.',
+ code: [
{
- code: <code />,
,
- }
+ label: 'Dart',
+ value: 'dart',
+ language: 'dart',
+ filename: 'main.dart',
+ code: getConfigureSnippet(params),
+ },
+ ],
+ additionalInfo: params.isPerformanceSelected ? (
+   <Fragment>
+     {configureAdditionalInfo}
+     <AlertWithoutMarginBottom type="info">
+       {t(
+         'To monitor performance, you need to add extra instrumentation as described in the Tracing section below.'
+       )}
+     </AlertWithoutMarginBottom>
+   </Fragment>
+ ) : (
+   configureAdditionalInfo
),
},
],
@@ -255,18 +283,18 @@ const onboarding: OnboardingConfig = {
{
type: StepType.VERIFY,
description: t(
- 'Create an intentional error, so you can test that everything is working:'
+ 'Create an intentional error, so you can test that everything is working. In the example below, pressing the button will throw an exception:'
),
configurations: [
{
- language: 'dart',
- code: getVerifySnippet(),
- additionalInfo: tct(
- "If you're new to Sentry, use the email alert to access your account and complete a product tour.[break] If you're an existing user and have disabled alerts, you won't receive this email.",
+ code: [
{
- break: <br />,
- }
- ),
+ label: 'Dart',
+ value: 'dart',
+ language: 'dart',
+ code: getVerifySnippet(),
+ },
+ ],
},
],
},
@@ -279,10 +307,16 @@ const onboarding: OnboardingConfig = {
),
configurations: [
{
- language: 'dart',
- code: getPerformanceSnippet(),
+ code: [
+ {
+ label: 'Dart',
+ value: 'dart',
+ language: 'dart',
+ code: getPerformanceSnippet(),
+ },
+ ],
additionalInfo: tct(
- 'To learn more about the API and automatic instrumentations, check out the [perfDocs: performance documentation].',
+ 'To learn more about the API and automatic instrumentations, check out the [perfDocs: tracing documentation].',
{
perfDocs: (
@@ -321,3 +355,7 @@ const docs: Docs = {
};
export default docs;
+
+const AlertWithoutMarginBottom = styled(Alert)`
+ margin-bottom: 0;
+`;
From 6e5bcccb77dc61ca6d12b77ae3e922dcc1acab3e Mon Sep 17 00:00:00 2001
From: ArthurKnaus
Date: Thu, 3 Oct 2024 09:45:12 +0200
Subject: [PATCH 056/139] feat(wizard-ui): Add submit endpoint (#78368)
Add endpoint to populate the wizard cache with the selected project.
closes https://github.com/getsentry/sentry/issues/78313
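
A rough usage sketch (it mirrors the tests added below; `client`, `org` and
`project` stand in for a logged-in Django test client and existing fixtures,
and the wizard cache is assumed to already contain the "abc" hash):

    from django.urls import reverse

    url = reverse("sentry-project-wizard-fetch", kwargs={"wizard_hash": "abc"})
    resp = client.post(
        path=url,
        data={"organizationId": org.id, "projectId": project.id},
        content_type="application/json",
    )
    # on success, the cache entry for "abc" holds an org auth token plus the
    # serialized project, which the wizard later reads back
    assert resp.status_code == 200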
---
src/sentry/web/frontend/setup_wizard.py | 114 +++++++++++---
.../sentry/web/frontend/test_setup_wizard.py | 141 ++++++++++++++++++
2 files changed, 231 insertions(+), 24 deletions(-)
diff --git a/src/sentry/web/frontend/setup_wizard.py b/src/sentry/web/frontend/setup_wizard.py
index 4c54deefa6a6fa..ec8f2022b12173 100644
--- a/src/sentry/web/frontend/setup_wizard.py
+++ b/src/sentry/web/frontend/setup_wizard.py
@@ -6,8 +6,9 @@
from urllib.parse import parse_qsl, urlparse, urlunparse
from django.conf import settings
-from django.http import HttpRequest, HttpResponse
+from django.http import Http404, HttpRequest, HttpResponse, HttpResponseBadRequest
from django.http.response import HttpResponseBase
+from django.shortcuts import get_object_or_404
from sentry.api.endpoints.setup_wizard import SETUP_WIZARD_CACHE_KEY, SETUP_WIZARD_CACHE_TIMEOUT
from sentry.api.serializers import serialize
@@ -19,12 +20,14 @@
from sentry.models.organizationmapping import OrganizationMapping
from sentry.models.organizationmembermapping import OrganizationMemberMapping
from sentry.models.orgauthtoken import OrgAuthToken
+from sentry.projects.services.project.model import RpcProject
from sentry.projects.services.project.service import project_service
-from sentry.projects.services.project_key.model import ProjectKeyRole
+from sentry.projects.services.project_key.model import ProjectKeyRole, RpcProjectKey
from sentry.projects.services.project_key.service import project_key_service
from sentry.types.token import AuthTokenType
from sentry.users.models.user import User
from sentry.users.services.user.model import RpcUser
+from sentry.utils import json
from sentry.utils.http import absolute_uri
from sentry.utils.security.orgauthtoken_token import (
SystemUrlPrefixMissingException,
@@ -84,14 +87,7 @@ def get(self, request: HttpRequest, wizard_hash) -> HttpResponseBase:
org_mappings_map = {}
for mapping in org_mappings:
region_data_map[mapping.region_name]["org_ids"].append(mapping.organization_id)
- status = OrganizationStatus(mapping.status)
- serialized_mapping = {
- "id": mapping.organization_id,
- "name": mapping.name,
- "slug": mapping.slug,
- "region": mapping.region_name,
- "status": {"id": status.name.lower(), "name": status.label},
- }
+ serialized_mapping = serialize_org_mapping(mapping)
org_mappings_map[mapping.organization_id] = serialized_mapping
for region_name, region_data in region_data_map.items():
@@ -111,25 +107,18 @@ def get(self, request: HttpRequest, wizard_hash) -> HttpResponseBase:
)
region_data["keys"] = keys
for key in region_data["keys"]:
- serialized_key = {
- "dsn": {"public": key.dsn_public},
- "isActive": key.is_active,
- }
+ serialized_key = serialize_project_key(key)
keys_map[key.project_id].append(serialized_key)
filled_projects = []
for region_name, region_data in region_data_map.items():
for project in region_data["projects"]:
- enriched_project = {
- "slug": project.slug,
- "id": project.id,
- "name": project.name,
- "platform": project.platform,
- "status": STATUS_LABELS.get(project.status, "unknown"),
- }
- # The wizard only reads the a few fields so serializing the mapping should work fine
- enriched_project["organization"] = org_mappings_map[project.organization_id]
- enriched_project["keys"] = keys_map[project.id]
+ enriched_project = serialize_project(
+ project=project,
+ # The wizard only reads a few fields, so serializing the mapping should work fine
+ organization=org_mappings_map[project.organization_id],
+ keys=keys_map[project.id],
+ )
filled_projects.append(enriched_project)
# Fetching or creating a token
@@ -143,6 +132,83 @@ def get(self, request: HttpRequest, wizard_hash) -> HttpResponseBase:
context["organizations"] = list(org_mappings_map.values())
return render_to_response("sentry/setup-wizard.html", context, request)
+ def post(self, request: HttpRequest, wizard_hash=None) -> HttpResponse:
+ """
+ Populates the wizard cache entry for the given wizard_hash with the selected project and an org auth token
+ """
+ json_data = json.loads(request.body)
+ organization_id = json_data.get("organizationId", None)
+ project_id = json_data.get("projectId", None)
+
+ if organization_id is None or project_id is None or wizard_hash is None:
+ return HttpResponseBadRequest()
+
+ member_org_ids = OrganizationMemberMapping.objects.filter(
+ user_id=request.user.id
+ ).values_list("organization_id", flat=True)
+ mapping = get_object_or_404(
+ OrganizationMapping,
+ organization_id=organization_id,
+ organization_id__in=member_org_ids,
+ )
+
+ project = project_service.get_by_id(organization_id=mapping.organization_id, id=project_id)
+ if project is None:
+ raise Http404()
+
+ project_key = project_key_service.get_project_key(
+ organization_id=mapping.organization_id,
+ project_id=project.id,
+ role=ProjectKeyRole.store,
+ )
+ if project_key is None:
+ raise Http404()
+
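+ # fetch or create an org auth token for the wizard (the tests below expect an 'org:ci'-scoped token)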
+ serialized_token = get_org_token(mapping, request.user)
+
+ enriched_project = serialize_project(
+ project=project,
+ # The wizard only reads a few fields, so serializing the mapping should work fine
+ organization=serialize_org_mapping(mapping),
+ keys=[serialize_project_key(project_key)],
+ )
+
+ cache_data = {"apiKeys": serialized_token, "projects": [enriched_project]}
+
+ key = f"{SETUP_WIZARD_CACHE_KEY}{wizard_hash}"
+ default_cache.set(key, cache_data, SETUP_WIZARD_CACHE_TIMEOUT)
+ return HttpResponse(status=200)
+
+
+def serialize_org_mapping(mapping: OrganizationMapping):
+ status = OrganizationStatus(mapping.status)
+ return {
+ "id": mapping.organization_id,
+ "name": mapping.name,
+ "slug": mapping.slug,
+ "region": mapping.region_name,
+ "status": {"id": status.name.lower(), "name": status.label},
+ }
+
+
+def serialize_project_key(project_key: RpcProjectKey):
+ return {
+ "dsn": {"public": project_key.dsn_public},
+ "isActive": project_key.is_active,
+ }
+
+
+def serialize_project(project: RpcProject, organization: dict, keys: list[dict]):
+ return {
+ "slug": project.slug,
+ "id": project.id,
+ "name": project.name,
+ "platform": project.platform,
+ "status": STATUS_LABELS.get(project.status, "unknown"),
+ "organization": organization,
+ "keys": keys,
+ }
+
def get_token(mappings: list[OrganizationMapping], user: RpcUser):
can_use_org_tokens = len(mappings) == 1
diff --git a/tests/sentry/web/frontend/test_setup_wizard.py b/tests/sentry/web/frontend/test_setup_wizard.py
index 2fab385c55ae0c..3abe9ea763a87b 100644
--- a/tests/sentry/web/frontend/test_setup_wizard.py
+++ b/tests/sentry/web/frontend/test_setup_wizard.py
@@ -159,3 +159,144 @@ def test_redirect_to_login_if_no_query_param(self):
assert resp.status_code == 302
assert resp.headers["Location"] == "/auth/login/"
+
+ def test_post_success(self):
+ self.org = self.create_organization(owner=self.user)
+ self.team = self.create_team(organization=self.org, name="Mariachi Band")
+ self.project = self.create_project(organization=self.org, teams=[self.team], name="Bengal")
+ # create another project to make sure only the submitted project is in the cache
+ self.create_project(organization=self.org, teams=[self.team], name="Bengal2")
+ self.login_as(self.user)
+
+ key = f"{SETUP_WIZARD_CACHE_KEY}abc"
+ default_cache.set(key, "test", 600)
+
+ url = reverse("sentry-project-wizard-fetch", kwargs={"wizard_hash": "abc"})
+ resp = self.client.post(
+ path=url,
+ data={"organizationId": self.org.id, "projectId": self.project.id},
+ content_type="application/json",
+ )
+
+ assert resp.status_code == 200
+ cached = default_cache.get(key)
+ assert cached.get("apiKeys").get("scopes")[0] == "org:ci"
+
+ # The submitted project should be the only one in the cache
+ assert len(cached.get("projects")) == 1
+ cached_project = cached.get("projects")[0]
+ assert cached_project.get("id") == self.project.id
+
+ assert cached_project.get("status") == "active"
+ assert cached_project.get("keys")[0].get("isActive")
+ assert cached_project.get("organization").get("status").get("id") == "active"
+
+ def test_post_bad_request(self):
+ self.login_as(self.user)
+
+ # missing organizationId
+ url = reverse("sentry-project-wizard-fetch", kwargs={"wizard_hash": "abc"})
+ resp = self.client.post(
+ path=url,
+ data={"projectId": 123},
+ content_type="application/json",
+ )
+ assert resp.status_code == 400
+
+ # missing projectId
+ url = reverse("sentry-project-wizard-fetch", kwargs={"wizard_hash": "abc"})
+ resp = self.client.post(
+ path=url,
+ data={"organizationId": 123},
+ content_type="application/json",
+ )
+ assert resp.status_code == 400
+
+ def test_post_project_not_found(self):
+ self.org = self.create_organization(owner=self.user)
+ self.login_as(self.user)
+
+ key = f"{SETUP_WIZARD_CACHE_KEY}abc"
+ default_cache.set(key, "test", 600)
+
+ url = reverse("sentry-project-wizard-fetch", kwargs={"wizard_hash": "abc"})
+ resp = self.client.post(
+ path=url,
+ data={"organizationId": self.org.id, "projectId": 1234},
+ content_type="application/json",
+ )
+
+ assert resp.status_code == 404
+
+ def test_organization_not_found(self):
+ self.login_as(self.user)
+
+ key = f"{SETUP_WIZARD_CACHE_KEY}abc"
+ default_cache.set(key, "test", 600)
+
+ url = reverse("sentry-project-wizard-fetch", kwargs={"wizard_hash": "abc"})
+ resp = self.client.post(
+ path=url,
+ data={"organizationId": 1234, "projectId": 1234},
+ content_type="application/json",
+ )
+
+ assert resp.status_code == 404
+
+ def test_organization_without_membership(self):
+ self.org = self.create_organization()
+ self.project = self.create_project(organization=self.org)
+ self.login_as(self.user)
+
+ key = f"{SETUP_WIZARD_CACHE_KEY}abc"
+ default_cache.set(key, "test", 600)
+
+ url = reverse("sentry-project-wizard-fetch", kwargs={"wizard_hash": "abc"})
+ resp = self.client.post(
+ path=url,
+ data={"organizationId": self.org.id, "projectId": self.project.id},
+ content_type="application/json",
+ )
+
+ assert resp.status_code == 404
+
+ def test_post_project_not_in_org(self):
+ self.org = self.create_organization(owner=self.user)
+ self.project = self.create_project()
+ self.login_as(self.user)
+
+ key = f"{SETUP_WIZARD_CACHE_KEY}abc"
+ default_cache.set(key, "test", 600)
+
+ url = reverse("sentry-project-wizard-fetch", kwargs={"wizard_hash": "abc"})
+ resp = self.client.post(
+ path=url,
+ data={"organizationId": self.org.id, "projectId": self.project.id},
+ content_type="application/json",
+ )
+
+ assert resp.status_code == 404
+
+ @override_settings(SENTRY_SIGNUP_URL="https://sentry.io/signup/")
+ def test_post_redirect_to_signup(self):
+ self.create_organization(owner=self.user)
+ url = (
+ reverse("sentry-project-wizard-fetch", kwargs={"wizard_hash": "xyz"})
+ + "?signup=1&test=other"
+ )
+ resp = self.client.post(url)
+
+ assert resp.status_code == 302
+ assert (
+ resp.headers["Location"]
+ == "https://sentry.io/signup/?next=http%3A%2F%2Ftestserver%2Faccount%2Fsettings%2Fwizard%2Fxyz%2F&test=other"
+ )
+
+ @override_settings(SENTRY_SIGNUP_URL="https://sentry.io/signup/")
+ def test_post_redirect_to_login_if_no_query_param(self):
+ self.create_organization(owner=self.user)
+ url = reverse("sentry-project-wizard-fetch", kwargs={"wizard_hash": "xyz"})
+ resp = self.client.post(url)
+
+ assert resp.status_code == 302
+ assert resp.headers["Location"] == "/auth/login/"
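
For reference, a minimal sketch of how a wizard client could drive the new POST
handler once the user has picked an organization and project. The host, session
cookie, hash, and ids below are placeholders, not real values; only the
request/response shape follows the handler and tests above.

    # Illustrative only: all values are placeholders, not real credentials.
    import requests

    session = requests.Session()
    session.cookies.set("sentrysid", "<authenticated session cookie>")

    resp = session.post(
        "https://sentry.example.com/account/settings/wizard/abc123/",
        json={"organizationId": 42, "projectId": 7},
        timeout=10,
    )
    assert resp.status_code == 200

    # The handler then caches, under the "abc123" hash:
    #   {"apiKeys": <org auth token>, "projects": [<project with organization and keys>]}
    # which the wizard later reads back via the existing GET endpoint.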
From 42cb4be9ef2e9460cc1f02f39e87045358031e9f Mon Sep 17 00:00:00 2001
From: Joris Bayer
Date: Thu, 3 Oct 2024 09:46:34 +0200
Subject: [PATCH 057/139] feat(spans): Config stub for span.op inference
(#77851)
Add a first rule to derive missing `span.op`s in Relay.
---
src/sentry/relay/globalconfig.py | 32 +++++++++++++++++++++++++++++++-
1 file changed, 31 insertions(+), 1 deletion(-)
diff --git a/src/sentry/relay/globalconfig.py b/src/sentry/relay/globalconfig.py
index d9d1e81c8a0041..6e58750ec3bdfa 100644
--- a/src/sentry/relay/globalconfig.py
+++ b/src/sentry/relay/globalconfig.py
@@ -7,7 +7,7 @@
MetricExtractionGroups,
global_metric_extraction_groups,
)
-from sentry.relay.types import GenericFiltersConfig
+from sentry.relay.types import GenericFiltersConfig, RuleCondition
from sentry.utils import metrics
# List of options to include in the global config.
@@ -28,11 +28,21 @@
]
+class SpanOpDefaultRule(TypedDict):
+ condition: RuleCondition
+ value: str
+
+
+class SpanOpDefaults(TypedDict):
+ rules: list[SpanOpDefaultRule]
+
+
class GlobalConfig(TypedDict, total=False):
measurements: MeasurementsConfig
aiModelCosts: AIModelCosts
metricExtraction: MetricExtractionGroups
filters: GenericFiltersConfig | None
+ spanOpDefaults: SpanOpDefaults
options: dict[str, Any]
@@ -43,6 +53,25 @@ def get_global_generic_filters() -> GenericFiltersConfig:
}
+def span_op_defaults() -> SpanOpDefaults:
+ return {
+ "rules": [
+ {
+ # If span.data[messaging.system] is set, use span.op "message":
+ "condition": {
+ "op": "not",
+ "inner": {
+ "op": "eq",
+ "name": "span.data.messaging\\.system",
+ "value": None,
+ },
+ },
+ "value": "message",
+ }
+ ]
+ }
+
+
@metrics.wraps("relay.globalconfig.get")
def get_global_config():
"""Return the global configuration for Relay."""
@@ -51,6 +80,7 @@ def get_global_config():
"measurements": get_measurements_config(),
"aiModelCosts": ai_model_costs_config(),
"metricExtraction": global_metric_extraction_groups(),
+ "spanOpDefaults": span_op_defaults(),
}
filters = get_global_generic_filters()
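
To make the rule semantics concrete, here is an illustrative Python sketch of
what the single rule above expresses. The real evaluation happens in Relay, so
this is only a model of the condition, not Sentry or Relay code.

    def infer_span_op(span: dict) -> str | None:
        """Model of the spanOpDefaults rule: not(eq(span.data.messaging\\.system, None))."""
        if span.get("op"):
            return span["op"]  # an explicit op always wins
        # If span.data["messaging.system"] is set, default the op to "message".
        if span.get("data", {}).get("messaging.system") is not None:
            return "message"
        return None  # no rule matched; span.op stays missing

    assert infer_span_op({"data": {"messaging.system": "kafka"}}) == "message"
    assert infer_span_op({"op": "db", "data": {"messaging.system": "kafka"}}) == "db"
    assert infer_span_op({"data": {}}) is None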
From 893982eb8a9ebb377c35e3c9f5b33009bbebe8ce Mon Sep 17 00:00:00 2001
From: Markus Hintersteiner
Date: Thu, 3 Oct 2024 11:23:43 +0200
Subject: [PATCH 058/139] feat(insights): Show detail panel when clicking on
top-level cards (#78327)
Show a detail panel when clicking on a mobile screens metric. If a
metric has no grading, no background color is applied to the card
(previously it was greyed out).
---
.../components/screensOverviewTable.tsx | 4 +-
.../mobile/screens/components/vitalCard.tsx | 17 ++-
.../components/vitalDetailPanel.spec.tsx | 80 +++++++++++
.../screens/components/vitalDetailPanel.tsx | 97 +++++++++++++
.../views/insights/mobile/screens/utils.ts | 40 +++++-
.../screens/views/screensLandingPage.tsx | 135 ++++++++++++++----
6 files changed, 334 insertions(+), 39 deletions(-)
create mode 100644 static/app/views/insights/mobile/screens/components/vitalDetailPanel.spec.tsx
create mode 100644 static/app/views/insights/mobile/screens/components/vitalDetailPanel.tsx
diff --git a/static/app/views/insights/mobile/screens/components/screensOverviewTable.tsx b/static/app/views/insights/mobile/screens/components/screensOverviewTable.tsx
index 0560bcf068c5bf..481243673060c0 100644
--- a/static/app/views/insights/mobile/screens/components/screensOverviewTable.tsx
+++ b/static/app/views/insights/mobile/screens/components/screensOverviewTable.tsx
@@ -87,12 +87,12 @@ function ScreensOverviewTable({data, eventView, isLoading, pageLinks}: Props) {
pageLinks={pageLinks}
columnOrder={[
'transaction',
+ 'avg(measurements.app_start_cold)',
+ 'avg(measurements.app_start_warm)',
`avg(mobile.slow_frames)`,
`avg(mobile.frozen_frames)`,
`avg(measurements.time_to_initial_display)`,
`avg(measurements.time_to_full_display)`,
- 'avg(measurements.app_start_cold)',
- 'avg(measurements.app_start_warm)',
`count()`,
]}
defaultSort={[
diff --git a/static/app/views/insights/mobile/screens/components/vitalCard.tsx b/static/app/views/insights/mobile/screens/components/vitalCard.tsx
index 204e88f1ad1a11..fc0875464e3492 100644
--- a/static/app/views/insights/mobile/screens/components/vitalCard.tsx
+++ b/static/app/views/insights/mobile/screens/components/vitalCard.tsx
@@ -7,16 +7,24 @@ import {PERFORMANCE_SCORE_COLORS} from 'sentry/views/insights/browser/webVitals/
type Props = {
description: string;
- formattedValue: React.ReactNode;
+ formattedValue: string | undefined;
status: string | undefined;
statusLabel: string | undefined;
title: string;
+ onClick?: () => void;
};
-function VitalCard({description, formattedValue, status, statusLabel, title}: Props) {
+function VitalCard({
+ description,
+ formattedValue,
+ status,
+ statusLabel,
+ title,
+ onClick,
+}: Props) {
return (
-
+
{description && (
`
color: ${p => p.theme[PERFORMANCE_SCORE_COLORS[p.status].normal]};
border-radius: 0 0 ${p => p.theme.borderRadius} ${p => p.theme.borderRadius};
- background-color: ${p => p.theme[PERFORMANCE_SCORE_COLORS[p.status].light]};
+ background-color: ${p =>
+ p.status === 'none' ? 'none' : p.theme[PERFORMANCE_SCORE_COLORS[p.status].light]};
border: solid 1px ${p => p.theme[PERFORMANCE_SCORE_COLORS[p.status].light]};
font-size: ${p => p.theme.fontSizeExtraSmall};
padding: ${space(0.5)};
diff --git a/static/app/views/insights/mobile/screens/components/vitalDetailPanel.spec.tsx b/static/app/views/insights/mobile/screens/components/vitalDetailPanel.spec.tsx
new file mode 100644
index 00000000000000..6fd3c2a3b03440
--- /dev/null
+++ b/static/app/views/insights/mobile/screens/components/vitalDetailPanel.spec.tsx
@@ -0,0 +1,80 @@
+import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
+
+import {DiscoverDatasets} from 'sentry/utils/discover/types';
+import {PageAlertProvider} from 'sentry/utils/performance/contexts/pageAlert';
+import {
+ PerformanceScore,
+ type VitalItem,
+ type VitalStatus,
+} from 'sentry/views/insights/mobile/screens/utils';
+
+import {VitalDetailPanel} from './vitalDetailPanel';
+
+jest.mock('sentry/views/insights/mobile/common/queries/useCrossPlatformProject', () => ({
+ __esModule: true,
+ default: () => ({selectedPlatform: 'Android'}),
+}));
+
+const mockStatus: VitalStatus = {
+ formattedValue: '100ms',
+ score: PerformanceScore.GOOD,
+ description: 'Good performance',
+ value: {
+ type: 'duration',
+ unit: 'ms',
+ value: 100,
+ },
+};
+
+const mockVital: VitalItem = {
+ title: 'title',
+ description: 'description',
+ docs: 'docs',
+ setup: 'setup',
+ platformDocLinks: {
+ Android: 'https://example.com/platform-docs',
+ },
+ sdkDocLinks: {
+ Android: 'https://example.com/sdk-docs',
+ },
+ field: 'avg(measurements.app_start_cold)',
+ dataset: DiscoverDatasets.METRICS,
+ getStatus: () => mockStatus,
+};
+
+describe('VitalDetailPanel', () => {
+ test('renders correctly with given props', () => {
+ render(
+
+
+
+ );
+
+ expect(screen.getByText('title')).toBeInTheDocument();
+ expect(screen.getByText('100ms')).toBeInTheDocument();
+ expect(screen.getByText('Good performance')).toBeInTheDocument();
+ expect(screen.getByText('docs')).toBeInTheDocument();
+ expect(screen.getByText('setup')).toBeInTheDocument();
+ expect(screen.getByText('Sentry SDK documentation')).toHaveAttribute(
+ 'href',
+ 'https://example.com/sdk-docs'
+ );
+ expect(screen.getByText('Platform documentation')).toHaveAttribute(
+ 'href',
+ 'https://example.com/platform-docs'
+ );
+ });
+
+ test('calls onClose when close action is triggered', async () => {
+ const onCloseMock = jest.fn();
+ render(
+
+
+
+ );
+
+ const closeButton = screen.getByLabelText('Close Details');
+ await userEvent.click(closeButton);
+ expect(onCloseMock).toHaveBeenCalled();
+ });
+});
diff --git a/static/app/views/insights/mobile/screens/components/vitalDetailPanel.tsx b/static/app/views/insights/mobile/screens/components/vitalDetailPanel.tsx
new file mode 100644
index 00000000000000..6ae37bf48a96cf
--- /dev/null
+++ b/static/app/views/insights/mobile/screens/components/vitalDetailPanel.tsx
@@ -0,0 +1,97 @@
+import React from 'react';
+import styled from '@emotion/styled';
+
+import ExternalLink from 'sentry/components/links/externalLink';
+import {t} from 'sentry/locale';
+import {space} from 'sentry/styles/space';
+import {PageAlert, PageAlertProvider} from 'sentry/utils/performance/contexts/pageAlert';
+import {PERFORMANCE_SCORE_COLORS} from 'sentry/views/insights/browser/webVitals/utils/performanceScoreColors';
+import DetailPanel from 'sentry/views/insights/common/components/detailPanel';
+import useCrossPlatformProject from 'sentry/views/insights/mobile/common/queries/useCrossPlatformProject';
+import {
+ PerformanceScore,
+ type VitalItem,
+ type VitalStatus,
+} from 'sentry/views/insights/mobile/screens/utils';
+
+export function VitalDetailPanel({
+ vital,
+ status,
+ onClose,
+}: {
+ onClose: () => void;
+ status: VitalStatus | undefined;
+ vital: VitalItem | undefined;
+}) {
+ const {selectedPlatform} = useCrossPlatformProject();
+
+ const platformDocsLink = vital?.platformDocLinks[selectedPlatform];
+ const sdkDocsLink = vital?.sdkDocLinks[selectedPlatform];
+
+ return (
+
+
+ {vital && (
+
+ {vital.title}
+ {status && (
+
+ {status.formattedValue ?? '-'}{' '}
+ {status.score !== PerformanceScore.NONE && (
+ {status.description}
+ )}
+
+ )}
+ {vital.docs}
+ {vital.setup && {vital.setup}
}
+ {(platformDocsLink || sdkDocsLink) && (
+
+ {t('Learn more')}
+
+ {sdkDocsLink && (
+
+
+ {t('Sentry SDK documentation')}
+
+
+ )}
+ {platformDocsLink && (
+
+
+ {t('Platform documentation')}
+
+
+ )}
+
+
+ )}
+
+ )}
+
+
+
+ );
+}
+
+const VitalDetailTitle = styled('h4')`
+ margin-bottom: ${space(1)};
+ margin-top: 40px;
+`;
+
+const Badge = styled('div')<{status: string}>`
+ white-space: nowrap;
+ border-radius: 12px;
+ color: ${p => p.theme[PERFORMANCE_SCORE_COLORS[p.status].normal]};
+ background-color: ${p => p.theme[PERFORMANCE_SCORE_COLORS[p.status].light]};
+ border: solid 1px ${p => p.theme[PERFORMANCE_SCORE_COLORS[p.status].light]};
+ font-size: ${p => p.theme.fontSizeSmall};
+ padding: 0 ${space(1)};
+ display: inline-block;
+ height: 17px;
+ vertical-align: middle;
+`;
+
+const SubHeading = styled('div')`
+ font-weight: ${p => p.theme.fontWeightBold};
+ margin-bottom: ${space(1)};
+`;
diff --git a/static/app/views/insights/mobile/screens/utils.ts b/static/app/views/insights/mobile/screens/utils.ts
index e598a695a96e64..72b8044fbf7a68 100644
--- a/static/app/views/insights/mobile/screens/utils.ts
+++ b/static/app/views/insights/mobile/screens/utils.ts
@@ -1,7 +1,22 @@
import {DURATION_UNITS} from 'sentry/utils/discover/fieldRenderers';
import type {DiscoverDatasets} from 'sentry/utils/discover/types';
+import getDuration from 'sentry/utils/duration/getDuration';
import {VitalState} from 'sentry/views/performance/vitalDetail/utils';
+const formatMetricValue = (metric: MetricValue): string => {
+ if (typeof metric.value === 'number' && metric.type === 'duration' && metric.unit) {
+ const seconds =
+ (metric.value * ((metric.unit && DURATION_UNITS[metric.unit]) ?? 1)) / 1000;
+ return getDuration(seconds, 2, true);
+ }
+
+ if (typeof metric.value === 'number' && metric.type === 'number') {
+ return metric.value.toFixed(2);
+ }
+
+ return String(metric.value);
+};
+
// maps to PERFORMANCE_SCORE_COLORS keys
export enum PerformanceScore {
GOOD = 'good',
@@ -12,14 +27,20 @@ export enum PerformanceScore {
export type VitalStatus = {
description: string | undefined;
+ formattedValue: string | undefined;
score: PerformanceScore;
+ value: MetricValue | undefined;
};
export type VitalItem = {
dataset: DiscoverDatasets;
description: string;
+ docs: React.ReactNode;
field: string;
getStatus: (value: MetricValue) => VitalStatus;
+ platformDocLinks: Record;
+ sdkDocLinks: Record;
+ setup: React.ReactNode | undefined;
title: string;
};
@@ -36,10 +57,12 @@ export type MetricValue = {
export const STATUS_UNKNOWN: VitalStatus = {
description: undefined,
+ formattedValue: undefined,
+ value: undefined,
score: PerformanceScore.NONE,
};
-export function getColdAppStartPerformance(metric: MetricValue) {
+export function getColdAppStartPerformance(metric: MetricValue): VitalStatus {
let description = '';
let status = PerformanceScore.NONE;
@@ -59,12 +82,14 @@ export function getColdAppStartPerformance(metric: MetricValue) {
}
}
return {
+ value: metric,
+ formattedValue: formatMetricValue(metric),
score: status,
description: description,
};
}
-export function getWarmAppStartPerformance(metric: MetricValue) {
+export function getWarmAppStartPerformance(metric: MetricValue): VitalStatus {
let description = '';
let status = PerformanceScore.NONE;
@@ -84,11 +109,18 @@ export function getWarmAppStartPerformance(metric: MetricValue) {
}
}
return {
+ value: metric,
+ formattedValue: formatMetricValue(metric),
score: status,
description: description,
};
}
-export function getDefaultMetricPerformance(_: MetricValue) {
- return STATUS_UNKNOWN;
+export function getDefaultMetricPerformance(metric: MetricValue): VitalStatus {
+ return {
+ description: undefined,
+ formattedValue: formatMetricValue(metric),
+ value: metric,
+ score: PerformanceScore.NONE,
+ };
}
diff --git a/static/app/views/insights/mobile/screens/views/screensLandingPage.tsx b/static/app/views/insights/mobile/screens/views/screensLandingPage.tsx
index e290d4984ea707..22c5b37f779672 100644
--- a/static/app/views/insights/mobile/screens/views/screensLandingPage.tsx
+++ b/static/app/views/insights/mobile/screens/views/screensLandingPage.tsx
@@ -1,13 +1,13 @@
-import {useCallback} from 'react';
+import {useCallback, useState} from 'react';
import styled from '@emotion/styled';
import omit from 'lodash/omit';
import {Breadcrumbs} from 'sentry/components/breadcrumbs';
import ButtonBar from 'sentry/components/buttonBar';
-import Duration from 'sentry/components/duration';
import ErrorBoundary from 'sentry/components/errorBoundary';
import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidgetButton';
import * as Layout from 'sentry/components/layouts/thirds';
+import {TabbedCodeSnippet} from 'sentry/components/onboarding/gettingStartedDoc/step';
import {DatePageFilter} from 'sentry/components/organizations/datePageFilter';
import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter';
import PageFilterBar from 'sentry/components/organizations/pageFilterBar';
@@ -19,9 +19,7 @@ import type {NewQuery} from 'sentry/types/organization';
import {browserHistory} from 'sentry/utils/browserHistory';
import {useDiscoverQuery} from 'sentry/utils/discover/discoverQuery';
import EventView from 'sentry/utils/discover/eventView';
-import {DURATION_UNITS} from 'sentry/utils/discover/fieldRenderers';
import {DiscoverDatasets} from 'sentry/utils/discover/types';
-import {formatFloat} from 'sentry/utils/number/formatFloat';
import {PageAlert, PageAlertProvider} from 'sentry/utils/performance/contexts/pageAlert';
import {MutableSearch} from 'sentry/utils/tokenizeSearch';
import {useLocation} from 'sentry/utils/useLocation';
@@ -31,8 +29,10 @@ import {ModulePageProviders} from 'sentry/views/insights/common/components/modul
import {useModuleBreadcrumbs} from 'sentry/views/insights/common/utils/useModuleBreadcrumbs';
import useCrossPlatformProject from 'sentry/views/insights/mobile/common/queries/useCrossPlatformProject';
import {PlatformSelector} from 'sentry/views/insights/mobile/screenload/components/platformSelector';
+import {SETUP_CONTENT as TTFD_SETUP} from 'sentry/views/insights/mobile/screenload/data/setupContent';
import {ScreensOverview} from 'sentry/views/insights/mobile/screens/components/screensOverview';
import VitalCard from 'sentry/views/insights/mobile/screens/components/vitalCard';
+import {VitalDetailPanel} from 'sentry/views/insights/mobile/screens/components/vitalDetailPanel';
import {Referrer} from 'sentry/views/insights/mobile/screens/referrers';
import {
MODULE_DESCRIPTION,
@@ -47,6 +47,7 @@ import {
type MetricValue,
STATUS_UNKNOWN,
type VitalItem,
+ type VitalStatus,
} from 'sentry/views/insights/mobile/screens/utils';
import {MobileHeader} from 'sentry/views/insights/pages/mobile/mobilePageHeader';
import {useDomainViewFilters} from 'sentry/views/insights/pages/useFilters';
@@ -59,7 +60,6 @@ export function ScreensLandingPage() {
const location = useLocation();
const organization = useOrganization();
const {isProjectCrossPlatform, selectedPlatform} = useCrossPlatformProject();
- // const {primaryRelease, secondaryRelease} = useReleaseSelection();
const handleProjectChange = useCallback(() => {
browserHistory.replace({
@@ -75,6 +75,19 @@ export function ScreensLandingPage() {
{
title: t('Cold App Start'),
description: t('Average Cold App Start duration'),
+ docs: t(
+ 'The average cold app start duration. A cold start usually occurs when the app is launched for the first time, after a reboot or an app update.'
+ ),
+ setup: undefined,
+ platformDocLinks: {
+ Android:
+ 'https://developer.android.com/topic/performance/vitals/launch-time#cold',
+ },
+ sdkDocLinks: {
+ Android:
+ 'https://docs.sentry.io/platforms/android/tracing/instrumentation/automatic-instrumentation/#app-start-instrumentation',
+ iOS: 'https://docs.sentry.io/platforms/apple/guides/ios/tracing/instrumentation/automatic-instrumentation/#app-start-tracing',
+ },
field: 'avg(measurements.app_start_cold)',
dataset: DiscoverDatasets.METRICS,
getStatus: getColdAppStartPerformance,
@@ -82,13 +95,38 @@ export function ScreensLandingPage() {
{
title: t('Warm App Start'),
description: t('Average Warm App Start duration'),
+ docs: t(
+ 'The average warm app start duration. A warm start usually occurs when the app was already launched previously or the process was created beforehand.'
+ ),
+ setup: undefined,
+ platformDocLinks: {
+ Android:
+ 'https://developer.android.com/topic/performance/vitals/launch-time#warm',
+ },
+ sdkDocLinks: {
+ Android:
+ 'https://docs.sentry.io/platforms/android/tracing/instrumentation/automatic-instrumentation/#app-start-instrumentation',
+ iOS: 'https://docs.sentry.io/platforms/apple/guides/ios/tracing/instrumentation/automatic-instrumentation/#app-start-tracing',
+ },
field: 'avg(measurements.app_start_warm)',
dataset: DiscoverDatasets.METRICS,
getStatus: getWarmAppStartPerformance,
},
{
title: t('Slow Frames'),
- description: t('Average number of slow frames'),
+ description: t('Slow frames ratio'),
+ docs: t(
+ 'The number of slow frames divided by the total number of frames rendered.'
+ ),
+ setup: undefined,
+ platformDocLinks: {
+ Android: 'https://developer.android.com/topic/performance/vitals/render',
+ },
+ sdkDocLinks: {
+ Android:
+ 'https://docs.sentry.io/platforms/android/tracing/instrumentation/automatic-instrumentation/#slow-and-frozen-frames',
+ iOS: 'https://docs.sentry.io/platforms/apple/guides/ios/tracing/instrumentation/automatic-instrumentation/#slow-and-frozen-frames',
+ },
field: `avg(mobile.slow_frames)`,
dataset: DiscoverDatasets.SPANS_METRICS,
getStatus: getDefaultMetricPerformance,
@@ -96,6 +134,18 @@ export function ScreensLandingPage() {
{
title: t('Frozen Frames'),
description: t('Average number of frozen frames'),
+ docs: t(
+ 'The number of frozen frames divided by the total number of frames rendered.'
+ ),
+ setup: undefined,
+ platformDocLinks: {
+ Android: 'https://developer.android.com/topic/performance/vitals/render',
+ },
+ sdkDocLinks: {
+ Android:
+ 'https://docs.sentry.io/platforms/android/tracing/instrumentation/automatic-instrumentation/#slow-and-frozen-frames',
+ iOS: 'https://docs.sentry.io/platforms/apple/guides/ios/tracing/instrumentation/automatic-instrumentation/#slow-and-frozen-frames',
+ },
field: `avg(mobile.frozen_frames)`,
dataset: DiscoverDatasets.SPANS_METRICS,
getStatus: getDefaultMetricPerformance,
@@ -103,6 +153,16 @@ export function ScreensLandingPage() {
{
title: t('Frame Delay'),
description: t('Average frame delay'),
+ docs: t('The average delay divided by the total rendering time.'),
+ setup: undefined,
+ platformDocLinks: {
+ Android: 'https://developer.android.com/topic/performance/vitals/render',
+ },
+ sdkDocLinks: {
+ Android:
+ 'https://docs.sentry.io/platforms/android/tracing/instrumentation/automatic-instrumentation/#slow-and-frozen-frames',
+ iOS: 'https://docs.sentry.io/platforms/apple/guides/ios/tracing/instrumentation/automatic-instrumentation/#slow-and-frozen-frames',
+ },
field: `avg(mobile.frames_delay)`,
dataset: DiscoverDatasets.SPANS_METRICS,
getStatus: getDefaultMetricPerformance,
@@ -110,6 +170,17 @@ export function ScreensLandingPage() {
{
title: t('TTID'),
description: t('Average time to initial display.'),
+ docs: t('The average time it takes until your app is drawing the first frame.'),
+ setup: undefined,
+ platformDocLinks: {
+ Android:
+ 'https://developer.android.com/topic/performance/vitals/launch-time#time-initial',
+ },
+ sdkDocLinks: {
+ Android:
+ 'https://docs.sentry.io/platforms/android/tracing/instrumentation/automatic-instrumentation/#time-to-initial-display',
+ iOS: 'https://docs.sentry.io/platforms/apple/features/experimental-features/',
+ },
field: `avg(measurements.time_to_initial_display)`,
dataset: DiscoverDatasets.METRICS,
getStatus: getDefaultMetricPerformance,
@@ -117,6 +188,17 @@ export function ScreensLandingPage() {
{
title: t('TTFD'),
description: t('Average time to full display.'),
+ docs: t('The average time it takes until your app is drawing the full content.'),
+ setup: ,
+ platformDocLinks: {
+ Android:
+ 'https://developer.android.com/topic/performance/vitals/launch-time#time-full',
+ },
+ sdkDocLinks: {
+ Android:
+ 'https://docs.sentry.io/platforms/android/tracing/instrumentation/automatic-instrumentation/#time-to-initial-display',
+ iOS: 'https://docs.sentry.io/platforms/apple/features/experimental-features/',
+ },
field: `avg(measurements.time_to_full_display)`,
dataset: DiscoverDatasets.METRICS,
getStatus: getDefaultMetricPerformance,
@@ -125,6 +207,10 @@ export function ScreensLandingPage() {
const metricsFields: string[] = new Array();
const spanMetricsFields: string[] = new Array();
+ const [state, setState] = useState<{
+ status: VitalStatus | undefined;
+ vital: VitalItem | undefined;
+ }>({status: undefined, vital: undefined});
vitalItems.forEach(element => {
if (element.dataset === DiscoverDatasets.METRICS) {
@@ -204,26 +290,6 @@ export function ScreensLandingPage() {
return undefined;
};
- const formattedMetricValueFor = (metric: MetricValue): React.ReactNode => {
- if (typeof metric.value === 'number' && metric.type === 'duration' && metric.unit) {
- return (
-
- );
- }
-
- if (typeof metric.value === 'number' && metric.type === 'number') {
- return {formatFloat(metric.value, 2)} ;
- }
-
- return {metric.value} ;
- };
-
return (
@@ -271,17 +337,21 @@ export function ScreensLandingPage() {
const metricValue = metricValueFor(item);
const status =
(metricValue && item.getStatus(metricValue)) ?? STATUS_UNKNOWN;
- const formattedValue: React.ReactNode =
- metricValue && formattedMetricValueFor(metricValue);
return (
{
+ setState({
+ vital: item,
+ status: status,
+ });
+ }}
key={item.field}
title={item.title}
description={item.description}
statusLabel={status.description}
status={status.score}
- formattedValue={formattedValue}
+ formattedValue={status.formattedValue}
/>
);
})}
@@ -291,6 +361,13 @@ export function ScreensLandingPage() {
+ {
+ setState({vital: undefined, status: undefined});
+ }}
+ />
From 86d47d7253b33981dc14d84b98e1b1945733035e Mon Sep 17 00:00:00 2001
From: Ogi <86684834+obostjancic@users.noreply.github.com>
Date: Thu, 3 Oct 2024 11:46:42 +0200
Subject: [PATCH 059/139] ref(onboarding): wizard onboarding layout (#78466)
---
.../onboarding/gettingStartedDoc/step.tsx | 41 +++--
.../gettingStartedDocs/javascript/astro.tsx | 4 +-
.../javascript/javascript.tsx | 4 +-
.../javascript/jsLoader/jsLoader.tsx | 4 +-
.../javascript/nextjs.spec.tsx | 15 +-
.../gettingStartedDocs/javascript/nextjs.tsx | 164 +++++++++++-------
.../javascript/remix.spec.tsx | 10 +-
.../gettingStartedDocs/javascript/remix.tsx | 67 +++++--
.../javascript/sveltekit.spec.tsx | 13 +-
.../javascript/sveltekit.tsx | 76 ++++++--
10 files changed, 261 insertions(+), 137 deletions(-)
diff --git a/static/app/components/onboarding/gettingStartedDoc/step.tsx b/static/app/components/onboarding/gettingStartedDoc/step.tsx
index 029f04c000f7ed..862d864e8afdad 100644
--- a/static/app/components/onboarding/gettingStartedDoc/step.tsx
+++ b/static/app/components/onboarding/gettingStartedDoc/step.tsx
@@ -126,15 +126,18 @@ interface BaseStepProps {
* Content that goes directly above the code snippet
*/
codeHeader?: React.ReactNode;
+ /**
+ * Whether the step instructions are collapsible
+ */
+ collapsible?: boolean;
+ /**
+ * An array of configurations to be displayed
+ */
configurations?: Configuration[];
/**
* A brief description of the step
*/
description?: React.ReactNode | React.ReactNode[];
- /**
- * Whether the step is optional
- */
- isOptional?: boolean;
/**
* Fired when the optional toggle is clicked.
* Useful for when we want to fire analytics events.
@@ -205,7 +208,7 @@ export function Step({
additionalInfo,
description,
onOptionalToggleClick,
- isOptional = false,
+ collapsible = false,
codeHeader,
}: StepProps) {
const [showOptionalConfig, setShowOptionalConfig] = useState(false);
@@ -248,25 +251,23 @@ export function Step({
);
- return isOptional ? (
+ return collapsible ? (
-
+ {
+ onOptionalToggleClick?.(!showOptionalConfig);
+ setShowOptionalConfig(!showOptionalConfig);
+ }}
+ >
+ {title ?? StepTitle[type]}
}
aria-label={t('Toggle optional configuration')}
- onClick={() => {
- onOptionalToggleClick?.(!showOptionalConfig);
- setShowOptionalConfig(!showOptionalConfig);
- }}
- >
-
- {title ?? StepTitle[type]}
- {t(' (Optional)')}
-
-
+ />
{showOptionalConfig ? config : null}
@@ -307,13 +308,15 @@ const GeneralAdditionalInfo = styled(Description)`
margin-top: ${space(2)};
`;
-const OptionalConfigWrapper = styled('div')`
+const OptionalConfigWrapper = styled('div')<{expanded: boolean}>`
display: flex;
+ gap: ${space(1)};
+ margin-bottom: ${p => (p.expanded ? space(2) : 0)};
cursor: pointer;
- margin-bottom: 0.5em;
`;
const ToggleButton = styled(Button)`
+ padding: 0;
&,
:hover {
color: ${p => p.theme.gray500};
diff --git a/static/app/gettingStartedDocs/javascript/astro.tsx b/static/app/gettingStartedDocs/javascript/astro.tsx
index abc538d2595b07..1c20b807619d07 100644
--- a/static/app/gettingStartedDocs/javascript/astro.tsx
+++ b/static/app/gettingStartedDocs/javascript/astro.tsx
@@ -218,7 +218,7 @@ const replayOnboarding: OnboardingConfig = {
],
configure: (params: Params) => [
{
- type: StepType.CONFIGURE,
+ title: 'Configure Session Replay (Optional)',
description: tct(
'There are several privacy and sampling options available. Learn more about configuring Session Replay by reading the [link:configuration docs].',
{
@@ -297,7 +297,7 @@ import * as Sentry from "@sentry/astro";`,
},
],
additionalInfo: ,
- isOptional: true,
+ collapsible: true,
},
],
verify: getReplayVerifyStep(),
diff --git a/static/app/gettingStartedDocs/javascript/javascript.tsx b/static/app/gettingStartedDocs/javascript/javascript.tsx
index 524309f46648ec..baa5e8b01805f2 100644
--- a/static/app/gettingStartedDocs/javascript/javascript.tsx
+++ b/static/app/gettingStartedDocs/javascript/javascript.tsx
@@ -197,11 +197,11 @@ const loaderScriptOnboarding: OnboardingConfig = {
],
configure: params => [
{
- type: StepType.CONFIGURE,
+ title: t('Configure SDK (Optional)'),
description: t(
"Initialize Sentry as early as possible in your application's lifecycle."
),
- isOptional: true,
+ collapsible: true,
configurations: [
{
language: 'html',
diff --git a/static/app/gettingStartedDocs/javascript/jsLoader/jsLoader.tsx b/static/app/gettingStartedDocs/javascript/jsLoader/jsLoader.tsx
index dcfdac8d8671d7..02259ed121e7bf 100644
--- a/static/app/gettingStartedDocs/javascript/jsLoader/jsLoader.tsx
+++ b/static/app/gettingStartedDocs/javascript/jsLoader/jsLoader.tsx
@@ -64,7 +64,7 @@ const replayOnboardingJsLoader: OnboardingConfig = {
install: (params: Params) => getInstallConfig(params),
configure: (params: Params) => [
{
- type: StepType.CONFIGURE,
+ title: t('Configure Session Replay (Optional)'),
description: getReplayConfigureDescription({
link: 'https://docs.sentry.io/platforms/javascript/session-replay/',
}),
@@ -74,7 +74,7 @@ const replayOnboardingJsLoader: OnboardingConfig = {
code: getReplayJsLoaderSdkSetupSnippet(params),
},
],
- isOptional: true,
+ collapsible: true,
additionalInfo: ,
},
],
diff --git a/static/app/gettingStartedDocs/javascript/nextjs.spec.tsx b/static/app/gettingStartedDocs/javascript/nextjs.spec.tsx
index f216a2682d729b..9f24cae07b7500 100644
--- a/static/app/gettingStartedDocs/javascript/nextjs.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/nextjs.spec.tsx
@@ -11,7 +11,9 @@ describe('javascript-nextjs onboarding docs', function () {
renderWithOnboardingLayout(docs);
// Renders main headings
- expect(screen.getByRole('heading', {name: 'Install'})).toBeInTheDocument();
+ expect(
+ screen.getByRole('heading', {name: 'Automatic Configuration (Recommended)'})
+ ).toBeInTheDocument();
// Includes configure statement
expect(
@@ -19,7 +21,7 @@ describe('javascript-nextjs onboarding docs', function () {
).toBeInTheDocument();
});
- it('displays the configure instructions', () => {
+ it('displays the verify instructions', () => {
renderWithOnboardingLayout(docs, {
selectedProducts: [
ProductSolution.ERROR_MONITORING,
@@ -29,14 +31,7 @@ describe('javascript-nextjs onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/sentry.client.config.js/))
- ).toBeInTheDocument();
- expect(screen.queryByText(textWithMarkupMatcher(/Sentry.init/))).toBeInTheDocument();
- expect(
- screen.queryByText(textWithMarkupMatcher(/.env.sentry-build-plugin/))
- ).toBeInTheDocument();
- expect(
- screen.queryByText(textWithMarkupMatcher(/instrumentation.ts/))
+ screen.queryByText(textWithMarkupMatcher(/sentry-example-page/))
).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/javascript/nextjs.tsx b/static/app/gettingStartedDocs/javascript/nextjs.tsx
index cf71156d3ee3a0..00e24eb868a253 100644
--- a/static/app/gettingStartedDocs/javascript/nextjs.tsx
+++ b/static/app/gettingStartedDocs/javascript/nextjs.tsx
@@ -42,7 +42,7 @@ const getInstallConfig = (params: Params) => {
return [
{
description: tct(
- 'Configure your app automatically with the [wizardLink:Sentry wizard].',
+ 'Configure your app automatically by running the [wizardLink:Sentry wizard] in the root of your project.',
{
wizardLink: (
@@ -78,65 +78,111 @@ const getManualInstallConfig = () => [
const onboarding: OnboardingConfig = {
install: (params: Params) => [
{
- type: StepType.INSTALL,
+ title: t('Automatic Configuration (Recommended)'),
configurations: getInstallConfig(params),
- additionalInfo: (
+ },
+ ],
+ configure: () => [
+ {
+ title: t('Manual Configuration'),
+ collapsible: true,
+ configurations: [
+ {
+ description: (
+
+
+ {tct(
+ 'Alternatively, you can also [manualSetupLink:set up the SDK manually], by following these steps:',
+ {
+ manualSetupLink: (
+
+ ),
+ }
+ )}
+
+
+
+ {tct(
+ 'Create [code:sentry.server.config.js], [code:sentry.client.config.js] and [code:sentry.edge.config.js] with the default [code:Sentry.init].',
+ {
+ code:
,
+ }
+ )}
+
+
+ {tct(
+ 'Create or update the Next.js instrumentation file [instrumentationCode:instrumentation.ts] to initialize the SDK with the configuration files added in the previous step.',
+ {
+ instrumentationCode:
,
+ }
+ )}
+
+
+ {tct(
+ 'Create or update your Next.js config [nextConfig:next.config.js] with the default Sentry configuration.',
+ {
+ nextConfig:
,
+ }
+ )}
+
+
+ {tct(
+ 'Create a [bundlerPluginsEnv:.env.sentry-build-plugin] with an auth token (which is used to upload source maps when building the application).',
+ {
+ bundlerPluginsEnv:
,
+ }
+ )}
+
+
+ {t('Add an example page to your app to verify your Sentry setup.')}
+
+
+
+ ),
+ },
+ ],
+ },
+ ],
+ verify: (params: Params) => [
+ {
+ type: StepType.VERIFY,
+ description: (
+
+ {tct(
+ 'Start your development server and visit [code:/sentry-example-page] if you have set it up. Click the button to trigger a test error.',
+ {
+ code:
,
+ }
+ )}
+
{t(
- 'The Sentry wizard will automatically patch your application to configure the Sentry SDK:'
+ 'Or, trigger a sample error by calling a function that does not exist somewhere in your application.'
)}
-
-
- {tct(
- 'Create [code:sentry.server.config.js], [code:sentry.client.config.js] and [code:sentry.edge.config.js] with the default [code:Sentry.init].',
- {
- code:
,
- }
- )}
-
-
- {tct(
- 'Create or update the Next.js instrumentation file [instrumentationCode:instrumentation.ts] to initialize the SDK with the configuration files added in the previous step.',
- {
- instrumentationCode:
,
- }
- )}
-
-
- {tct(
- 'Create or update your Next.js config [nextConfig:next.config.js] with the default Sentry configuration.',
- {
- nextConfig:
,
- }
- )}
-
-
- {tct(
- 'Create a [bundlerPluginsEnv:.env.sentry-build-plugin] with an auth token (which is used to upload source maps when building the application).',
- {
- bundlerPluginsEnv:
,
- }
- )}
-
-
- {t('Add an example page to your app to verify your Sentry setup.')}
-
-
-
- {t('Manual Setup')}
+
+ ),
+ configurations: [
+ {
+ code: [
+ {
+ label: 'Javascript',
+ value: 'javascript',
+ language: 'javascript',
+ code: `myUndefinedFunction();`,
+ },
+ ],
+ },
+ ],
+ additionalInfo: (
+
- {tct(
- 'Alternatively, you can also [manualSetupLink:set up the SDK manually].',
- {
- manualSetupLink: (
-
- ),
- }
+ {t(
+ 'If you see an issue in your Sentry dashboard, you have successfully set up Sentry with Next.js.'
)}
-
+
{tct(
@@ -162,8 +208,6 @@ const onboarding: OnboardingConfig = {
),
},
],
- configure: () => [],
- verify: () => [],
};
const replayOnboarding: OnboardingConfig = {
@@ -388,13 +432,17 @@ const DSNText = styled('div')`
margin-bottom: ${space(0.5)};
`;
-const ManualSetupTitle = styled('p')`
- font-size: ${p => p.theme.fontSizeLarge};
- font-weight: ${p => p.theme.fontWeightBold};
-`;
-
const AdditionalInfoWrapper = styled('div')`
display: flex;
flex-direction: column;
gap: ${space(2)};
`;
+
+const Divider = styled('hr')`
+ height: 1px;
+ width: 100%;
+ background: ${p => p.theme.border};
+ border: none;
+ margin-top: ${space(1)};
+ margin-bottom: ${space(2)};
+`;
diff --git a/static/app/gettingStartedDocs/javascript/remix.spec.tsx b/static/app/gettingStartedDocs/javascript/remix.spec.tsx
index 6112ddb2683976..58c7e0a303dadb 100644
--- a/static/app/gettingStartedDocs/javascript/remix.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/remix.spec.tsx
@@ -9,13 +9,11 @@ describe('javascript-remix onboarding docs', function () {
renderWithOnboardingLayout(docs);
// Renders main headings
- expect(screen.getByRole('heading', {name: 'Install'})).toBeInTheDocument();
- expect(screen.getByRole('heading', {name: 'Configure SDK'})).toBeInTheDocument();
-
- // Includes minimum required Astro version
- expect(screen.getByText(textWithMarkupMatcher(/Remix 1.0.0/))).toBeInTheDocument();
+ expect(
+ screen.getByRole('heading', {name: 'Automatic Configuration (Recommended)'})
+ ).toBeInTheDocument();
- // Includes wizard command statement
+ // Includes configure statement
expect(
screen.getByText(textWithMarkupMatcher(/npx @sentry\/wizard@latest -i remix/))
).toBeInTheDocument();
diff --git a/static/app/gettingStartedDocs/javascript/remix.tsx b/static/app/gettingStartedDocs/javascript/remix.tsx
index 1ef810dfb412f5..0aa14e7fd28674 100644
--- a/static/app/gettingStartedDocs/javascript/remix.tsx
+++ b/static/app/gettingStartedDocs/javascript/remix.tsx
@@ -31,10 +31,11 @@ type Params = DocsParams;
const getConfigStep = ({isSelfHosted, organization, projectSlug}: Params) => {
const urlParam = isSelfHosted ? '' : '--saas';
+
return [
{
description: tct(
- 'Configure your app automatically with the [wizardLink:Sentry wizard].',
+ 'Configure your app automatically by running the [wizardLink:Sentry wizard] in the root of your project.',
{
wizardLink: (
@@ -59,12 +60,23 @@ const onboarding: OnboardingConfig = {
tct("Sentry's integration with [remixLink:Remix] supports Remix 1.0.0 and above.", {
remixLink: ,
}),
- install: (params: Params) => getInstallConfig(params),
+ install: (params: Params) => [
+ {
+ title: t('Automatic Configuration (Recommended)'),
+ configurations: getConfigStep(params),
+ },
+ ],
configure: () => [
{
- type: StepType.CONFIGURE,
- description: t(
- 'The Sentry wizard will automatically add code to your project to inialize and configure the Sentry SDK:'
+ collapsible: true,
+ title: t('Manual Configuration'),
+ description: tct(
+ 'Alternatively, you can also [manualSetupLink:set up the SDK manually], by following these steps:',
+ {
+ manualSetupLink: (
+
+ ),
+ }
),
configurations: [
{
@@ -105,23 +117,46 @@ const onboarding: OnboardingConfig = {
),
},
+ ],
+ },
+ ],
+ verify: () => [
+ {
+ type: StepType.VERIFY,
+ description: (
+
+
+ {tct(
+ 'Start your development server and visit [code:/sentry-example-page] if you have set it up. Click the button to trigger a test error.',
+ {
+ code:
,
+ }
+ )}
+
+
+ {t(
+ 'Or, trigger a sample error by calling a function that does not exist somewhere in your application.'
+ )}
+
+
+ ),
+ configurations: [
{
- description: tct(
- 'You can also further [manualConfigure:configure your SDK] or [manualSetupLink:set it up manually], without the wizard.',
+ code: [
{
- manualConfigure: (
-
- ),
- manualSetupLink: (
-
- ),
- }
- ),
+ label: 'Javascript',
+ value: 'javascript',
+ language: 'javascript',
+ code: `myUndefinedFunction();`,
+ },
+ ],
},
],
+ additionalInfo: t(
+ 'If you see an issue in your Sentry dashboard, you have successfully set up Sentry.'
+ ),
},
],
- verify: () => [],
nextSteps: () => [],
};
diff --git a/static/app/gettingStartedDocs/javascript/sveltekit.spec.tsx b/static/app/gettingStartedDocs/javascript/sveltekit.spec.tsx
index a88d5c93e60f7f..8157e2dead41ca 100644
--- a/static/app/gettingStartedDocs/javascript/sveltekit.spec.tsx
+++ b/static/app/gettingStartedDocs/javascript/sveltekit.spec.tsx
@@ -11,8 +11,9 @@ describe('javascript-sveltekit onboarding docs', function () {
renderWithOnboardingLayout(docs);
// Renders main headings
- expect(screen.getByRole('heading', {name: 'Install'})).toBeInTheDocument();
- expect(screen.getByRole('heading', {name: 'Configure SDK'})).toBeInTheDocument();
+ expect(
+ screen.getByRole('heading', {name: 'Automatic Configuration (Recommended)'})
+ ).toBeInTheDocument();
// Includes configure statement
expect(
@@ -20,7 +21,7 @@ describe('javascript-sveltekit onboarding docs', function () {
).toBeInTheDocument();
});
- it('displays the configure instructions', () => {
+ it('displays the verify instructions', () => {
renderWithOnboardingLayout(docs, {
selectedProducts: [
ProductSolution.ERROR_MONITORING,
@@ -30,11 +31,7 @@ describe('javascript-sveltekit onboarding docs', function () {
});
expect(
- screen.queryByText(textWithMarkupMatcher(/vite.config.js/))
- ).toBeInTheDocument();
- expect(
- screen.queryByText(textWithMarkupMatcher(/src\/hooks.server.js/))
+ screen.queryByText(textWithMarkupMatcher(/sentry-example-page/))
).toBeInTheDocument();
- expect(screen.queryByText(textWithMarkupMatcher(/.sentryclirc/))).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/javascript/sveltekit.tsx b/static/app/gettingStartedDocs/javascript/sveltekit.tsx
index 51b7bc5df5f99f..42c6e4e4e2c778 100644
--- a/static/app/gettingStartedDocs/javascript/sveltekit.tsx
+++ b/static/app/gettingStartedDocs/javascript/sveltekit.tsx
@@ -1,3 +1,5 @@
+import {Fragment} from 'react';
+
import ExternalLink from 'sentry/components/links/externalLink';
import List from 'sentry/components/list/';
import ListItem from 'sentry/components/list/listItem';
@@ -27,14 +29,14 @@ import {t, tct} from 'sentry/locale';
type Params = DocsParams;
-const getInstallConfig = ({isSelfHosted, organization, projectSlug}: Params) => {
+const getConfigStep = ({isSelfHosted, organization, projectSlug}: Params) => {
const urlParam = isSelfHosted ? '' : '--saas';
return [
{
type: StepType.INSTALL,
description: tct(
- 'Configure your app automatically with the [wizardLink:Sentry wizard].',
+ 'Configure your app automatically by running the [wizardLink:Sentry wizard] in the root of your project.',
{
wizardLink: (
@@ -51,13 +53,31 @@ const getInstallConfig = ({isSelfHosted, organization, projectSlug}: Params) =>
];
};
+const getInstallConfig = (params: Params) => [
+ {
+ type: StepType.INSTALL,
+ configurations: getConfigStep(params),
+ },
+];
+
const onboarding: OnboardingConfig = {
- install: (params: Params) => getInstallConfig(params),
+ install: (params: Params) => [
+ {
+ title: t('Automatic Configuration (Recommended)'),
+ configurations: getConfigStep(params),
+ },
+ ],
configure: () => [
{
- type: StepType.CONFIGURE,
- description: t(
- 'The Sentry wizard will automatically patch your application to configure the Sentry SDK:'
+ title: t('Manual Configuration'),
+ collapsible: true,
+ description: tct(
+ 'Alternatively, you can also [manualSetupLink:set up the SDK manually], by following these steps:',
+ {
+ manualSetupLink: (
+
+ ),
+ }
),
configurations: [
{
@@ -89,19 +109,47 @@ const onboarding: OnboardingConfig = {
),
- additionalInfo: tct(
- 'Alternatively, you can also [manualSetupLink:set up the SDK manually].',
+ },
+ ],
+ },
+ ],
+ verify: () => [
+ {
+ type: StepType.VERIFY,
+ description: (
+
+
+ {tct(
+ 'Start your development server and visit [code:/sentry-example-page] if you have set it up. Click the button to trigger a test error.',
+ {
+ code:
,
+ }
+ )}
+
+
+ {t(
+ 'Or, trigger a sample error by calling a function that does not exist somewhere in your application.'
+ )}
+
+
+ ),
+ configurations: [
+ {
+ code: [
{
- manualSetupLink: (
-
- ),
- }
- ),
+ label: 'Javascript',
+ value: 'javascript',
+ language: 'javascript',
+ code: `myUndefinedFunction();`,
+ },
+ ],
},
],
+ additionalInfo: t(
+ 'If you see an issue in your Sentry dashboard, you have successfully set up Sentry.'
+ ),
},
],
- verify: () => [],
};
const replayOnboarding: OnboardingConfig = {
From da8f3a84dcbb1769738d4b5f35aa2af7a2e358d1 Mon Sep 17 00:00:00 2001
From: "Armen Zambrano G." <44410+armenzg@users.noreply.github.com>
Date: Thu, 3 Oct 2024 06:58:11 -0400
Subject: [PATCH 060/139] ref(deletions): Refactor error events task (#78378)
This is in preparation for Issue Platform deletions (#77794).
This is also a follow-up of #78293.
---
pyproject.toml | 1 +
src/sentry/deletions/defaults/group.py | 119 +++++++++++++++++--------
tests/sentry/deletions/test_group.py | 26 +++---
3 files changed, 96 insertions(+), 50 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 9da4cbf52b1779..d9bbffceab27ff 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -545,6 +545,7 @@ module = [
"sentry.web.frontend.auth_provider_login",
"sentry.web.frontend.csv",
"sentry_plugins.base",
+ "tests.sentry.deletions.test_group",
"tests.sentry.event_manager.test_event_manager",
"tests.sentry.grouping.test_fingerprinting",
"tests.sentry.hybridcloud.*",
diff --git a/src/sentry/deletions/defaults/group.py b/src/sentry/deletions/defaults/group.py
index 0a67e67bcc983c..0bb2f497bf813d 100644
--- a/src/sentry/deletions/defaults/group.py
+++ b/src/sentry/deletions/defaults/group.py
@@ -2,13 +2,15 @@
import os
from collections import defaultdict
-from collections.abc import Sequence
+from collections.abc import Mapping, Sequence
from typing import Any
from sentry import eventstore, eventstream, models, nodestore
from sentry.eventstore.models import Event
+from sentry.issues.grouptype import GroupCategory
from sentry.models.group import Group, GroupStatus
from sentry.models.rulefirehistory import RuleFireHistory
+from sentry.snuba.dataset import Dataset
from sentry.tasks.delete_seer_grouping_records import call_delete_seer_grouping_records_by_hash
from ..base import BaseDeletionTask, BaseRelation, ModelDeletionTask, ModelRelation
@@ -48,18 +50,21 @@
)
-class EventDataDeletionTask(BaseDeletionTask[Group]):
+class EventsBaseDeletionTask(BaseDeletionTask[Group]):
"""
- Deletes nodestore data, EventAttachment and UserReports for group
+ Base class to delete events associated with groups and their related models.
"""
# Number of events fetched from eventstore per chunk() call.
DEFAULT_CHUNK_SIZE = 10000
+ referrer = "deletions.group"
+ dataset: Dataset
def __init__(
self, manager: DeletionTaskManager, groups: Sequence[Group], **kwargs: Any
) -> None:
self.groups = groups
+ # Use self.last_event to keep track of the last event processed in the chunk method.
self.last_event: Event | None = None
self.set_group_and_project_ids()
super().__init__(manager, **kwargs)
@@ -73,25 +78,6 @@ def set_group_and_project_ids(self) -> None:
self.group_ids = group_ids
self.project_ids = list(self.project_groups.keys())
- def chunk(self) -> bool:
- """It deletes DEFAULT_CHUNK_SIZE number of events and related models.
- It returns a boolean to say if the deletion has completed."""
- events = self.get_unfetched_events()
- if events:
- self.delete_events_from_nodestore(events)
- self.delete_dangling_attachments_and_user_reports(events)
- # This value will be used in the next call to chunk
- self.last_event = events[-1]
- # As long as it returns True the task will keep iterating
- return True
- else:
- # Remove all group events now that their node data has been removed.
- for project_id, group_ids in self.project_groups.items():
- # A message is sent to Snuba that will handle deleting the events for the given groups in the project
- eventstream_state = eventstream.backend.start_delete_groups(project_id, group_ids)
- eventstream.backend.end_delete_groups(eventstream_state)
- return False
-
def get_unfetched_events(self) -> list[Event]:
conditions = []
if self.last_event is not None:
@@ -110,14 +96,45 @@ def get_unfetched_events(self) -> list[Event]:
conditions=conditions, project_ids=self.project_ids, group_ids=self.group_ids
),
limit=self.DEFAULT_CHUNK_SIZE,
- referrer="deletions.group",
+ referrer=self.referrer,
orderby=["-timestamp", "-event_id"],
- tenant_ids=(
- {"organization_id": self.groups[0].project.organization_id} if self.groups else None
- ),
+ tenant_ids=self.tenant_ids,
+ dataset=self.dataset,
)
return events
+ @property
+ def tenant_ids(self) -> Mapping[str, Any]:
+ result = {"referrer": self.referrer}
+ if self.groups:
+ result["organization_id"] = self.groups[0].project.organization_id
+ return result
+
+
+class ErrorEventsDeletionTask(EventsBaseDeletionTask):
+ """
+ Deletes nodestore data, EventAttachment and UserReports for requested groups.
+
+ This class uses the old Snuba deletion method.
+ """
+
+ dataset = Dataset.Events
+
+ def chunk(self) -> bool:
+ """This method is called to delete chunks of data. It returns a boolean to say
+ if the deletion has completed and if it needs to be called again."""
+ events = self.get_unfetched_events()
+ if events:
+ self.delete_events_from_nodestore(events)
+ self.delete_dangling_attachments_and_user_reports(events)
+ # This value will be used in the next call to chunk
+ self.last_event = events[-1]
+ # As long as it returns True the task will keep iterating
+ return True
+ else:
+ self.delete_events_from_snuba()
+ return False
+
def delete_events_from_nodestore(self, events: Sequence[Event]) -> None:
# Remove from nodestore
node_ids = [Event.generate_node_id(event.project_id, event.event_id) for event in events]
@@ -135,6 +152,12 @@ def delete_dangling_attachments_and_user_reports(self, events: Sequence[Event])
event_id__in=event_ids, project_id__in=self.project_ids
).delete()
+ def delete_events_from_snuba(self) -> None:
+ # Remove all group events now that their node data has been removed.
+ for project_id, group_ids in self.project_groups.items():
+ eventstream_state = eventstream.backend.start_delete_groups(project_id, group_ids)
+ eventstream.backend.end_delete_groups(eventstream_state)
+
class GroupDeletionTask(ModelDeletionTask[Group]):
# Delete groups in blocks of 1000. Using 1000 aims to
@@ -146,31 +169,41 @@ def delete_bulk(self, instance_list: Sequence[Group]) -> bool:
Group deletion operates as a quasi-bulk operation so that we don't flood
snuba replacements with deletions per group.
"""
- self.mark_deletion_in_progress(instance_list)
+ if not instance_list:
+ return True
- group_ids = [group.id for group in instance_list]
+ self.mark_deletion_in_progress(instance_list)
+ error_group_ids = [
+ group.id for group in instance_list if group.issue_category == GroupCategory.ERROR
+ ]
# Tell seer to delete grouping records with these group hashes
- call_delete_seer_grouping_records_by_hash(group_ids)
+ call_delete_seer_grouping_records_by_hash(error_group_ids)
+
+ self._delete_children(instance_list)
+
+ # Remove group objects with children removed.
+ self.delete_instance_bulk(instance_list)
+ return False
+
+ def _delete_children(self, instance_list: Sequence[Group]) -> None:
+ group_ids = [group.id for group in instance_list]
# Remove child relations for all groups first.
child_relations: list[BaseRelation] = []
for model in _GROUP_RELATED_MODELS:
child_relations.append(ModelRelation(model, {"group_id__in": group_ids}))
+ error_groups, _ = separate_by_group_category(instance_list)
+
# If this isn't a retention cleanup also remove event data.
if not os.environ.get("_SENTRY_CLEANUP"):
- child_relations.append(
- BaseRelation(params={"groups": instance_list}, task=EventDataDeletionTask)
- )
+ if error_groups:
+ params = {"groups": error_groups}
+ child_relations.append(BaseRelation(params=params, task=ErrorEventsDeletionTask))
self.delete_children(child_relations)
- # Remove group objects with children removed.
- self.delete_instance_bulk(instance_list)
-
- return False
-
def delete_instance(self, instance: Group) -> None:
from sentry import similarity
@@ -183,3 +216,15 @@ def mark_deletion_in_progress(self, instance_list: Sequence[Group]) -> None:
Group.objects.filter(id__in=[i.id for i in instance_list]).exclude(
status=GroupStatus.DELETION_IN_PROGRESS
).update(status=GroupStatus.DELETION_IN_PROGRESS, substatus=None)
+
+
+def separate_by_group_category(instance_list: Sequence[Group]) -> tuple[list[Group], list[Group]]:
+ error_groups = []
+ issue_platform_groups = []
+ for group in instance_list:
+ (
+ error_groups.append(group)
+ if group.issue_category == GroupCategory.ERROR
+ else issue_platform_groups.append(group)
+ )
+ return error_groups, issue_platform_groups
diff --git a/tests/sentry/deletions/test_group.py b/tests/sentry/deletions/test_group.py
index ae29b4825e34ea..7670239ec4dad2 100644
--- a/tests/sentry/deletions/test_group.py
+++ b/tests/sentry/deletions/test_group.py
@@ -3,10 +3,10 @@
from uuid import uuid4
from sentry import nodestore
-from sentry.deletions.defaults.group import EventDataDeletionTask
+from sentry.deletions.defaults.group import ErrorEventsDeletionTask
from sentry.deletions.tasks.groups import delete_groups
from sentry.eventstore.models import Event
-from sentry.issues.grouptype import ReplayDeadClickType
+from sentry.issues.grouptype import FeedbackGroup
from sentry.models.eventattachment import EventAttachment
from sentry.models.files.file import File
from sentry.models.group import Group
@@ -22,7 +22,7 @@
class DeleteGroupTest(TestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
one_minute = iso_format(before_now(minutes=1))
group1_data = {"timestamp": one_minute, "fingerprint": ["group1"]}
@@ -61,8 +61,8 @@ def setUp(self):
GroupMeta.objects.create(group=group, key="foo", value="bar")
GroupRedirect.objects.create(group_id=group.id, previous_group_id=1)
- def test_simple(self):
- EventDataDeletionTask.DEFAULT_CHUNK_SIZE = 1 # test chunking logic
+ def test_simple(self) -> None:
+ ErrorEventsDeletionTask.DEFAULT_CHUNK_SIZE = 1 # test chunking logic
group = self.event.group
assert nodestore.backend.get(self.node_id)
assert nodestore.backend.get(self.node_id2)
@@ -83,7 +83,7 @@ def test_simple(self):
assert nodestore.backend.get(self.keep_node_id), "Does not remove from second group"
assert Group.objects.filter(id=self.keep_event.group_id).exists()
- def test_simple_multiple_groups(self):
+ def test_simple_multiple_groups(self) -> None:
other_event = self.store_event(
data={"timestamp": iso_format(before_now(minutes=1)), "fingerprint": ["group3"]},
project_id=self.project.id,
@@ -102,7 +102,7 @@ def test_simple_multiple_groups(self):
assert Group.objects.filter(id=self.keep_event.group_id).exists()
assert nodestore.backend.get(self.keep_node_id)
- def test_grouphistory_relation(self):
+ def test_grouphistory_relation(self) -> None:
other_event = self.store_event(
data={"timestamp": iso_format(before_now(minutes=1)), "fingerprint": ["group3"]},
project_id=self.project.id,
@@ -133,7 +133,7 @@ def test_grouphistory_relation(self):
@mock.patch("os.environ.get")
@mock.patch("sentry.nodestore.delete_multi")
- def test_cleanup(self, nodestore_delete_multi, os_environ):
+ def test_cleanup(self, nodestore_delete_multi: mock.Mock, os_environ: mock.Mock) -> None:
os_environ.side_effect = lambda key: "1" if key == "_SENTRY_CLEANUP" else None
group = self.event.group
@@ -146,8 +146,8 @@ def test_cleanup(self, nodestore_delete_multi, os_environ):
"sentry.tasks.delete_seer_grouping_records.delete_seer_grouping_records_by_hash.apply_async"
)
def test_delete_groups_delete_grouping_records_by_hash(
- self, mock_delete_seer_grouping_records_by_hash_apply_async
- ):
+ self, mock_delete_seer_grouping_records_by_hash_apply_async: mock.Mock
+ ) -> None:
self.project.update_option("sentry:similarity_backfill_completed", int(time()))
other_event = self.store_event(
data={
@@ -182,13 +182,13 @@ def test_delete_groups_delete_grouping_records_by_hash(
class DeleteIssuePlatformTest(TestCase, SnubaTestCase, OccurrenceTestMixin):
- def test_issue_platform(self):
+ def test_issue_platform(self) -> None:
event = self.store_event(data={}, project_id=self.project.id)
issue_occurrence, group_info = self.process_occurrence(
event_id=event.event_id,
project_id=self.project.id,
# We are using ReplayDeadClickType as a representative of Issue Platform
- type=ReplayDeadClickType.type_id,
+ type=FeedbackGroup.type_id,
event_data={
"fingerprint": ["issue-platform-group"],
"timestamp": before_now(minutes=1).isoformat(),
@@ -207,7 +207,7 @@ def test_issue_platform(self):
assert nodestore.backend.get(node_id)
# The Issue Platform group and occurrence are deleted
- assert issue_platform_group.issue_type == ReplayDeadClickType
+ assert issue_platform_group.issue_type == FeedbackGroup
assert not Group.objects.filter(id=issue_platform_group.id).exists()
node_id = Event.generate_node_id(issue_occurrence.project_id, issue_occurrence.id)
assert not nodestore.backend.get(node_id)
From fda86cf1a0c23beb5add3f3b349ea32ba343c260 Mon Sep 17 00:00:00 2001
From: Sebastian Zivota
Date: Thu, 3 Oct 2024 13:26:06 +0200
Subject: [PATCH 061/139] fix: Remove logging and sampling related to INC-893
(#78508)
In commits 7c9844a09165538579c877bad0d7091797e2c613,
6bc52ff8ca17b9628fb70d57f4de1b7d34371855, and
8203c4e938b5194b30f5a9069a6ce4c682ed36c5 I added some logging and
sampling in an effort to diagnose INC-893. Since that incident is now
resolved, we can remove them again.
---
src/sentry/tasks/assemble.py | 28 ----------------------------
src/sentry/utils/sdk.py | 9 ---------
2 files changed, 37 deletions(-)
diff --git a/src/sentry/tasks/assemble.py b/src/sentry/tasks/assemble.py
index ef4690e9e77a59..4fce31f2c6d82e 100644
--- a/src/sentry/tasks/assemble.py
+++ b/src/sentry/tasks/assemble.py
@@ -240,10 +240,6 @@ def assemble_dif(project_id, name, checksum, chunks, debug_id=None, **kwargs):
project = Project.objects.filter(id=project_id).get()
set_assemble_status(AssembleTask.DIF, project_id, checksum, ChunkFileState.ASSEMBLING)
- logger.info(
- "assembling file",
- extra={"project_id": project_id, "checksum": checksum, "debug_id": debug_id},
- )
# Assemble the chunks into a temporary file
rv = assemble_file(
AssembleTask.DIF, project, name, checksum, chunks, file_type="project.dif"
@@ -255,10 +251,6 @@ def assemble_dif(project_id, name, checksum, chunks, debug_id=None, **kwargs):
if rv is None:
return
- logger.info(
- "file successfully assembled",
- extra={"project_id": project_id, "checksum": checksum, "debug_id": debug_id},
- )
file, temp_file = rv
delete_file = True
@@ -266,42 +258,22 @@ def assemble_dif(project_id, name, checksum, chunks, debug_id=None, **kwargs):
# We only permit split difs to hit this endpoint.
# The client is required to split them up first or we error.
try:
- logger.info(
- "detecting dif",
- extra={"project_id": project_id, "checksum": checksum, "debug_id": debug_id},
- )
result = detect_dif_from_path(temp_file.name, name=name, debug_id=debug_id)
except BadDif as e:
- logger.exception(
- "failed to detect dif",
- extra={"project_id": project_id, "checksum": checksum, "debug_id": debug_id},
- )
set_assemble_status(
AssembleTask.DIF, project_id, checksum, ChunkFileState.ERROR, detail=e.args[0]
)
return
if len(result) != 1:
- logger.error(
- "Object contains more than 1 architecture",
- extra={"project_id": project_id, "checksum": checksum, "debug_id": debug_id},
- )
detail = "Object contains %s architectures (1 expected)" % len(result)
set_assemble_status(
AssembleTask.DIF, project_id, checksum, ChunkFileState.ERROR, detail=detail
)
return
- logger.info(
- "creating `DIF` object",
- extra={"project_id": project_id, "checksum": checksum, "debug_id": debug_id},
- )
dif, created = create_dif_from_id(project, result[0], file=file)
delete_file = False
- logger.info(
- "`DIF` created",
- extra={"project_id": project_id, "checksum": checksum, "debug_id": debug_id},
- )
if created:
record_last_upload(project)
diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py
index 9cc9539a62e1e0..9ac72840765e3f 100644
--- a/src/sentry/utils/sdk.py
+++ b/src/sentry/utils/sdk.py
@@ -28,7 +28,6 @@
from sentry.utils.db import DjangoAtomicIntegration
from sentry.utils.flag import get_flags_serialized
from sentry.utils.rust import RustInfoIntegration
-from sentry.utils.safe import get_path
# Can't import models in utils because utils should be the bottom of the food chain
if TYPE_CHECKING:
@@ -195,14 +194,6 @@ def traces_sampler(sampling_context):
if "celery_job" in sampling_context:
task_name = sampling_context["celery_job"].get("task")
- # Temporarily sample the `assemble_dif` task at 100% for the
- # sentry-test/rust project for debugging purposes
- if (
- task_name == "sentry.tasks.assemble.assemble_dif"
- and get_path(sampling_context, "celery_job", "kwargs", "project_id") == 1041156
- ):
- return 1.0
-
if task_name in SAMPLED_TASKS:
return SAMPLED_TASKS[task_name]
From 5ba782e35555e4f4acddce7138be1c2f74df148b Mon Sep 17 00:00:00 2001
From: "Armen Zambrano G." <44410+armenzg@users.noreply.github.com>
Date: Thu, 3 Oct 2024 07:37:49 -0400
Subject: [PATCH 062/139] ref(tests): Validate current Issue Platform deletion
behaviour (#78421)
This reduces the review burden of #77794 and validates the current
behaviour.
---
tests/sentry/deletions/test_group.py | 92 +++++++++++++++++++++++-----
1 file changed, 77 insertions(+), 15 deletions(-)
diff --git a/tests/sentry/deletions/test_group.py b/tests/sentry/deletions/test_group.py
index 7670239ec4dad2..5095133f44ec4a 100644
--- a/tests/sentry/deletions/test_group.py
+++ b/tests/sentry/deletions/test_group.py
@@ -1,12 +1,18 @@
+import random
+from datetime import datetime, timedelta
from time import time
from unittest import mock
from uuid import uuid4
+from snuba_sdk import Column, Condition, Entity, Function, Op, Query, Request
+
from sentry import nodestore
from sentry.deletions.defaults.group import ErrorEventsDeletionTask
from sentry.deletions.tasks.groups import delete_groups
+from sentry.event_manager import GroupInfo
from sentry.eventstore.models import Event
-from sentry.issues.grouptype import FeedbackGroup
+from sentry.issues.grouptype import FeedbackGroup, GroupCategory
+from sentry.issues.issue_occurrence import IssueOccurrence
from sentry.models.eventattachment import EventAttachment
from sentry.models.files.file import File
from sentry.models.group import Group
@@ -16,8 +22,10 @@
from sentry.models.groupmeta import GroupMeta
from sentry.models.groupredirect import GroupRedirect
from sentry.models.userreport import UserReport
+from sentry.snuba.dataset import Dataset, EntityKey
from sentry.testutils.cases import SnubaTestCase, TestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
+from sentry.utils.snuba import bulk_snuba_queries
from tests.sentry.issues.test_utils import OccurrenceTestMixin
@@ -182,32 +190,86 @@ def test_delete_groups_delete_grouping_records_by_hash(
class DeleteIssuePlatformTest(TestCase, SnubaTestCase, OccurrenceTestMixin):
- def test_issue_platform(self) -> None:
- event = self.store_event(data={}, project_id=self.project.id)
- issue_occurrence, group_info = self.process_occurrence(
+ referrer = "testing.test"
+
+ def create_occurrence(
+ self, event: Event, type_id: int
+ ) -> tuple[IssueOccurrence, GroupInfo | None]:
+ occurrence, issue_platform_group = self.process_occurrence(
event_id=event.event_id,
- project_id=self.project.id,
- # We are using ReplayDeadClickType as a representative of Issue Platform
- type=FeedbackGroup.type_id,
- event_data={
- "fingerprint": ["issue-platform-group"],
- "timestamp": before_now(minutes=1).isoformat(),
- },
+ project_id=event.project.id,
+ type=type_id,
+ event_data={},
)
+ return occurrence, issue_platform_group
+
+ def select_issue_platform_events(self, project_id: int) -> object:
+ columns = ["event_id", "group_id", "occurrence_id"]
+ return self.select_rows(Entity(EntityKey.IssuePlatform.value), columns, project_id)
+
+ def select_rows(self, entity: Entity, columns: list[str], project_id: int) -> object:
+ # Adding the random microseconds is to circumvent Snuba's caching mechanism
+ now = datetime.now()
+ start_time = now - timedelta(days=1, microseconds=random.randint(0, 100000000))
+ end_time = now + timedelta(days=1, microseconds=random.randint(0, 100000000))
+
+ select = [Column(column) for column in columns]
+ where = [
+ Condition(Column("project_id"), Op.IN, Function("tuple", [project_id])),
+ Condition(Column("timestamp"), Op.GTE, start_time),
+ Condition(Column("timestamp"), Op.LT, end_time),
+ ]
+ query = Query(match=entity, select=select, where=where)
+ request = Request(
+ dataset=Dataset.IssuePlatform.value,
+ app_id=self.referrer,
+ query=query,
+ tenant_ids=self.tenant_ids,
+ )
+ results = bulk_snuba_queries([request])[0]["data"]
+ return results
+
+ @property
+ def tenant_ids(self) -> dict[str, str]:
+ return {"referrer": self.referrer, "organization_id": self.organization.id}
+
+ def test_issue_platform(self) -> None:
+ # Adding this query here to make sure that the cache is not being used
+ assert self.select_issue_platform_events(self.project.id) == []
+ # Create initial error event and occurrence related to it; two different groups will exist
+ event = self.store_event(data={}, project_id=self.project.id)
+ occurrence, group_info = self.create_occurrence(event, type_id=FeedbackGroup.type_id)
+
+ # Assertions after creation
+ assert occurrence.id != event.event_id
assert group_info is not None
issue_platform_group = group_info.group
assert event.group_id != issue_platform_group.id
+ assert event.group.issue_category == GroupCategory.ERROR
+ assert issue_platform_group.issue_category != GroupCategory.ERROR
+ # Assert that the occurrence has been inserted in Snuba
+ expected = [
+ {
+ "event_id": event.event_id,
+ "group_id": issue_platform_group.id,
+ "occurrence_id": occurrence.id,
+ }
+ ]
+ assert self.select_issue_platform_events(self.project.id) == expected
+ # This will delete the group and the events from the node store
with self.tasks():
delete_groups(object_ids=[issue_platform_group.id])
# The original event and group still exist
assert Group.objects.filter(id=event.group_id).exists()
- node_id = Event.generate_node_id(event.project_id, event.event_id)
- assert nodestore.backend.get(node_id)
+ event_node_id = Event.generate_node_id(event.project_id, event.event_id)
+ assert nodestore.backend.get(event_node_id)
# The Issue Platform group and occurrence are deleted
assert issue_platform_group.issue_type == FeedbackGroup
assert not Group.objects.filter(id=issue_platform_group.id).exists()
- node_id = Event.generate_node_id(issue_occurrence.project_id, issue_occurrence.id)
- assert not nodestore.backend.get(node_id)
+ occurrence_node_id = Event.generate_node_id(occurrence.project_id, occurrence.id)
+ assert not nodestore.backend.get(occurrence_node_id)
+ # We don't yet delete the occurrence from Snuba but it will expire with the TTL
+ assert self.select_issue_platform_events(self.project.id) == expected
From d96c6332889e2d8e1119bc60f1593766f1323f1a Mon Sep 17 00:00:00 2001
From: Ash <0Calories@users.noreply.github.com>
Date: Thu, 3 Oct 2024 10:23:10 -0400
Subject: [PATCH 063/139] chore(insights): `useNavigate` instead of
`browserHistory` in DB module search (#78519)
---
.../views/insights/database/views/databaseLandingPage.tsx | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/static/app/views/insights/database/views/databaseLandingPage.tsx b/static/app/views/insights/database/views/databaseLandingPage.tsx
index 44ec975b08d07d..b9e9b8c30bb1da 100644
--- a/static/app/views/insights/database/views/databaseLandingPage.tsx
+++ b/static/app/views/insights/database/views/databaseLandingPage.tsx
@@ -9,10 +9,10 @@ import {PageHeadingQuestionTooltip} from 'sentry/components/pageHeadingQuestionT
import SearchBar from 'sentry/components/searchBar';
import {t} from 'sentry/locale';
import {trackAnalytics} from 'sentry/utils/analytics';
-import {browserHistory} from 'sentry/utils/browserHistory';
import {decodeScalar, decodeSorts} from 'sentry/utils/queryString';
import {MutableSearch} from 'sentry/utils/tokenizeSearch';
import {useLocation} from 'sentry/utils/useLocation';
+import {useNavigate} from 'sentry/utils/useNavigate';
import useOrganization from 'sentry/utils/useOrganization';
import {useSynchronizeCharts} from 'sentry/views/insights/common/components/chart';
import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout';
@@ -70,13 +70,15 @@ export function DatabaseLandingPage() {
sort = DEFAULT_SORT;
}
+ const navigate = useNavigate();
+
const handleSearch = (newQuery: string) => {
trackAnalytics('insight.general.search', {
organization,
query: newQuery,
source: ModuleName.DB,
});
- browserHistory.push({
+ navigate({
...location,
query: {
...location.query,
From 610b7bd40d380c7fb4a80a8b987711266fcd753e Mon Sep 17 00:00:00 2001
From: Priscila Oliveira
Date: Thu, 3 Oct 2024 16:38:40 +0200
Subject: [PATCH 064/139] fix(source-maps): Update logic to display Unminify
Code button (#78548)
---
.../components/events/interfaces/frame/deprecatedLine.tsx | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/static/app/components/events/interfaces/frame/deprecatedLine.tsx b/static/app/components/events/interfaces/frame/deprecatedLine.tsx
index 9118969f3a9bcf..53e07d89451955 100644
--- a/static/app/components/events/interfaces/frame/deprecatedLine.tsx
+++ b/static/app/components/events/interfaces/frame/deprecatedLine.tsx
@@ -319,8 +319,9 @@ export class DeprecatedLine extends Component {
);
const frameHasValidFileEndingForSourceMapDebugger =
- VALID_SOURCE_MAP_DEBUGGER_FILE_ENDINGS.some(ending =>
- (data.absPath || data.filename || '').endsWith(ending)
+ VALID_SOURCE_MAP_DEBUGGER_FILE_ENDINGS.some(
+ ending =>
+ (data.absPath ?? '').endsWith(ending) || (data.filename ?? '').endsWith(ending)
);
const shouldShowSourceMapDebuggerButton =
From 1527e4dd796902f9562829c7119d91b1deb8b43a Mon Sep 17 00:00:00 2001
From: Abdullah Khan <60121741+Abdkhan14@users.noreply.github.com>
Date: Thu, 3 Oct 2024 10:43:30 -0400
Subject: [PATCH 065/139] feat(explore): Syncing chart cursors. (#78285)
https://github.com/user-attachments/assets/53982563-632e-4f7e-b1ed-12bf9bce8a59
Co-authored-by: Abdullah Khan
---
static/app/views/explore/charts/index.tsx | 15 ++++++++++++++-
1 file changed, 14 insertions(+), 1 deletion(-)
diff --git a/static/app/views/explore/charts/index.tsx b/static/app/views/explore/charts/index.tsx
index 87d898403a3c35..b5484c2dac005f 100644
--- a/static/app/views/explore/charts/index.tsx
+++ b/static/app/views/explore/charts/index.tsx
@@ -1,5 +1,6 @@
-import {Fragment, useCallback, useMemo} from 'react';
+import {Fragment, useCallback, useEffect, useMemo, useState} from 'react';
import styled from '@emotion/styled';
+import * as echarts from 'echarts/core';
import {getInterval} from 'sentry/components/charts/utils';
import {CompactSelect} from 'sentry/components/compactSelect';
@@ -44,6 +45,8 @@ const exploreChartTypeOptions = [
},
];
+export const EXPLORE_CHART_GROUP = 'explore-charts_group';
+
// TODO: Update to support aggregate mode and multiple queries / visualizations
export function ExploreCharts({query}: ExploreChartsProps) {
const pageFilters = usePageFilters();
@@ -113,6 +116,15 @@ export function ExploreCharts({query}: ExploreChartsProps) {
[visualizes, setVisualizes]
);
+ // Synchronize chart cursors
+ const [_, setRenderTrigger] = useState(0);
+ useEffect(() => {
+ if (!timeSeriesResult.isPending) {
+ echarts?.connect(EXPLORE_CHART_GROUP);
+ setRenderTrigger(prev => (prev + 1) % Number.MAX_SAFE_INTEGER);
+ }
+ }, [visualizes, timeSeriesResult.isPending]);
+
return (
{visualizes.map((visualize, index) => {
@@ -154,6 +166,7 @@ export function ExploreCharts({query}: ExploreChartsProps) {
data={getSeries(dedupedYAxes)}
error={timeSeriesResult.error}
loading={timeSeriesResult.isPending}
+ chartGroup={EXPLORE_CHART_GROUP}
// TODO Abdullah: Make chart colors dynamic, with changing topN events count and overlay count.
chartColors={CHART_PALETTE[TOP_EVENTS_LIMIT - 1]}
type={chartType}
From d32bfc22feb6753e011117e55131b209f87338d4 Mon Sep 17 00:00:00 2001
From: Tony Xiao
Date: Thu, 3 Oct 2024 11:06:55 -0400
Subject: [PATCH 066/139] feat(eap): Use RPC to fetch tag values (#78469)
This uses RPC calls to fetch tag values from eap spans.
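For reviewers skimming the hunk, the new path boils down to roughly the
following (a condensed sketch of the endpoint change below, using the same
identifiers as the patch; timestamp construction, the feature check, and
pagination are omitted):

```python
# Sketch only: variables such as `organization`, `snuba_params`, `key`,
# `query`, and `max_span_tag_values` come from the surrounding endpoint code.
rpc_request = AttributeValuesRequest(
    meta=RequestMeta(
        organization_id=organization.id,
        cogs_category="performance",
        referrer=Referrer.API_SPANS_TAG_VALUES_RPC.value,
        project_ids=snuba_params.project_ids,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        trace_item_name=TraceItemName.TRACE_ITEM_NAME_EAP_SPANS,
    ),
    name=key,                     # the tag key being autocompleted
    value_substring_match=query,  # user-typed substring, escapes translated
    limit=max_span_tag_values,
    offset=0,
)
rpc_response = snuba_rpc.rpc(rpc_request, AttributeValuesResponse)
values = [value for value in rpc_response.values if value]  # drop empty strings
```

The results are then wrapped in `TagValue` objects (with `times_seen`,
`first_seen`, and `last_seen` left as `None`) and paginated as before.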
---
.../endpoints/organization_spans_fields.py | 67 ++-
src/sentry/snuba/referrer.py | 2 +
src/sentry/utils/snuba_rpc.py | 7 +-
.../test_organization_spans_fields.py | 388 ++++++++++++++----
4 files changed, 390 insertions(+), 74 deletions(-)
diff --git a/src/sentry/api/endpoints/organization_spans_fields.py b/src/sentry/api/endpoints/organization_spans_fields.py
index 28f7ec8c478823..dfca6b5dee84f2 100644
--- a/src/sentry/api/endpoints/organization_spans_fields.py
+++ b/src/sentry/api/endpoints/organization_spans_fields.py
@@ -7,6 +7,8 @@
from rest_framework.request import Request
from rest_framework.response import Response
from sentry_protos.snuba.v1alpha.endpoint_tags_list_pb2 import (
+ AttributeValuesRequest,
+ AttributeValuesResponse,
TraceItemAttributesRequest,
TraceItemAttributesResponse,
)
@@ -20,6 +22,7 @@
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
from sentry.api.bases import NoProjects, OrganizationEventsV2EndpointBase
+from sentry.api.event_search import translate_escape_sequences
from sentry.api.paginator import ChainPaginator
from sentry.api.serializers import serialize
from sentry.api.utils import handle_query_errors
@@ -195,6 +198,68 @@ def get(self, request: Request, organization, key: str) -> Response:
max_span_tag_values = options.get("performance.spans-tags-values.max")
+ serializer = OrganizationSpansFieldsEndpointSerializer(data=request.GET)
+ if not serializer.is_valid():
+ return Response(serializer.errors, status=400)
+ serialized = serializer.validated_data
+
+ if serialized["dataset"] == "spans" and features.has(
+ "organizations:visibility-explore-dataset", organization, actor=request.user
+ ):
+ start_timestamp = Timestamp()
+ start_timestamp.FromDatetime(
+ snuba_params.start_date.replace(hour=0, minute=0, second=0, microsecond=0)
+ )
+
+ end_timestamp = Timestamp()
+ end_timestamp.FromDatetime(
+ snuba_params.end_date.replace(hour=0, minute=0, second=0, microsecond=0)
+ + timedelta(days=1)
+ )
+
+ query = translate_escape_sequences(request.GET.get("query", ""))
+ rpc_request = AttributeValuesRequest(
+ meta=RequestMeta(
+ organization_id=organization.id,
+ cogs_category="performance",
+ referrer=Referrer.API_SPANS_TAG_VALUES_RPC.value,
+ project_ids=snuba_params.project_ids,
+ start_timestamp=start_timestamp,
+ end_timestamp=end_timestamp,
+ trace_item_name=TraceItemName.TRACE_ITEM_NAME_EAP_SPANS,
+ ),
+ name=key,
+ value_substring_match=query,
+ limit=max_span_tag_values,
+ offset=0,
+ )
+ rpc_response = snuba_rpc.rpc(rpc_request, AttributeValuesResponse)
+
+ paginator = ChainPaginator(
+ [
+ [
+ TagValue(
+ key=key,
+ value=tag_value,
+ times_seen=None,
+ first_seen=None,
+ last_seen=None,
+ )
+ for tag_value in rpc_response.values
+ if tag_value
+ ]
+ ],
+ max_limit=max_span_tag_values,
+ )
+
+ return self.paginate(
+ request=request,
+ paginator=paginator,
+ on_results=lambda results: serialize(results, request.user),
+ default_per_page=max_span_tag_values,
+ max_per_page=max_span_tag_values,
+ )
+
executor = SpanFieldValuesAutocompletionExecutor(
snuba_params=snuba_params,
key=key,
@@ -339,7 +404,7 @@ def get_autocomplete_query_base(self) -> BaseQueryBuilder:
def get_autocomplete_results(self, query: BaseQueryBuilder) -> list[TagValue]:
with handle_query_errors():
- results = query.process_results(query.run_query(Referrer.API_SPANS_TAG_KEYS.value))
+ results = query.process_results(query.run_query(Referrer.API_SPANS_TAG_VALUES.value))
return [
TagValue(
diff --git a/src/sentry/snuba/referrer.py b/src/sentry/snuba/referrer.py
index fb4ec76c6796cb..fc41130707c75a 100644
--- a/src/sentry/snuba/referrer.py
+++ b/src/sentry/snuba/referrer.py
@@ -474,6 +474,8 @@ class Referrer(Enum):
API_TRACE_EXPLORER_TRACE_SPANS_LIST = "api.trace-explorer.trace-spans-list"
API_SPANS_TAG_KEYS = "api.spans.tags-keys"
API_SPANS_TAG_KEYS_RPC = "api.spans.tags-keys.rpc"
+ API_SPANS_TAG_VALUES = "api.spans.tags-values"
+ API_SPANS_TAG_VALUES_RPC = "api.spans.tags-values.rpc"
API_SPANS_TRACE_VIEW = "api.spans.trace-view"
# Performance Mobile UI Module
diff --git a/src/sentry/utils/snuba_rpc.py b/src/sentry/utils/snuba_rpc.py
index dc686ae9e78a20..63404a1286629b 100644
--- a/src/sentry/utils/snuba_rpc.py
+++ b/src/sentry/utils/snuba_rpc.py
@@ -56,9 +56,14 @@ def rpc(req: SnubaRPCRequest, resp_type: type[RPCResponseType]) -> RPCResponseTy
referrer = req.meta.referrer
with sentry_sdk.start_span(op="snuba_rpc.run", description=req.__class__.__name__) as span:
span.set_tag("snuba.referrer", referrer)
+
+ cls = req.__class__
+ class_name = cls.__name__
+ class_version = cls.__module__.split(".", 3)[2]
+
http_resp = _snuba_pool.urlopen(
"POST",
- f"/rpc/{req.__class__.__name__}/v1alpha",
+ f"/rpc/{class_name}/{class_version}",
body=req.SerializeToString(),
headers={
"referer": referrer,
diff --git a/tests/sentry/api/endpoints/test_organization_spans_fields.py b/tests/sentry/api/endpoints/test_organization_spans_fields.py
index 1c4138743f08cd..b17b6d71eefac8 100644
--- a/tests/sentry/api/endpoints/test_organization_spans_fields.py
+++ b/tests/sentry/api/endpoints/test_organization_spans_fields.py
@@ -1,3 +1,4 @@
+from unittest import mock
from uuid import uuid4
import pytest
@@ -87,7 +88,6 @@ def do_request(self, query=None, features=None, **kwargs):
**kwargs,
)
- @pytest.mark.skip("rpc seems to have changed")
def test_tags_list(self):
for tag in ["foo", "bar", "baz"]:
self.store_segment(
@@ -123,6 +123,7 @@ def test_tags_list(self):
class OrganizationSpansTagKeyValuesEndpointTest(BaseSpansTestCase, APITestCase):
+ is_eap = False
view = "sentry-api-0-organization-spans-fields-values"
def setUp(self):
@@ -131,7 +132,7 @@ def setUp(self):
def do_request(self, key: str, query=None, features=None, **kwargs):
if features is None:
- features = ["organizations:performance-trace-explorer", "organizations:global-views"]
+ features = ["organizations:performance-trace-explorer"]
with self.feature(features):
return self.client.get(
reverse(
@@ -160,44 +161,174 @@ def test_tags_keys(self):
uuid4().hex,
uuid4().hex,
span_id=uuid4().hex[:15],
+ organization_id=self.organization.id,
parent_span_id=None,
timestamp=timestamp,
transaction="foo",
duration=100,
exclusive_time=100,
tags={"tag": tag},
+ is_eap=self.is_eap,
)
response = self.do_request("tag")
assert response.status_code == 200, response.data
assert response.data == [
{
- "count": 1,
+ "count": mock.ANY,
"key": "tag",
"value": "bar",
"name": "bar",
- "firstSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
- "lastSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
},
{
- "count": 1,
+ "count": mock.ANY,
"key": "tag",
"value": "baz",
"name": "baz",
- "firstSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
- "lastSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
},
{
- "count": 1,
+ "count": mock.ANY,
"key": "tag",
"value": "foo",
"name": "foo",
- "firstSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
- "lastSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ ]
+
+ def test_transaction_keys_autocomplete(self):
+ timestamp = before_now(days=0, minutes=10).replace(microsecond=0)
+ for transaction in ["foo", "*bar", "*baz"]:
+ self.store_segment(
+ self.project.id,
+ uuid4().hex,
+ uuid4().hex,
+ span_id=uuid4().hex[:15],
+ organization_id=self.organization.id,
+ parent_span_id=None,
+ timestamp=timestamp,
+ transaction=transaction,
+ duration=100,
+ exclusive_time=100,
+ is_eap=self.is_eap,
+ )
+
+ key = "transaction"
+
+ response = self.do_request(key)
+ assert response.status_code == 200, response.data
+ assert response.data == [
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "*bar",
+ "name": "*bar",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "*baz",
+ "name": "*baz",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "foo",
+ "name": "foo",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ ]
+
+ def test_transaction_keys_autocomplete_substring(self):
+ timestamp = before_now(days=0, minutes=10).replace(microsecond=0)
+ for transaction in ["foo", "*bar", "*baz"]:
+ self.store_segment(
+ self.project.id,
+ uuid4().hex,
+ uuid4().hex,
+ span_id=uuid4().hex[:15],
+ organization_id=self.organization.id,
+ parent_span_id=None,
+ timestamp=timestamp,
+ transaction=transaction,
+ duration=100,
+ exclusive_time=100,
+ is_eap=self.is_eap,
+ )
+
+ key = "transaction"
+
+ response = self.do_request(key, query={"query": "b"})
+ assert response.status_code == 200, response.data
+ assert response.data == [
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "*bar",
+ "name": "*bar",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "*baz",
+ "name": "*baz",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ ]
+
+ def test_transaction_keys_autocomplete_substring_with_asterisk(self):
+ timestamp = before_now(days=0, minutes=10).replace(microsecond=0)
+ for transaction in ["foo", "*bar", "*baz"]:
+ self.store_segment(
+ self.project.id,
+ uuid4().hex,
+ uuid4().hex,
+ span_id=uuid4().hex[:15],
+ organization_id=self.organization.id,
+ parent_span_id=None,
+ timestamp=timestamp,
+ transaction=transaction,
+ duration=100,
+ exclusive_time=100,
+ is_eap=self.is_eap,
+ )
+
+ key = "transaction"
+
+ response = self.do_request(key, query={"query": r"\*b"})
+ assert response.status_code == 200, response.data
+ assert response.data == [
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "*bar",
+ "name": "*bar",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "*baz",
+ "name": "*baz",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
},
]
- def test_tags_keys_autocomplete_default(self):
+ def test_tags_keys_autocomplete(self):
timestamp = before_now(days=0, minutes=10).replace(microsecond=0)
for tag in ["foo", "*bar", "*baz"]:
self.store_segment(
@@ -205,70 +336,128 @@ def test_tags_keys_autocomplete_default(self):
uuid4().hex,
uuid4().hex,
span_id=uuid4().hex[:15],
+ organization_id=self.organization.id,
parent_span_id=None,
timestamp=timestamp,
- transaction=tag,
+ transaction="transaction",
duration=100,
exclusive_time=100,
tags={"tag": tag},
+ is_eap=self.is_eap,
)
- for key in ["tag", "transaction"]:
- response = self.do_request(key)
- assert response.status_code == 200, response.data
- assert response.data == [
- {
- "count": 1,
- "key": key,
- "value": "*bar",
- "name": "*bar",
- "firstSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
- "lastSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
- },
- {
- "count": 1,
- "key": key,
- "value": "*baz",
- "name": "*baz",
- "firstSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
- "lastSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
- },
- {
- "count": 1,
- "key": key,
- "value": "foo",
- "name": "foo",
- "firstSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
- "lastSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
- },
- ]
+ key = "tag"
- for key, query in [
- ("tag", "b"),
- ("transaction", "b"),
- ("tag", r"\*b"),
- ("transaction", r"\*b"),
- ]:
- response = self.do_request(key, query={"query": query})
- assert response.status_code == 200, response.data
- assert response.data == [
- {
- "count": 1,
- "key": key,
- "value": "*bar",
- "name": "*bar",
- "firstSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
- "lastSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
- },
- {
- "count": 1,
- "key": key,
- "value": "*baz",
- "name": "*baz",
- "firstSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
- "lastSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
- },
- ]
+ response = self.do_request(key)
+ assert response.status_code == 200, response.data
+ assert response.data == [
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "*bar",
+ "name": "*bar",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "*baz",
+ "name": "*baz",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "foo",
+ "name": "foo",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ ]
+
+ def test_tags_keys_autocomplete_substring(self):
+ timestamp = before_now(days=0, minutes=10).replace(microsecond=0)
+ for tag in ["foo", "*bar", "*baz"]:
+ self.store_segment(
+ self.project.id,
+ uuid4().hex,
+ uuid4().hex,
+ span_id=uuid4().hex[:15],
+ organization_id=self.organization.id,
+ parent_span_id=None,
+ timestamp=timestamp,
+ transaction="transaction",
+ duration=100,
+ exclusive_time=100,
+ tags={"tag": tag},
+ is_eap=self.is_eap,
+ )
+
+ key = "tag"
+
+ response = self.do_request(key, query={"query": "b"})
+ assert response.status_code == 200, response.data
+ assert response.data == [
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "*bar",
+ "name": "*bar",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "*baz",
+ "name": "*baz",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ ]
+
+ def test_tags_keys_autocomplete_substring_with_asterisks(self):
+ timestamp = before_now(days=0, minutes=10).replace(microsecond=0)
+ for tag in ["foo", "*bar", "*baz"]:
+ self.store_segment(
+ self.project.id,
+ uuid4().hex,
+ uuid4().hex,
+ span_id=uuid4().hex[:15],
+ organization_id=self.organization.id,
+ parent_span_id=None,
+ timestamp=timestamp,
+ transaction="transaction",
+ duration=100,
+ exclusive_time=100,
+ tags={"tag": tag},
+ is_eap=self.is_eap,
+ )
+
+ key = "tag"
+
+ response = self.do_request(key, query={"query": r"\*b"})
+ assert response.status_code == 200, response.data
+ assert response.data == [
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "*bar",
+ "name": "*bar",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ {
+ "count": mock.ANY,
+ "key": key,
+ "value": "*baz",
+ "name": "*baz",
+ "firstSeen": mock.ANY,
+ "lastSeen": mock.ANY,
+ },
+ ]
def test_tags_keys_autocomplete_noop(self):
timestamp = before_now(days=0, minutes=10).replace(microsecond=0)
@@ -278,12 +467,14 @@ def test_tags_keys_autocomplete_noop(self):
uuid4().hex,
uuid4().hex,
span_id=uuid4().hex[:15],
+ organization_id=self.organization.id,
parent_span_id=None,
timestamp=timestamp,
transaction=tag,
duration=100,
exclusive_time=100,
tags={"tag": tag},
+ is_eap=self.is_eap,
)
for key in [
@@ -315,8 +506,13 @@ def test_tags_keys_autocomplete_project(self):
self.create_project(id=base_id + 299, name="bar")
self.create_project(id=base_id + 399, name="baz")
+ features = [
+ "organizations:performance-trace-explorer",
+ "organizations:global-views",
+ ]
+
for key in ["project", "project.name"]:
- response = self.do_request(key)
+ response = self.do_request(key, features=features)
assert response.status_code == 200, response.data
assert sorted(response.data, key=lambda v: v["value"]) == [
{
@@ -345,7 +541,7 @@ def test_tags_keys_autocomplete_project(self):
},
]
- response = self.do_request(key, query={"query": "ba"})
+ response = self.do_request(key, query={"query": "ba"}, features=features)
assert response.status_code == 200, response.data
assert sorted(response.data, key=lambda v: v["value"]) == [
{
@@ -368,7 +564,7 @@ def test_tags_keys_autocomplete_project(self):
key = "project.id"
- response = self.do_request(key)
+ response = self.do_request(key, features=features)
assert response.status_code == 200, response.data
assert sorted(response.data, key=lambda v: v["value"]) == [
{
@@ -397,7 +593,7 @@ def test_tags_keys_autocomplete_project(self):
},
]
- response = self.do_request(key, query={"query": "99"})
+ response = self.do_request(key, query={"query": "99"}, features=features)
assert response.status_code == 200, response.data
assert sorted(response.data, key=lambda v: v["value"]) == [
{
@@ -426,10 +622,12 @@ def test_tags_keys_autocomplete_span_status(self):
uuid4().hex,
uuid4().hex,
span_id=uuid4().hex[:15],
+ organization_id=self.organization.id,
parent_span_id=None,
timestamp=timestamp,
transaction="foo",
status=status,
+ is_eap=self.is_eap,
)
response = self.do_request("span.status")
@@ -481,3 +679,49 @@ def test_tags_keys_autocomplete_span_status(self):
"lastSeen": timestamp.strftime("%Y-%m-%dT%H:%M:%S+00:00"),
},
]
+
+
+class OrganizationEAPSpansTagKeyValuesEndpointTest(OrganizationSpansTagKeyValuesEndpointTest):
+ is_eap = True
+
+ def do_request(self, key: str, query=None, features=None, **kwargs):
+ if features is None:
+ features = ["organizations:performance-trace-explorer"]
+
+ features.append("organizations:visibility-explore-dataset")
+
+ if query is None:
+ query = {}
+ query["dataset"] = "spans"
+ query["type"] = "string"
+
+ with self.feature(features):
+ return self.client.get(
+ reverse(
+ self.view,
+ kwargs={"organization_id_or_slug": self.organization.slug, "key": key},
+ ),
+ query,
+ format="json",
+ **kwargs,
+ )
+
+    @pytest.mark.skip("autocomplete project doesn't work yet")
+    def test_tags_keys_autocomplete_project(self):
+        super().test_tags_keys_autocomplete_project()
+
+    @pytest.mark.skip("autocomplete span.status doesn't work yet")
+    def test_tags_keys_autocomplete_span_status(self):
+        super().test_tags_keys_autocomplete_span_status()
+
+    @pytest.mark.skip("autocomplete transaction doesn't work yet")
+    def test_transaction_keys_autocomplete(self):
+        super().test_transaction_keys_autocomplete()
+
+    @pytest.mark.skip("autocomplete transaction doesn't work yet")
+    def test_transaction_keys_autocomplete_substring(self):
+        super().test_transaction_keys_autocomplete_substring()
+
+    @pytest.mark.skip("autocomplete transaction doesn't work yet")
+    def test_transaction_keys_autocomplete_substring_with_asterisk(self):
+        super().test_transaction_keys_autocomplete_substring_with_asterisk()
From 9b79fb45c3761142583270819ce27184640c1ee4 Mon Sep 17 00:00:00 2001
From: Katie Byers
Date: Thu, 3 Oct 2024 08:09:00 -0700
Subject: [PATCH 067/139] ref(grouping): Combine `_assign_event_to_group` and
`_save_aggregate_new` (#78268)
Now that the optimized grouping config transition logic is enabled for everyone and we've gotten rid of the old code path (specifically `_save_aggregate`), we can do some refactoring to clean things up. Before the removal, `assign_event_to_group` conditionally called either `_save_aggregate` or `_save_aggregate_new`. Now that the former is gone, `assign_event_to_group` is just a wrapper around `_save_aggregate_new` and there's no longer a need for them to be separate functions.
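For orientation, the old shape was a thin wrapper (simplified from the removed
lines in the diff below):

```python
# Before this change (simplified): assign_event_to_group only delegated
# to _save_aggregate_new and then did the job/event bookkeeping.
def assign_event_to_group(event: Event, job: Job, metric_tags: MutableTags) -> GroupInfo | None:
    group_info = _save_aggregate_new(event=event, job=job, metric_tags=metric_tags)
    # group_info is only None after a hash collision with a non-error-type group
    if group_info:
        event.group = group_info.group
        job["groups"] = [group_info]
    return group_info

# After: _save_aggregate_new is renamed to assign_event_to_group and the
# bookkeeping above moves to the end of its body, leaving a single function.
```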
---
src/sentry/event_manager.py | 28 ++++++-------------
.../grouping/test_group_creation_lock.py | 4 +--
2 files changed, 11 insertions(+), 21 deletions(-)
diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py
index 5463cc9146937c..3314adc11af085 100644
--- a/src/sentry/event_manager.py
+++ b/src/sentry/event_manager.py
@@ -1290,24 +1290,7 @@ def get_culprit(data: Mapping[str, Any]) -> str:
@sentry_sdk.tracing.trace
-def assign_event_to_group(event: Event, job: Job, metric_tags: MutableTags) -> GroupInfo | None:
- group_info = _save_aggregate_new(
- event=event,
- job=job,
- metric_tags=metric_tags,
- )
-
- # The only way there won't be group info is we matched to a performance, cron, replay, or
- # other-non-error-type group because of a hash collision - exceedingly unlikely, and not
- # something we've ever observed, but theoretically possible.
- if group_info:
- event.group = group_info.group
- job["groups"] = [group_info]
-
- return group_info
-
-
-def _save_aggregate_new(
+def assign_event_to_group(
event: Event,
job: Job,
metric_tags: MutableTags,
@@ -1361,6 +1344,13 @@ def _save_aggregate_new(
# erroneously create new groups.
update_grouping_config_if_needed(project, "ingest")
+ # The only way there won't be group info is we matched to a performance, cron, replay, or
+ # other-non-error-type group because of a hash collision - exceedingly unlikely, and not
+ # something we've ever observed, but theoretically possible.
+ if group_info:
+ event.group = group_info.group
+ job["groups"] = [group_info]
+
return group_info
@@ -1420,7 +1410,7 @@ def handle_existing_grouphash(
# (otherwise the update would not change anything)
#
# We think this is a very unlikely situation. A previous version of
- # _save_aggregate had races around group creation which made this race
+ # this function had races around group creation which made this race
# more user visible. For more context, see 84c6f75a and d0e22787, as
# well as GH-5085.
group = Group.objects.get(id=existing_grouphash.group_id)
diff --git a/tests/sentry/event_manager/grouping/test_group_creation_lock.py b/tests/sentry/event_manager/grouping/test_group_creation_lock.py
index 988d5e51ef4ed1..18f4fddfcdbf5d 100644
--- a/tests/sentry/event_manager/grouping/test_group_creation_lock.py
+++ b/tests/sentry/event_manager/grouping/test_group_creation_lock.py
@@ -5,7 +5,7 @@
import pytest
-from sentry.event_manager import GroupInfo, _save_aggregate_new
+from sentry.event_manager import GroupInfo, assign_event_to_group
from sentry.eventstore.models import Event
from sentry.testutils.pytest.fixtures import django_db_all
@@ -26,7 +26,7 @@ def save_event(project_id: int, return_values: list[GroupInfo]) -> None:
data={"timestamp": time.time()},
)
- group_info = _save_aggregate_new(
+ group_info = assign_event_to_group(
event=event,
job={"event_metadata": {}, "release": "dogpark", "event": event, "data": {}},
metric_tags={},
From 58e3ccdc8d0c66f51701973eff0930912a468873 Mon Sep 17 00:00:00 2001
From: Katie Byers
Date: Thu, 3 Oct 2024 08:14:39 -0700
Subject: [PATCH 068/139] feat(grouping): Store latest grouping config in
grouphash metadata (#78366)
There have been times, looking at records in the `GroupHash` table, when we've wished we knew whether a given hash was current, or just cruft left over from some older version of the grouping code or some older config now no longer in use. For `GroupHash` records with an associated `GroupHashMetadata` record, we can already sort of answer the first question, because we now have a creation date for each grouphash. This PR gives us the ability to answer the second, by adding to the metadata the grouping config used to calculate the hash.
Notes:
- The data stored is actually the _latest_ grouping config, because we want to be able to distinguish between a hash originally calculated with legacy config X which is still being produced by current config Y, and a hash originally calculated with legacy config X which is no longer being produced by current config Y. In the former case, the grouphash is still actively being used to match events to its group. In the latter case, the grouphash is effectively dead - we'll never use it to match another event to the group - and so it can eventually be culled. (Be it via cronjob or one-off cleanup, we don't yet have a system which could do said culling, but at least now we'll have the information when we need it.) The update logic is sketched after these notes.
- Because there are already existing `GroupHashMetadata` records without a config set, the field is nullable. Fortunately, we know that there's only been one grouping config active during the time we've been creating records, so it will be easy to backfill the data in a follow-up PR.
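Condensed, the ingest-side bookkeeping works roughly like this (a sketch based
on the `hashing.py` hunk below; the killswitch option and feature-flag checks
are omitted, and the `get_or_create` call is assumed from the surrounding loop
that the hunk elides):

```python
for hash_value in hashes:
    # Assumed shape of the existing lookup; the hunk below only shows the body.
    grouphash, created = GroupHash.objects.get_or_create(project=project, hash=hash_value)

    if created:
        # New hash: record which config produced it.
        GroupHashMetadata.objects.create(
            grouphash=grouphash,
            latest_grouping_config=grouping_config,
        )
    elif grouphash.metadata and grouphash.metadata.latest_grouping_config != grouping_config:
        # Existing hash produced again under a newer config: keep the metadata
        # current so stale grouphashes can eventually be culled.
        grouphash.metadata.update(latest_grouping_config=grouping_config)
```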
---
migrations_lockfile.txt | 2 +-
src/sentry/event_manager.py | 2 +-
src/sentry/grouping/ingest/hashing.py | 24 +++++--
...d_grouping_config_to_grouphash_metadata.py | 33 +++++++++
src/sentry/models/grouphashmetadata.py | 6 ++
.../grouping/test_grouphash_metadata.py | 70 +++++++++++++++++++
6 files changed, 128 insertions(+), 9 deletions(-)
create mode 100644 src/sentry/migrations/0771_add_grouping_config_to_grouphash_metadata.py
diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt
index 2bff062a861269..69f99c692ce68b 100644
--- a/migrations_lockfile.txt
+++ b/migrations_lockfile.txt
@@ -10,7 +10,7 @@ hybridcloud: 0016_add_control_cacheversion
nodestore: 0002_nodestore_no_dictfield
remote_subscriptions: 0003_drop_remote_subscription
replays: 0004_index_together
-sentry: 0770_increase_project_slug_max_length
+sentry: 0771_add_grouping_config_to_grouphash_metadata
social_auth: 0002_default_auto_field
uptime: 0016_translate_uptime_object_headers_to_lists
workflow_engine: 0008_detector_state
diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py
index 3314adc11af085..b0942eeb950c17 100644
--- a/src/sentry/event_manager.py
+++ b/src/sentry/event_manager.py
@@ -1376,7 +1376,7 @@ def get_hashes_and_grouphashes(
grouping_config, hashes = hash_calculation_function(project, job, metric_tags)
if hashes:
- grouphashes = get_or_create_grouphashes(project, hashes)
+ grouphashes = get_or_create_grouphashes(project, hashes, grouping_config["id"])
existing_grouphash = find_grouphash_with_group(grouphashes)
diff --git a/src/sentry/grouping/ingest/hashing.py b/src/sentry/grouping/ingest/hashing.py
index 60a19bc3b4d916..0db54211628061 100644
--- a/src/sentry/grouping/ingest/hashing.py
+++ b/src/sentry/grouping/ingest/hashing.py
@@ -215,7 +215,9 @@ def find_grouphash_with_group(
return None
-def get_or_create_grouphashes(project: Project, hashes: Sequence[str]) -> list[GroupHash]:
+def get_or_create_grouphashes(
+ project: Project, hashes: Sequence[str], grouping_config: str
+) -> list[GroupHash]:
grouphashes = []
for hash_value in hashes:
@@ -223,13 +225,21 @@ def get_or_create_grouphashes(project: Project, hashes: Sequence[str]) -> list[G
# TODO: Do we want to expand this to backfill metadata for existing grouphashes? If we do,
# we'll have to override the metadata creation date for them.
- if (
- created
- and options.get("grouping.grouphash_metadata.ingestion_writes_enabled")
- and features.has("organizations:grouphash-metadata-creation", project.organization)
+ if options.get("grouping.grouphash_metadata.ingestion_writes_enabled") and features.has(
+ "organizations:grouphash-metadata-creation", project.organization
):
- # For now, this just creates a record with a creation timestamp
- GroupHashMetadata.objects.create(grouphash=grouphash)
+ if created:
+ GroupHashMetadata.objects.create(
+ grouphash=grouphash,
+ latest_grouping_config=grouping_config,
+ )
+ elif (
+ grouphash.metadata and grouphash.metadata.latest_grouping_config != grouping_config
+ ):
+ # Keep track of the most recent config which computed this hash, so that once a
+ # config is deprecated, we can clear out the GroupHash records which are no longer
+ # being produced
+ grouphash.metadata.update(latest_grouping_config=grouping_config)
grouphashes.append(grouphash)
diff --git a/src/sentry/migrations/0771_add_grouping_config_to_grouphash_metadata.py b/src/sentry/migrations/0771_add_grouping_config_to_grouphash_metadata.py
new file mode 100644
index 00000000000000..48bfb3c9b48d42
--- /dev/null
+++ b/src/sentry/migrations/0771_add_grouping_config_to_grouphash_metadata.py
@@ -0,0 +1,33 @@
+# Generated by Django 5.1.1 on 2024-10-01 02:06
+
+from django.db import migrations, models
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0770_increase_project_slug_max_length"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="grouphashmetadata",
+ name="latest_grouping_config",
+ field=models.CharField(null=True),
+ ),
+ ]
diff --git a/src/sentry/models/grouphashmetadata.py b/src/sentry/models/grouphashmetadata.py
index 849293ac118160..64c6e16a940b1a 100644
--- a/src/sentry/models/grouphashmetadata.py
+++ b/src/sentry/models/grouphashmetadata.py
@@ -18,6 +18,12 @@ class GroupHashMetadata(Model):
)
date_added = models.DateTimeField(default=timezone.now)
+ # HASHING
+
+ # Most recent config to produce this hash
+ # TODO: Backfill the current config for grouphashes with metadata and then make this non-nullable
+ latest_grouping_config = models.CharField(null=True)
+
# SEER
# When this hash was sent to Seer. This will be different than `date_added` if we send it to
diff --git a/tests/sentry/event_manager/grouping/test_grouphash_metadata.py b/tests/sentry/event_manager/grouping/test_grouphash_metadata.py
index c3bbc2824a42d3..0b91f6b629cc55 100644
--- a/tests/sentry/event_manager/grouping/test_grouphash_metadata.py
+++ b/tests/sentry/event_manager/grouping/test_grouphash_metadata.py
@@ -1,17 +1,28 @@
from __future__ import annotations
+from time import time
+
from sentry.models.grouphash import GroupHash
from sentry.models.grouphashmetadata import GroupHashMetadata
+from sentry.projectoptions.defaults import DEFAULT_GROUPING_CONFIG, LEGACY_GROUPING_CONFIG
from sentry.testutils.cases import TestCase
from sentry.testutils.helpers import Feature
from sentry.testutils.helpers.eventprocessing import save_new_event
+from sentry.testutils.helpers.features import with_feature
from sentry.testutils.helpers.options import override_options
from sentry.testutils.skips import requires_snuba
+from sentry.utils.types import NonNone
pytestmark = [requires_snuba]
class GroupHashMetadataTest(TestCase):
+ # Helper method to save us from having to assert the existence of `grouphash` and
+ # `grouphash.metadata` every time we want to check a value
+ def assert_metadata_value(self, grouphash, value_name, value):
+ assert grouphash and grouphash.metadata
+ assert getattr(grouphash.metadata, value_name) == value
+
def test_creates_grouphash_metadata_when_appropriate(self):
# The killswitch is obeyed
with override_options({"grouping.grouphash_metadata.ingestion_writes_enabled": False}):
@@ -44,3 +55,62 @@ def test_creates_grouphash_metadata_when_appropriate(self):
project=self.project, hash=event4.get_primary_hash()
).first()
assert grouphash and grouphash.metadata is None
+
+ @with_feature("organizations:grouphash-metadata-creation")
+ def test_stores_grouping_config(self):
+ event = save_new_event({"message": "Dogs are great!"}, self.project)
+ grouphash = GroupHash.objects.filter(
+ project=self.project, hash=event.get_primary_hash()
+ ).first()
+
+ self.assert_metadata_value(grouphash, "latest_grouping_config", DEFAULT_GROUPING_CONFIG)
+
+ @with_feature("organizations:grouphash-metadata-creation")
+ def test_updates_grouping_config(self):
+ self.project.update_option("sentry:grouping_config", LEGACY_GROUPING_CONFIG)
+
+ event1 = save_new_event({"message": "Dogs are great!"}, self.project)
+ grouphash1 = GroupHash.objects.filter(
+ project=self.project, hash=event1.get_primary_hash()
+ ).first()
+
+ self.assert_metadata_value(grouphash1, "latest_grouping_config", LEGACY_GROUPING_CONFIG)
+
+ # Update the grouping config. Since there's nothing to parameterize in the message, the
+ # result should be the same under both configs.
+ self.project.update_option("sentry:grouping_config", DEFAULT_GROUPING_CONFIG)
+
+ event2 = save_new_event({"message": "Dogs are great!"}, self.project)
+ grouphash2 = GroupHash.objects.filter(
+ project=self.project, hash=event2.get_primary_hash()
+ ).first()
+
+ self.assert_metadata_value(grouphash2, "latest_grouping_config", DEFAULT_GROUPING_CONFIG)
+
+ # Make sure we're dealing with a single grouphash that got updated rather than two different grouphashes
+ assert grouphash1 and grouphash2 and grouphash1.id == grouphash2.id
+
+ @with_feature("organizations:grouphash-metadata-creation")
+ def test_stores_correct_config_on_primary_and_secondary_hash(self):
+ # Set the project to be in a grouping config transition so that primary and secondary hashes
+ # will both be calculated, and include numbers in the message of one of the events sent to
+ # Seer so that the primary and secondary hashes will be different (since the legacy config
+ # won't parameterize the numbers)
+ self.project.update_option("sentry:grouping_config", DEFAULT_GROUPING_CONFIG)
+ self.project.update_option("sentry:secondary_grouping_config", LEGACY_GROUPING_CONFIG)
+ self.project.update_option("sentry:secondary_grouping_expiry", time() + 3600)
+
+ event = save_new_event({"message": "Dogs are great! 11211231"}, self.project)
+
+ grouphashes = GroupHash.objects.filter(group_id=NonNone(event.group_id))
+ assert len(grouphashes) == 2
+
+ primary_grouphash = grouphashes.filter(hash=event.get_primary_hash()).first()
+ secondary_grouphash = grouphashes.exclude(hash=event.get_primary_hash()).first()
+
+ self.assert_metadata_value(
+ primary_grouphash, "latest_grouping_config", DEFAULT_GROUPING_CONFIG
+ )
+ self.assert_metadata_value(
+ secondary_grouphash, "latest_grouping_config", LEGACY_GROUPING_CONFIG
+ )
From aa7ec3cc8d0e59be9b7a45073eb9a85e9040e79d Mon Sep 17 00:00:00 2001
From: Aayush Seth
Date: Thu, 3 Oct 2024 08:16:03 -0700
Subject: [PATCH 069/139] feat(anomaly detection): Feedback Button (#78543)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Add feedback buttons for anomaly thresholds on alerts dashboard and rule
settings page
- The button on the alert dashboard is only visible if the current rule uses an
anomaly detection (dynamic) threshold
![Screenshot 2024-10-02 at 5 33 47 PM](https://github.com/user-attachments/assets/0c9be35c-eaf7-4f5c-8c3f-1e9c3fc545fd)
![Screenshot 2024-10-02 at 5 21 02 PM](https://github.com/user-attachments/assets/57caebdc-652b-4bbe-a8b9-2171c134b078)
---
.../alerts/rules/metric/details/sidebar.tsx | 28 ++++++++++++-
.../triggers/dynamicAlertsFeedbackButton.tsx | 41 +++++++++++++++++++
.../alerts/rules/metric/triggers/index.tsx | 5 +++
3 files changed, 73 insertions(+), 1 deletion(-)
create mode 100644 static/app/views/alerts/rules/metric/triggers/dynamicAlertsFeedbackButton.tsx
diff --git a/static/app/views/alerts/rules/metric/details/sidebar.tsx b/static/app/views/alerts/rules/metric/details/sidebar.tsx
index 7b0a63d12f2701..1bb5e141e03f0b 100644
--- a/static/app/views/alerts/rules/metric/details/sidebar.tsx
+++ b/static/app/views/alerts/rules/metric/details/sidebar.tsx
@@ -4,12 +4,13 @@ import styled from '@emotion/styled';
import {OnDemandWarningIcon} from 'sentry/components/alerts/onDemandMetricAlert';
import ActorAvatar from 'sentry/components/avatar/actorAvatar';
import AlertBadge from 'sentry/components/badge/alertBadge';
+import {Button} from 'sentry/components/button';
import {SectionHeading} from 'sentry/components/charts/styles';
import {DateTime} from 'sentry/components/dateTime';
import Duration from 'sentry/components/duration';
import {KeyValueTable, KeyValueTableRow} from 'sentry/components/keyValueTable';
import TimeSince from 'sentry/components/timeSince';
-import {IconDiamond} from 'sentry/icons';
+import {IconDiamond, IconMegaphone} from 'sentry/icons';
import {t, tct} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {ActivationConditionType, MonitorType} from 'sentry/types/alerts';
@@ -17,6 +18,7 @@ import type {Actor} from 'sentry/types/core';
import getDynamicText from 'sentry/utils/getDynamicText';
import {getSearchFilters, isOnDemandSearchKey} from 'sentry/utils/onDemandMetrics/index';
import {capitalize} from 'sentry/utils/string/capitalize';
+import {useFeedbackForm} from 'sentry/utils/useFeedbackForm';
import {COMPARISON_DELTA_OPTIONS} from 'sentry/views/alerts/rules/metric/constants';
import type {Action, MetricRule} from 'sentry/views/alerts/rules/metric/types';
import {
@@ -160,6 +162,29 @@ export function MetricDetailsSidebar({
break;
}
+ const openForm = useFeedbackForm();
+
+ const feedbackButton = openForm ? (
+ {
+ openForm({
+ formTitle: 'Anomaly Detection Feedback',
+ messagePlaceholder: t(
+ 'How can we make alerts using anomaly detection more useful?'
+ ),
+ tags: {
+ ['feedback.source']: 'dynamic_thresholding',
+ ['feedback.owner']: 'ml-ai',
+ },
+ });
+ }}
+ size="xs"
+ icon={ }
+ >
+ Give Feedback
+
+ ) : null;
+
return (
@@ -289,6 +314,7 @@ export function MetricDetailsSidebar({
)}
+ {rule.detectionType === AlertRuleComparisonType.DYNAMIC && feedbackButton}
);
}
diff --git a/static/app/views/alerts/rules/metric/triggers/dynamicAlertsFeedbackButton.tsx b/static/app/views/alerts/rules/metric/triggers/dynamicAlertsFeedbackButton.tsx
new file mode 100644
index 00000000000000..1f21a231af0023
--- /dev/null
+++ b/static/app/views/alerts/rules/metric/triggers/dynamicAlertsFeedbackButton.tsx
@@ -0,0 +1,41 @@
+import styled from '@emotion/styled';
+
+import {Button} from 'sentry/components/button';
+import {IconMegaphone} from 'sentry/icons';
+import {t} from 'sentry/locale';
+import {useFeedbackForm} from 'sentry/utils/useFeedbackForm';
+
+export default function DynamicAlertsFeedbackButton({}) {
+ const openForm = useFeedbackForm();
+
+ if (!openForm) {
+ return null;
+ }
+
+ return (
+
+ {
+ openForm({
+ formTitle: 'Anomaly Detection Feedback',
+ messagePlaceholder: t(
+ 'How can we make alerts using anomaly detection more useful?'
+ ),
+ tags: {
+ ['feedback.source']: 'dynamic_thresholding',
+ ['feedback.owner']: 'ml-ai',
+ },
+ });
+ }}
+ size="xs"
+ icon={ }
+ >
+ Give Feedback
+
+
+ );
+}
+
+const ButtonContainer = styled('div')`
+ padding: 8px 0px;
+`;
diff --git a/static/app/views/alerts/rules/metric/triggers/index.tsx b/static/app/views/alerts/rules/metric/triggers/index.tsx
index e1657b9ab163bf..d8d20b6188f586 100644
--- a/static/app/views/alerts/rules/metric/triggers/index.tsx
+++ b/static/app/views/alerts/rules/metric/triggers/index.tsx
@@ -8,6 +8,7 @@ import removeAtArrayIndex from 'sentry/utils/array/removeAtArrayIndex';
import replaceAtArrayIndex from 'sentry/utils/array/replaceAtArrayIndex';
import ActionsPanel from 'sentry/views/alerts/rules/metric/triggers/actionsPanel';
import AnomalyDetectionFormField from 'sentry/views/alerts/rules/metric/triggers/anomalyAlertsForm';
+import DynamicAlertsFeedbackButton from 'sentry/views/alerts/rules/metric/triggers/dynamicAlertsFeedbackButton';
import TriggerForm from 'sentry/views/alerts/rules/metric/triggers/form';
import {
@@ -152,6 +153,10 @@ class Triggers extends Component {
+ {comparisonType === AlertRuleComparisonType.DYNAMIC && (
+          <DynamicAlertsFeedbackButton />
+ )}
+
{isMigration ? null : (
Date: Thu, 3 Oct 2024 11:16:40 -0400
Subject: [PATCH 070/139] chore(issue_platform): Add a couple of feature flags
(#78546)
The related options PR can be found here:
https://github.com/getsentry/sentry-options-automator/pull/2418
---
src/sentry/features/temporary.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py
index cbe0a8621a1def..62626ae2071af9 100644
--- a/src/sentry/features/temporary.py
+++ b/src/sentry/features/temporary.py
@@ -155,6 +155,10 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:issue-details-always-show-trace", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables the UI for Autofix in issue details
manager.add("organizations:issue-details-autofix-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable Issue Platform deletion
+ manager.add("organizations:issue-platform-deletion", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable Issue Platform deletion UI
+ manager.add("organizations:issue-platform-deletion-ui", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables a toggle for entering the new issue details UI
manager.add("organizations:issue-details-new-experience-toggle", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enables access to the streamlined issue details UI
From 7f9cb24fb2b0ef68016cc8eda56cfb6e42f2c8ca Mon Sep 17 00:00:00 2001
From: Matt Duncan <14761+mrduncan@users.noreply.github.com>
Date: Thu, 3 Oct 2024 08:43:58 -0700
Subject: [PATCH 071/139] fix(issues): Remove margin-right from grouping help
(#78527)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
I assume this was unintentional.
Before
![Screenshot 2024-10-02 at 2 10 59 PM](https://github.com/user-attachments/assets/31d0beed-5e2d-4752-b81a-33c14b4997e2)
After
![Screenshot 2024-10-02 at 2 10 38 PM](https://github.com/user-attachments/assets/c384259f-16b5-47ac-8423-0cd171fa55de)
---
static/app/data/forms/projectIssueGrouping.tsx | 2 --
1 file changed, 2 deletions(-)
diff --git a/static/app/data/forms/projectIssueGrouping.tsx b/static/app/data/forms/projectIssueGrouping.tsx
index 00dfb2c4737234..30b24809d7edec 100644
--- a/static/app/data/forms/projectIssueGrouping.tsx
+++ b/static/app/data/forms/projectIssueGrouping.tsx
@@ -102,10 +102,8 @@ stack.function:mylibrary_* +app`}
const RuleDescription = styled('div')`
margin-bottom: ${space(1)};
margin-top: -${space(1)};
- margin-right: 36px;
`;
const RuleExample = styled('pre')`
margin-bottom: ${space(1)};
- margin-right: 36px;
`;
From 603a0d3fa63020a56f64e4f1a9436c43fe9f650f Mon Sep 17 00:00:00 2001
From: Jodi Jang <116035587+jangjodi@users.noreply.github.com>
Date: Thu, 3 Oct 2024 08:52:10 -0700
Subject: [PATCH 072/139] ref(similarity): Stop sending message in backfill
grouping records (#78389)
Stop sending group message in record creation call to seer in the
backfill
---
src/sentry/seer/similarity/grouping_records.py | 1 -
src/sentry/tasks/embeddings_grouping/utils.py | 1 -
tests/sentry/seer/similarity/test_grouping_records.py | 2 --
tests/sentry/tasks/test_backfill_seer_grouping_records.py | 7 -------
4 files changed, 11 deletions(-)
diff --git a/src/sentry/seer/similarity/grouping_records.py b/src/sentry/seer/similarity/grouping_records.py
index 19eb0ef0af94e3..daea9b680801d0 100644
--- a/src/sentry/seer/similarity/grouping_records.py
+++ b/src/sentry/seer/similarity/grouping_records.py
@@ -24,7 +24,6 @@ class CreateGroupingRecordData(TypedDict):
group_id: int
hash: str
project_id: int
- message: str
exception_type: str | None
diff --git a/src/sentry/tasks/embeddings_grouping/utils.py b/src/sentry/tasks/embeddings_grouping/utils.py
index 3aed6f6594e5bd..51acf27fd85035 100644
--- a/src/sentry/tasks/embeddings_grouping/utils.py
+++ b/src/sentry/tasks/embeddings_grouping/utils.py
@@ -374,7 +374,6 @@ def get_events_from_nodestore(
CreateGroupingRecordData(
group_id=group_id,
project_id=project.id,
- message=filter_null_from_string(event.title),
exception_type=(
filter_null_from_string(exception_type) if exception_type else None
),
diff --git a/tests/sentry/seer/similarity/test_grouping_records.py b/tests/sentry/seer/similarity/test_grouping_records.py
index 50b4960b1a1c4f..21fb993fa6d147 100644
--- a/tests/sentry/seer/similarity/test_grouping_records.py
+++ b/tests/sentry/seer/similarity/test_grouping_records.py
@@ -26,14 +26,12 @@
"group_id": 1,
"hash": "hash-1",
"project_id": 1,
- "message": "message",
"exception_type": "Error",
},
{
"group_id": 2,
"hash": "hash-2",
"project_id": 1,
- "message": "message 2",
"exception_type": "Error",
},
],
diff --git a/tests/sentry/tasks/test_backfill_seer_grouping_records.py b/tests/sentry/tasks/test_backfill_seer_grouping_records.py
index 9d7af966ff19ea..1aee000c81a584 100644
--- a/tests/sentry/tasks/test_backfill_seer_grouping_records.py
+++ b/tests/sentry/tasks/test_backfill_seer_grouping_records.py
@@ -183,7 +183,6 @@ def test_lookup_group_data_stacktrace_bulk_success(self, mock_metrics):
group_id=event.group.id,
hash=self.group_hashes[event.group.id],
project_id=self.project.id,
- message=event.title,
exception_type=get_path(event.data, "exception", "values", -1, "type"),
)
for event in events
@@ -212,7 +211,6 @@ def test_lookup_group_data_stacktrace_bulk_success_multithread(self, mock_metric
group_id=event.group.id,
hash=self.group_hashes[event.group.id],
project_id=self.project.id,
- message=event.title,
exception_type=get_path(event.data, "exception", "values", -1, "type"),
)
for event in events
@@ -282,7 +280,6 @@ def test_lookup_group_data_stacktrace_bulk_not_stacktrace_grouping(self):
group_id=event.group.id,
hash=hashes[event.group.id],
project_id=self.project.id,
- message=event.title,
exception_type=get_path(event.data, "exception", "values", -1, "type"),
)
for event in events
@@ -317,7 +314,6 @@ def test_lookup_group_data_stacktrace_bulk_no_stacktrace_exception(self):
group_id=event.group.id,
hash=hashes[event.group.id],
project_id=self.project.id,
- message=event.title,
exception_type=get_path(event.data, "exception", "values", -1, "type"),
)
for event in events
@@ -345,7 +341,6 @@ def test_lookup_group_data_stacktrace_bulk_with_fallback_success(self):
group_id=event.group.id,
hash=hashes[event.group.id],
project_id=self.project.id,
- message=event.title,
exception_type=get_path(event.data, "exception", "values", -1, "type"),
)
for event in events
@@ -383,7 +378,6 @@ def test_lookup_group_data_stacktrace_bulk_with_fallback_use_single_fallback(
group_id=event.group.id,
hash=hashes[event.group.id],
project_id=self.project.id,
- message=event.title,
exception_type=get_path(event.data, "exception", "values", -1, "type"),
)
for event in events
@@ -418,7 +412,6 @@ def test_lookup_group_data_stacktrace_bulk_with_fallback_event_lookup_error(self
group_id=event.group.id,
hash=hashes[event.group.id],
project_id=self.project.id,
- message=event.title,
exception_type=get_path(event.data, "exception", "values", -1, "type"),
)
for event in events
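After this change a grouping record sent to Seer carries only the fields left in the TypedDict above. A small sketch for illustration; the group, group_hash, and project variables below are placeholders, not part of the patch.

from sentry.seer.similarity.grouping_records import CreateGroupingRecordData

# group, group_hash, and project are placeholders for illustration only.
record = CreateGroupingRecordData(
    group_id=group.id,
    hash=group_hash,
    project_id=project.id,
    exception_type="ValueError",  # or None when no exception type exists
)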
From 45a7f676a91aced20abca78143152b60b33c37ce Mon Sep 17 00:00:00 2001
From: elijames-codecov <88844267+elijames-codecov@users.noreply.github.com>
Date: Thu, 3 Oct 2024 12:31:43 -0400
Subject: [PATCH 073/139] Update alerts.html (#78046)
Update Sentry login banner to remove an old workshop and feature our
Discord community.
### Legal Boilerplate
Look, I get it. The entity doing business as "Sentry" was incorporated
in the State of Delaware in 2015 as Functional Software, Inc. and is
gonna need some rights from me in order to utilize my contributions in
this here PR. So here's the deal: I retain all rights, title and
interest in and to my contributions, and by keeping this boilerplate
intact I confirm that Sentry can use, modify, copy, and redistribute my
contributions, under Sentry's choice of terms.
---
src/sentry/templates/sentry/partial/alerts.html | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/sentry/templates/sentry/partial/alerts.html b/src/sentry/templates/sentry/partial/alerts.html
index 2a639e9cc9b0ad..ee46b00f0b91af 100644
--- a/src/sentry/templates/sentry/partial/alerts.html
+++ b/src/sentry/templates/sentry/partial/alerts.html
@@ -78,9 +78,9 @@
{% if banner_choice == 0 %}
- New workshop: Fix Your Frontend with Sentry on Sept. 24.  
RSVP.
+ Want to connect with the folks building Sentry?  
Join us on Discord.
{% elif banner_choice == 1 %}
- New workshop: Fix Your Frontend with Sentry on Sept. 24.  
RSVP.
+ Want to connect with the folks building Sentry?  
Join us on Discord.
{% endif %}
From 379abd196d2f55b628d34c34d385ba731fd9008f Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Thu, 3 Oct 2024 10:06:36 -0700
Subject: [PATCH 074/139] fix(issues): Use routes to display all events
(#78435)
---
.../streamline/eventDetails.spec.tsx | 24 ++++-----
.../issueDetails/streamline/eventDetails.tsx | 24 +++------
.../streamline/eventList.spec.tsx | 50 ++++++++++++-------
.../issueDetails/streamline/eventList.tsx | 28 +++++++----
.../streamline/eventNavigation.spec.tsx | 12 +----
.../streamline/eventNavigation.tsx | 18 +++++--
6 files changed, 84 insertions(+), 72 deletions(-)
diff --git a/static/app/views/issueDetails/streamline/eventDetails.spec.tsx b/static/app/views/issueDetails/streamline/eventDetails.spec.tsx
index 979ef9ad2f0792..b9ecd66c52c9ee 100644
--- a/static/app/views/issueDetails/streamline/eventDetails.spec.tsx
+++ b/static/app/views/issueDetails/streamline/eventDetails.spec.tsx
@@ -1,9 +1,11 @@
import {EventFixture} from 'sentry-fixture/event';
import {EventsStatsFixture} from 'sentry-fixture/events';
import {GroupFixture} from 'sentry-fixture/group';
+import {LocationFixture} from 'sentry-fixture/locationFixture';
import {OrganizationFixture} from 'sentry-fixture/organization';
import {ProjectFixture} from 'sentry-fixture/project';
import {RepositoryFixture} from 'sentry-fixture/repository';
+import {RouterFixture} from 'sentry-fixture/routerFixture';
import {TagsFixture} from 'sentry-fixture/tags';
import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
@@ -151,22 +153,20 @@ describe('EventDetails', function () {
expect(mockListMeta).not.toHaveBeenCalled();
});
- it('allows toggling between event and list views', async function () {
- render( , {organization});
- await screen.findByText(event.id);
-
- const listButton = screen.getByRole('button', {name: 'View All Events'});
- await userEvent.click(listButton);
+ it('should display the events list', async function () {
+ const router = RouterFixture({
+ location: LocationFixture({
+ pathname: `/organizations/${organization.slug}/issues/${group.id}/events/`,
+ }),
+ routes: [{name: '', path: 'events/'}],
+ });
+ render( , {organization, router});
- expect(listButton).not.toBeInTheDocument();
+ expect(await screen.findByRole('button', {name: 'Close'})).toBeInTheDocument();
expect(screen.getByText('All Events')).toBeInTheDocument();
+
expect(mockList).toHaveBeenCalled();
expect(mockListMeta).toHaveBeenCalled();
- const closeButton = screen.getByRole('button', {name: 'Close'});
- await userEvent.click(closeButton);
-
- expect(closeButton).not.toBeInTheDocument();
- expect(screen.getByRole('button', {name: 'View All Events'})).toBeInTheDocument();
});
it('displays error messages from bad queries', async function () {
diff --git a/static/app/views/issueDetails/streamline/eventDetails.tsx b/static/app/views/issueDetails/streamline/eventDetails.tsx
index 499e0c71574547..02b4813d827d9e 100644
--- a/static/app/views/issueDetails/streamline/eventDetails.tsx
+++ b/static/app/views/issueDetails/streamline/eventDetails.tsx
@@ -38,11 +38,8 @@ import {
useIssueDetailsDiscoverQuery,
useIssueDetailsEventView,
} from 'sentry/views/issueDetails/streamline/useIssueDetailsDiscoverQuery';
-
-const enum EventPageContent {
- EVENT = 'event',
- LIST = 'list',
-}
+import {Tab} from 'sentry/views/issueDetails/types';
+import {useGroupDetailsRoute} from 'sentry/views/issueDetails/useGroupDetailsRoute';
export function EventDetails({
group,
@@ -61,10 +58,7 @@ export function EventDetails({
const searchQuery = useEventQuery({group});
const eventView = useIssueDetailsEventView({group});
-
-  const [pageContent, setPageContent] = useState<EventPageContent>(
- EventPageContent.EVENT
- );
+ const {currentTab} = useGroupDetailsRoute();
const {
data: groupStats,
@@ -139,18 +133,15 @@ export function EventDetails({
)}
)}
- {pageContent === EventPageContent.LIST && (
+ {/* TODO(issues): We should use the router for this */}
+ {currentTab === Tab.EVENTS && (
- setPageContent(EventPageContent.EVENT)}
- />
+
)}
- {pageContent === EventPageContent.EVENT && (
+ {currentTab !== Tab.EVENTS && (
setPageContent(EventPageContent.LIST)}
data-stuck={isStuck}
/>
diff --git a/static/app/views/issueDetails/streamline/eventList.spec.tsx b/static/app/views/issueDetails/streamline/eventList.spec.tsx
index 9b09b3dfc194e9..25b0a9c9725000 100644
--- a/static/app/views/issueDetails/streamline/eventList.spec.tsx
+++ b/static/app/views/issueDetails/streamline/eventList.spec.tsx
@@ -7,16 +7,20 @@ import {ProjectFixture} from 'sentry-fixture/project';
import {RouterFixture} from 'sentry-fixture/routerFixture';
import {TagsFixture} from 'sentry-fixture/tags';
-import {render, renderHook, screen, userEvent} from 'sentry-test/reactTestingLibrary';
+import {
+ render,
+ renderHook,
+ screen,
+ userEvent,
+ waitFor,
+} from 'sentry-test/reactTestingLibrary';
import PageFiltersStore from 'sentry/stores/pageFiltersStore';
import ProjectsStore from 'sentry/stores/projectsStore';
-import {useLocation} from 'sentry/utils/useLocation';
import {useEventColumns} from 'sentry/views/issueDetails/allEventsTable';
import {EventDetails} from 'sentry/views/issueDetails/streamline/eventDetails';
import {MOCK_EVENTS_TABLE_DATA} from 'sentry/views/performance/transactionSummary/transactionEvents/testUtils';
-jest.mock('sentry/utils/useLocation');
jest.mock('sentry/components/events/suspectCommits');
jest.mock('sentry/views/issueDetails/groupEventDetails/groupEventDetailsContent');
jest.mock('sentry/views/issueDetails/streamline/issueContent');
@@ -28,7 +32,6 @@ jest.mock('screenfull', () => ({
on: jest.fn(),
off: jest.fn(),
}));
-const mockUseLocation = jest.mocked(useLocation);
describe('EventList', () => {
const organization = OrganizationFixture();
@@ -44,7 +47,6 @@ describe('EventList', () => {
let mockEventListMeta: jest.Mock;
beforeEach(() => {
- mockUseLocation.mockReturnValue(LocationFixture());
PageFiltersStore.init();
PageFiltersStore.onInitializeUrlState(
{
@@ -105,19 +107,24 @@ describe('EventList', () => {
});
});
- async function renderAndSwitchToAllEvents() {
+ function renderAllEvents() {
render( , {
organization,
- router: RouterFixture({location: LocationFixture()}),
+ router: RouterFixture({
+ location: LocationFixture({
+ pathname: `/organizations/${organization.slug}/issues/${group.id}/events/`,
+ }),
+ routes: [{name: '', path: 'events/'}],
+ }),
});
- await screen.findByText(event.id);
- await userEvent.click(screen.getByRole('button', {name: 'View All Events'}));
}
it('renders the list using a discover event query', async function () {
- await renderAndSwitchToAllEvents();
+ renderAllEvents();
const {result} = renderHook(() => useEventColumns(group, organization));
+ expect(await screen.findByText('All Events')).toBeInTheDocument();
+
expect(mockEventList).toHaveBeenCalledWith(
'/organizations/org-slug/events/',
expect.objectContaining({
@@ -135,7 +142,6 @@ describe('EventList', () => {
);
expect(mockEventListMeta).toHaveBeenCalled();
- expect(screen.getByText('All Events')).toBeInTheDocument();
expect(screen.getByRole('button', {name: 'Previous Page'})).toBeInTheDocument();
expect(screen.getByRole('button', {name: 'Next Page'})).toBeInTheDocument();
expect(screen.getByRole('button', {name: 'Close'})).toBeInTheDocument();
@@ -151,7 +157,7 @@ describe('EventList', () => {
});
it('allows filtering by environment', async function () {
- await renderAndSwitchToAllEvents();
+ renderAllEvents();
await userEvent.click(screen.getByRole('button', {name: 'All Envs'}));
await userEvent.click(screen.getByRole('row', {name: 'production'}));
@@ -175,10 +181,16 @@ describe('EventList', () => {
query: `${tagKey}:${tagValue}`,
},
};
- mockUseLocation.mockReset();
- mockUseLocation.mockReturnValue(LocationFixture(locationQuery));
-
- await renderAndSwitchToAllEvents();
+ render( , {
+ organization,
+ router: RouterFixture({
+ location: LocationFixture({
+ pathname: `/organizations/${organization.slug}/issues/${group.id}/events/`,
+ query: locationQuery.query,
+ }),
+ routes: [{name: '', path: 'events/'}],
+ }),
+ });
const expectedArgs = [
'/organizations/org-slug/events/',
@@ -188,12 +200,14 @@ describe('EventList', () => {
}),
}),
];
- expect(mockEventList).toHaveBeenCalledWith(...expectedArgs);
+ await waitFor(() => {
+ expect(mockEventList).toHaveBeenCalledWith(...expectedArgs);
+ });
expect(mockEventListMeta).toHaveBeenCalledWith(...expectedArgs);
});
it('allows filtering by date', async function () {
- await renderAndSwitchToAllEvents();
+ renderAllEvents();
await userEvent.click(screen.getByRole('button', {name: '14D'}));
await userEvent.click(screen.getByRole('option', {name: 'Last 7 days'}));
diff --git a/static/app/views/issueDetails/streamline/eventList.tsx b/static/app/views/issueDetails/streamline/eventList.tsx
index 347208665bb570..2afb7bc0ee472c 100644
--- a/static/app/views/issueDetails/streamline/eventList.tsx
+++ b/static/app/views/issueDetails/streamline/eventList.tsx
@@ -2,7 +2,7 @@ import {useState} from 'react';
import {css, useTheme} from '@emotion/react';
import styled from '@emotion/styled';
-import {Button, LinkButton} from 'sentry/components/button';
+import {LinkButton} from 'sentry/components/button';
import ButtonBar from 'sentry/components/buttonBar';
import {
GridBodyCell,
@@ -24,15 +24,15 @@ import {useRoutes} from 'sentry/utils/useRoutes';
import {useEventColumns} from 'sentry/views/issueDetails/allEventsTable';
import {ALL_EVENTS_EXCLUDED_TAGS} from 'sentry/views/issueDetails/groupEvents';
import {useIssueDetailsEventView} from 'sentry/views/issueDetails/streamline/useIssueDetailsDiscoverQuery';
+import {useGroupDetailsRoute} from 'sentry/views/issueDetails/useGroupDetailsRoute';
import EventsTable from 'sentry/views/performance/transactionSummary/transactionEvents/eventsTable';
interface EventListProps {
group: Group;
project: Project;
- onClose?: (e: React.MouseEvent) => void;
}
-export function EventList({group, onClose}: EventListProps) {
+export function EventList({group}: EventListProps) {
const referrer = 'issue_details.streamline_list';
const theme = useTheme();
const location = useLocation();
@@ -41,6 +41,7 @@ export function EventList({group, onClose}: EventListProps) {
const [_error, setError] = useState('');
const {fields, columnTitles} = useEventColumns(group, organization);
const eventView = useIssueDetailsEventView({group, queryProps: {fields}});
+ const {baseUrl} = useGroupDetailsRoute();
const grayText = css`
color: ${theme.subText};
@@ -126,13 +127,20 @@ export function EventList({group, onClose}: EventListProps) {
/>
- {onClose && (
-
-
- {t('Close')}
-
-
- )}
+
+
+
+ {t('Close')}
+
+
);
}}
diff --git a/static/app/views/issueDetails/streamline/eventNavigation.spec.tsx b/static/app/views/issueDetails/streamline/eventNavigation.spec.tsx
index 487044d6417b3b..9acab709dac6ba 100644
--- a/static/app/views/issueDetails/streamline/eventNavigation.spec.tsx
+++ b/static/app/views/issueDetails/streamline/eventNavigation.spec.tsx
@@ -27,10 +27,9 @@ describe('EventNavigation', () => {
previousEventID: 'prev-event-id',
nextEventID: 'next-event-id',
});
- const defaultProps = {
+  const defaultProps: React.ComponentProps<typeof EventNavigation> = {
event: testEvent,
group: GroupFixture({id: 'group-id'}),
- onViewAllEvents: jest.fn(),
};
beforeEach(() => {
@@ -107,15 +106,6 @@ describe('EventNavigation', () => {
});
});
- it('can runs callback on view all events click', async () => {
- render( );
- expect(defaultProps.onViewAllEvents).not.toHaveBeenCalled();
- const viewAllButton = screen.getByRole('button', {name: 'View All Events'});
- expect(viewAllButton).toBeInTheDocument();
- await userEvent.click(viewAllButton);
- expect(defaultProps.onViewAllEvents).toHaveBeenCalled();
- });
-
it('can navigate next/previous events', () => {
render( );
diff --git a/static/app/views/issueDetails/streamline/eventNavigation.tsx b/static/app/views/issueDetails/streamline/eventNavigation.tsx
index 46011297b95f34..967ef649339337 100644
--- a/static/app/views/issueDetails/streamline/eventNavigation.tsx
+++ b/static/app/views/issueDetails/streamline/eventNavigation.tsx
@@ -37,6 +37,8 @@ import {
useEventDetails,
} from 'sentry/views/issueDetails/streamline/context';
import {getFoldSectionKey} from 'sentry/views/issueDetails/streamline/foldSection';
+import {Tab, TabPaths} from 'sentry/views/issueDetails/types';
+import {useGroupDetailsRoute} from 'sentry/views/issueDetails/useGroupDetailsRoute';
import {useDefaultIssueEvent} from 'sentry/views/issueDetails/utils';
export const MIN_NAV_HEIGHT = 44;
@@ -44,7 +46,6 @@ export const MIN_NAV_HEIGHT = 44;
type EventNavigationProps = {
event: Event;
group: Group;
- onViewAllEvents: (e: React.MouseEvent) => void;
className?: string;
/**
* Data property to help style the component when it's sticky
@@ -89,7 +90,7 @@ const sectionLabels = {
};
export const EventNavigation = forwardRef(
- function EventNavigation({event, group, query, onViewAllEvents, ...props}, ref) {
+ function EventNavigation({event, group, query, ...props}, ref) {
const location = useLocation();
const organization = useOrganization();
const theme = useTheme();
@@ -104,6 +105,7 @@ export const EventNavigation = forwardRef(
true
);
const isMobile = useMedia(`(max-width: ${theme.breakpoints.small})`);
+ const {baseUrl} = useGroupDetailsRoute();
const {data: actionableItems} = useActionableItems({
eventId: event.id,
@@ -225,9 +227,17 @@ export const EventNavigation = forwardRef(
/>
-
+
{isMobile ? '' : t('View')} {t('All Events')}
-
+
From 9ad092804040f429fd05fb8dea7fb02099cfdac6 Mon Sep 17 00:00:00 2001
From: Priscila Oliveira
Date: Thu, 3 Oct 2024 19:17:19 +0200
Subject: [PATCH 075/139] fix(breadcrumbs): Fix content not taking full
available space (#78555)
---
.../breadcrumbs/breadcrumbsDataSection.tsx | 20 +++++++++++++++++--
1 file changed, 18 insertions(+), 2 deletions(-)
diff --git a/static/app/components/events/breadcrumbs/breadcrumbsDataSection.tsx b/static/app/components/events/breadcrumbs/breadcrumbsDataSection.tsx
index e4c6e031e511a7..9f3bc3a96b9f98 100644
--- a/static/app/components/events/breadcrumbs/breadcrumbsDataSection.tsx
+++ b/static/app/components/events/breadcrumbs/breadcrumbsDataSection.tsx
@@ -1,4 +1,5 @@
import {useCallback, useMemo, useRef, useState} from 'react';
+import {ClassNames} from '@emotion/react';
import styled from '@emotion/styled';
import GuideAnchor from 'sentry/components/assistant/guideAnchor';
@@ -151,7 +152,7 @@ export default function BreadcrumbsDataSection({
const hasViewAll = summaryCrumbs.length !== enhancedCrumbs.length;
return (
-
+
-
+
);
}
@@ -222,3 +223,18 @@ const VerticalEllipsis = styled(IconEllipsis)`
const ViewAllButton = styled(Button)`
padding: ${space(0.75)} ${space(1)};
`;
+
+function FullWidthGuideAnchor(props: React.ComponentProps<typeof GuideAnchor>) {
+  return (
+    <ClassNames>
+      {({css: classNamesCss}) => (
+        <GuideAnchor
+          {...props}
+          containerClassName={classNamesCss`
+            width: 100%;
+          `}
+        />
+      )}
+    </ClassNames>
+  );
+}
From 999eeafeff6ae4b616bafa1f4225ca52ac2d1bea Mon Sep 17 00:00:00 2001
From: Dan Fuller
Date: Thu, 3 Oct 2024 10:21:36 -0700
Subject: [PATCH 076/139] feat(workflow_engine): Add type column to detector
(#78533)
We're going to need some way to run different logic for different kinds of
detectors. However we implement that, we'll be using this type column.
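As a sketch of what that could look like, per-type logic might later be registered against the new column and dispatched at evaluation time. This is hypothetical and not part of this patch; the registry, decorator, and "metric_alert" type value are illustrative only.

# Hypothetical sketch -- not part of this patch. One way the new
# Detector.type column could select per-type evaluation logic later on.
_DETECTOR_HANDLERS = {}


def register_detector_handler(detector_type):
    """Associate a handler callable with a Detector.type value."""

    def decorator(func):
        _DETECTOR_HANDLERS[detector_type] = func
        return func

    return decorator


@register_detector_handler("metric_alert")  # illustrative type value
def evaluate_metric_detector(detector):
    ...


def evaluate(detector):
    # Dispatch on the CharField added in 0009_detector_type.
    return _DETECTOR_HANDLERS[detector.type](detector)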
---
migrations_lockfile.txt | 2 +-
.../migrations/0009_detector_type.py | 37 +
src/sentry/workflow_engine/models/detector.py | 1 +
.../ReleaseTests/test_at_head.pysnap | 765 +++++++++---------
4 files changed, 422 insertions(+), 383 deletions(-)
create mode 100644 src/sentry/workflow_engine/migrations/0009_detector_type.py
diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt
index 69f99c692ce68b..4b95fa70a4d7ca 100644
--- a/migrations_lockfile.txt
+++ b/migrations_lockfile.txt
@@ -13,4 +13,4 @@ replays: 0004_index_together
sentry: 0771_add_grouping_config_to_grouphash_metadata
social_auth: 0002_default_auto_field
uptime: 0016_translate_uptime_object_headers_to_lists
-workflow_engine: 0008_detector_state
+workflow_engine: 0009_detector_type
diff --git a/src/sentry/workflow_engine/migrations/0009_detector_type.py b/src/sentry/workflow_engine/migrations/0009_detector_type.py
new file mode 100644
index 00000000000000..25495c3435ebfc
--- /dev/null
+++ b/src/sentry/workflow_engine/migrations/0009_detector_type.py
@@ -0,0 +1,37 @@
+# Generated by Django 5.1.1 on 2024-10-02 22:26
+
+from django.db import migrations, models
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+ # If you're copying this migration don't do this. It's dangerous to disable the checks unless you know what you're
+ # doing.
+ checked = False
+
+ dependencies = [
+ ("workflow_engine", "0008_detector_state"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="detector",
+ name="type",
+ field=models.CharField(max_length=200),
+ preserve_default=False,
+ ),
+ ]
diff --git a/src/sentry/workflow_engine/models/detector.py b/src/sentry/workflow_engine/models/detector.py
index 9c0c9c37fac8df..e2d988a93d9c60 100644
--- a/src/sentry/workflow_engine/models/detector.py
+++ b/src/sentry/workflow_engine/models/detector.py
@@ -26,6 +26,7 @@ class Detector(DefaultFieldsModel, OwnerModel):
unique=True,
on_delete=models.SET_NULL,
)
+ type = models.CharField(max_length=200)
class Meta(OwnerModel.Meta):
constraints = OwnerModel.Meta.constraints + [
diff --git a/tests/sentry/backup/snapshots/ReleaseTests/test_at_head.pysnap b/tests/sentry/backup/snapshots/ReleaseTests/test_at_head.pysnap
index 85674f03fa3c54..31f11b90982168 100644
--- a/tests/sentry/backup/snapshots/ReleaseTests/test_at_head.pysnap
+++ b/tests/sentry/backup/snapshots/ReleaseTests/test_at_head.pysnap
@@ -1,18 +1,18 @@
---
-created: '2024-09-30T23:40:25.265684+00:00'
+created: '2024-10-02T23:17:50.255952+00:00'
creator: sentry
source: tests/sentry/backup/test_releases.py
---
- fields:
key: bar
- last_updated: '2024-09-30T23:40:24.919Z'
+ last_updated: '2024-10-02T23:17:49.920Z'
last_updated_by: unknown
value: '"b"'
model: sentry.controloption
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.363Z'
- date_updated: '2024-09-30T23:40:24.363Z'
+ date_added: '2024-10-02T23:17:49.376Z'
+ date_updated: '2024-10-02T23:17:49.376Z'
external_id: slack:test-org
metadata: {}
name: Slack for test-org
@@ -22,13 +22,13 @@ source: tests/sentry/backup/test_releases.py
pk: 1
- fields:
key: foo
- last_updated: '2024-09-30T23:40:24.918Z'
+ last_updated: '2024-10-02T23:17:49.919Z'
last_updated_by: unknown
value: '"a"'
model: sentry.option
pk: 1
- fields:
- date_added: '2024-09-30T23:40:23.697Z'
+ date_added: '2024-10-02T23:17:48.714Z'
default_role: member
flags: '1'
is_test: false
@@ -36,40 +36,40 @@ source: tests/sentry/backup/test_releases.py
slug: test-org
status: 0
model: sentry.organization
- pk: 4554792498561024
+ pk: 4554803734380544
- fields:
- date_added: '2024-09-30T23:40:24.579Z'
+ date_added: '2024-10-02T23:17:49.561Z'
default_role: member
flags: '1'
is_test: false
- name: Tough Tetra
- slug: tough-tetra
+ name: Topical Tick
+ slug: topical-tick
status: 0
model: sentry.organization
- pk: 4554792498626563
+ pk: 4554803734446083
- fields:
config:
hello: hello
- date_added: '2024-09-30T23:40:24.364Z'
- date_updated: '2024-09-30T23:40:24.364Z'
+ date_added: '2024-10-02T23:17:49.377Z'
+ date_updated: '2024-10-02T23:17:49.377Z'
default_auth_id: null
grace_period_end: null
integration: 1
- organization_id: 4554792498561024
+ organization_id: 4554803734380544
status: 0
model: sentry.organizationintegration
pk: 1
- fields:
key: sentry:account-rate-limit
- organization: 4554792498561024
+ organization: 4554803734380544
value: 0
model: sentry.organizationoption
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.224Z'
- date_updated: '2024-09-30T23:40:24.224Z'
+ date_added: '2024-10-02T23:17:49.233Z'
+ date_updated: '2024-10-02T23:17:49.233Z'
name: template-test-org
- organization: 4554792498561024
+ organization: 4554803734380544
model: sentry.projecttemplate
pk: 1
- fields:
@@ -82,44 +82,44 @@ source: tests/sentry/backup/test_releases.py
first_seen: null
is_internal: true
last_seen: null
- public_key: GxT9ZU-UU1rKXPzC4GCMdAWmEClya8TEqBZ7-aoCBKg
- relay_id: a65e38d8-719a-499b-b901-1706e1a5e0f4
+ public_key: 10vr5MmbX5dt_f2zbApVzEugwsVTj_oLFyze24bZunc
+ relay_id: acad7fea-8b1f-4fc6-8c1a-775523478717
model: sentry.relay
pk: 1
- fields:
- first_seen: '2024-09-30T23:40:24.917Z'
- last_seen: '2024-09-30T23:40:24.917Z'
- public_key: GxT9ZU-UU1rKXPzC4GCMdAWmEClya8TEqBZ7-aoCBKg
- relay_id: a65e38d8-719a-499b-b901-1706e1a5e0f4
+ first_seen: '2024-10-02T23:17:49.918Z'
+ last_seen: '2024-10-02T23:17:49.918Z'
+ public_key: 10vr5MmbX5dt_f2zbApVzEugwsVTj_oLFyze24bZunc
+ relay_id: acad7fea-8b1f-4fc6-8c1a-775523478717
version: 0.0.1
model: sentry.relayusage
pk: 1
- fields:
config: {}
- date_added: '2024-09-30T23:40:24.530Z'
+ date_added: '2024-10-02T23:17:49.512Z'
external_id: https://git.example.com:1234
integration_id: 1
languages: '[]'
name: getsentry/getsentry
- organization_id: 4554792498561024
+ organization_id: 4554803734380544
provider: integrations:github
status: 0
url: https://github.com/getsentry/getsentry
model: sentry.repository
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.170Z'
+ date_added: '2024-10-02T23:17:49.177Z'
idp_provisioned: false
name: test_team_in_test-org
- organization: 4554792498561024
+ organization: 4554803734380544
slug: test_team_in_test-org
status: 0
model: sentry.team
- pk: 4554792498626560
+ pk: 4554803734446080
- fields:
avatar_type: 0
avatar_url: null
- date_joined: '2024-09-30T23:40:23.536Z'
+ date_joined: '2024-10-02T23:17:48.548Z'
email: superadmin
flags: '0'
is_active: true
@@ -129,11 +129,11 @@ source: tests/sentry/backup/test_releases.py
is_staff: true
is_superuser: true
is_unclaimed: false
- last_active: '2024-09-30T23:40:23.536Z'
+ last_active: '2024-10-02T23:17:48.548Z'
last_login: null
- last_password_change: '2024-09-30T23:40:23.536Z'
+ last_password_change: '2024-10-02T23:17:48.548Z'
name: ''
- password: md5$cauNedgmtAdshasPBcoD2W$85fcdbdf6aa82f738408c689979212b1
+ password: md5$tEih2qVxH7h2AJaKs00jJh$bb81d4cd312709a22eff544107f3fe3b
session_nonce: null
username: superadmin
model: sentry.user
@@ -141,7 +141,7 @@ source: tests/sentry/backup/test_releases.py
- fields:
avatar_type: 0
avatar_url: null
- date_joined: '2024-09-30T23:40:23.606Z'
+ date_joined: '2024-10-02T23:17:48.623Z'
email: owner
flags: '0'
is_active: true
@@ -151,11 +151,11 @@ source: tests/sentry/backup/test_releases.py
is_staff: false
is_superuser: false
is_unclaimed: false
- last_active: '2024-09-30T23:40:23.606Z'
+ last_active: '2024-10-02T23:17:48.623Z'
last_login: null
- last_password_change: '2024-09-30T23:40:23.606Z'
+ last_password_change: '2024-10-02T23:17:48.623Z'
name: ''
- password: md5$NjcV22LILT9SWn5nKZWeci$858af55c5254782428783bda517c8b17
+ password: md5$SOBbxqlYwG06OZCaKczsTv$663b36df391ee5455bbf85c3d9cd5a65
session_nonce: null
username: owner
model: sentry.user
@@ -163,7 +163,7 @@ source: tests/sentry/backup/test_releases.py
- fields:
avatar_type: 0
avatar_url: null
- date_joined: '2024-09-30T23:40:23.624Z'
+ date_joined: '2024-10-02T23:17:48.642Z'
email: member
flags: '0'
is_active: true
@@ -173,11 +173,11 @@ source: tests/sentry/backup/test_releases.py
is_staff: false
is_superuser: false
is_unclaimed: false
- last_active: '2024-09-30T23:40:23.624Z'
+ last_active: '2024-10-02T23:17:48.642Z'
last_login: null
- last_password_change: '2024-09-30T23:40:23.624Z'
+ last_password_change: '2024-10-02T23:17:48.642Z'
name: ''
- password: md5$QlB2CCYJq8fryrCecGh4ca$1842215ca2b783493d70389166ac2fde
+ password: md5$di7yZ2yRm5W9Q1IEvbHOc0$1ca9eab55e9907cf4d1155ce7edd2a9c
session_nonce: null
username: member
model: sentry.user
@@ -185,7 +185,7 @@ source: tests/sentry/backup/test_releases.py
- fields:
avatar_type: 0
avatar_url: null
- date_joined: '2024-09-30T23:40:23.644Z'
+ date_joined: '2024-10-02T23:17:48.661Z'
email: added-by-superadmin-not-in-org
flags: '0'
is_active: true
@@ -195,11 +195,11 @@ source: tests/sentry/backup/test_releases.py
is_staff: false
is_superuser: false
is_unclaimed: false
- last_active: '2024-09-30T23:40:23.644Z'
+ last_active: '2024-10-02T23:17:48.661Z'
last_login: null
- last_password_change: '2024-09-30T23:40:23.644Z'
+ last_password_change: '2024-10-02T23:17:48.661Z'
name: ''
- password: md5$NVD26d9oUaduDMqwB21Jdj$36f603fdef7fdd2b104f6df8f342f747
+ password: md5$td9ebTaf1kJzd6GV6ygggX$545a7f1affdd036480cd5c828ae3dd24
session_nonce: null
username: added-by-superadmin-not-in-org
model: sentry.user
@@ -207,7 +207,7 @@ source: tests/sentry/backup/test_releases.py
- fields:
avatar_type: 0
avatar_url: null
- date_joined: '2024-09-30T23:40:23.662Z'
+ date_joined: '2024-10-02T23:17:48.679Z'
email: added-by-org-owner
flags: '0'
is_active: true
@@ -217,11 +217,11 @@ source: tests/sentry/backup/test_releases.py
is_staff: false
is_superuser: false
is_unclaimed: false
- last_active: '2024-09-30T23:40:23.662Z'
+ last_active: '2024-10-02T23:17:48.679Z'
last_login: null
- last_password_change: '2024-09-30T23:40:23.662Z'
+ last_password_change: '2024-10-02T23:17:48.679Z'
name: ''
- password: md5$m46wxsmGboiCiqDHin8i7Z$4bc1e0918ed81e137d52ee262e91d3bf
+ password: md5$I50LK31xIez2n5qrU97Z07$3ad9cd8889acc4cb7ecb1e18ae9a329f
session_nonce: null
username: added-by-org-owner
model: sentry.user
@@ -229,7 +229,7 @@ source: tests/sentry/backup/test_releases.py
- fields:
avatar_type: 0
avatar_url: null
- date_joined: '2024-09-30T23:40:23.678Z'
+ date_joined: '2024-10-02T23:17:48.697Z'
email: added-by-org-member
flags: '0'
is_active: true
@@ -239,11 +239,11 @@ source: tests/sentry/backup/test_releases.py
is_staff: false
is_superuser: false
is_unclaimed: false
- last_active: '2024-09-30T23:40:23.678Z'
+ last_active: '2024-10-02T23:17:48.697Z'
last_login: null
- last_password_change: '2024-09-30T23:40:23.679Z'
+ last_password_change: '2024-10-02T23:17:48.697Z'
name: ''
- password: md5$mcp56TsIcnuS8hpXjeS3DQ$72af12c61cc63928a7d2e1a5ce377384
+ password: md5$irGhooUCKDer2u9nwiCs7v$64f08265a86aab0d102fcb2efd623add
session_nonce: null
username: added-by-org-member
model: sentry.user
@@ -251,7 +251,7 @@ source: tests/sentry/backup/test_releases.py
- fields:
avatar_type: 0
avatar_url: null
- date_joined: '2024-09-30T23:40:24.466Z'
+ date_joined: '2024-10-02T23:17:49.448Z'
email: admin@localhost
flags: '0'
is_active: true
@@ -261,11 +261,11 @@ source: tests/sentry/backup/test_releases.py
is_staff: true
is_superuser: true
is_unclaimed: false
- last_active: '2024-09-30T23:40:24.466Z'
+ last_active: '2024-10-02T23:17:49.448Z'
last_login: null
- last_password_change: '2024-09-30T23:40:24.466Z'
+ last_password_change: '2024-10-02T23:17:49.448Z'
name: ''
- password: md5$Vy4CWlTVWQ2uLdl8HYjZU9$7e61a9479ba3209fbcc15c2e86e5d265
+ password: md5$LYD9SCQ6xE4IY2nFysLcVb$f87a5e8af25a96bd4c189962adca958c
session_nonce: null
username: admin@localhost
model: sentry.user
@@ -273,8 +273,8 @@ source: tests/sentry/backup/test_releases.py
- fields:
avatar_type: 0
avatar_url: null
- date_joined: '2024-09-30T23:40:24.568Z'
- email: 6f1e4a94e49d4f258e33a5b0bc62ad57@example.com
+ date_joined: '2024-10-02T23:17:49.549Z'
+ email: ab2de483556e4012a3d8434fa945fae2@example.com
flags: '0'
is_active: true
is_managed: false
@@ -283,19 +283,19 @@ source: tests/sentry/backup/test_releases.py
is_staff: false
is_superuser: false
is_unclaimed: false
- last_active: '2024-09-30T23:40:24.568Z'
+ last_active: '2024-10-02T23:17:49.549Z'
last_login: null
- last_password_change: '2024-09-30T23:40:24.568Z'
+ last_password_change: '2024-10-02T23:17:49.549Z'
name: ''
- password: md5$hnKF1ECaLfMoPJa5M6DuG9$fa17e5d2eb5011a3fcd89da0eda39993
+ password: md5$82aoIsfX2O8izM1mwFVyFi$8a057901050c59893bb27b04c65eeed5
session_nonce: null
- username: 6f1e4a94e49d4f258e33a5b0bc62ad57@example.com
+ username: ab2de483556e4012a3d8434fa945fae2@example.com
model: sentry.user
pk: 8
- fields:
avatar_type: 0
avatar_url: null
- date_joined: '2024-09-30T23:40:24.636Z'
+ date_joined: '2024-10-02T23:17:49.621Z'
email: ''
flags: '0'
is_active: true
@@ -305,20 +305,20 @@ source: tests/sentry/backup/test_releases.py
is_staff: false
is_superuser: false
is_unclaimed: false
- last_active: '2024-09-30T23:40:24.636Z'
+ last_active: '2024-10-02T23:17:49.621Z'
last_login: null
last_password_change: null
name: ''
password: ''
session_nonce: null
- username: test-app-2a32964a-0ca8-4a87-aae8-ea47fc255ea9
+ username: test-app-10ad8068-faea-47a5-a0af-530d3620c0ec
model: sentry.user
pk: 9
- fields:
avatar_type: 0
avatar_url: null
- date_joined: '2024-09-30T23:40:24.848Z'
- email: a33698fb604d4840a0c740d2483b01fa@example.com
+ date_joined: '2024-10-02T23:17:49.847Z'
+ email: 6fc445fcc0eb4ac789ca5761c1949479@example.com
flags: '0'
is_active: true
is_managed: false
@@ -327,13 +327,13 @@ source: tests/sentry/backup/test_releases.py
is_staff: false
is_superuser: false
is_unclaimed: false
- last_active: '2024-09-30T23:40:24.848Z'
+ last_active: '2024-10-02T23:17:49.847Z'
last_login: null
- last_password_change: '2024-09-30T23:40:24.848Z'
+ last_password_change: '2024-10-02T23:17:49.847Z'
name: ''
- password: md5$DwsnnvLeEcRCZOpRO0Aq3Y$0f220bb3f46db92030271516c80398c4
+ password: md5$43HpEC9gG9nW5TPKJbKFLY$445fb9a3cc9768ea22210435921207b0
session_nonce: null
- username: a33698fb604d4840a0c740d2483b01fa@example.com
+ username: 6fc445fcc0eb4ac789ca5761c1949479@example.com
model: sentry.user
pk: 10
- fields:
@@ -396,86 +396,87 @@ source: tests/sentry/backup/test_releases.py
model: sentry.userpermission
pk: 1
- fields:
- date_added: '2024-09-30T23:40:23.562Z'
- date_updated: '2024-09-30T23:40:23.562Z'
+ date_added: '2024-10-02T23:17:48.577Z'
+ date_updated: '2024-10-02T23:17:48.577Z'
name: test-admin-role
permissions: '[]'
model: sentry.userrole
pk: 1
- fields:
- date_added: '2024-09-30T23:40:23.568Z'
- date_updated: '2024-09-30T23:40:23.568Z'
+ date_added: '2024-10-02T23:17:48.582Z'
+ date_updated: '2024-10-02T23:17:48.582Z'
role: 1
user: 1
model: sentry.userroleuser
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.557Z'
- date_updated: '2024-09-30T23:40:24.557Z'
+ date_added: '2024-10-02T23:17:49.538Z'
+ date_updated: '2024-10-02T23:17:49.538Z'
logic_type: any
- organization: 4554792498561024
+ organization: 4554803734380544
model: workflow_engine.dataconditiongroup
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.564Z'
- date_updated: '2024-09-30T23:40:24.564Z'
+ date_added: '2024-10-02T23:17:49.546Z'
+ date_updated: '2024-10-02T23:17:49.546Z'
logic_type: all
- organization: 4554792498561024
+ organization: 4554803734380544
model: workflow_engine.dataconditiongroup
pk: 2
- fields:
- date_added: '2024-09-30T23:40:24.562Z'
- date_updated: '2024-09-30T23:40:24.562Z'
- organization: 4554792498561024
- query_id: 3333
+ date_added: '2024-10-02T23:17:49.543Z'
+ date_updated: '2024-10-02T23:17:49.543Z'
+ organization: 4554803734380544
+ query_id: 1553
type: 1
model: workflow_engine.datasource
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.555Z'
- date_updated: '2024-09-30T23:40:24.555Z'
- name: Enabled Dog
- organization: 4554792498561024
+ date_added: '2024-10-02T23:17:49.534Z'
+ date_updated: '2024-10-02T23:17:49.534Z'
+ name: Just Condor
+ organization: 4554803734380544
owner_team: null
owner_user_id: null
+ type: ''
workflow_condition_group: null
model: workflow_engine.detector
pk: 1
- fields:
active: false
- date_added: '2024-09-30T23:40:24.557Z'
- date_updated: '2024-09-30T23:40:24.557Z'
+ date_added: '2024-10-02T23:17:49.537Z'
+ date_updated: '2024-10-02T23:17:49.537Z'
detector: 1
detector_group_key: null
state: ok
model: workflow_engine.detectorstate
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.553Z'
- date_updated: '2024-09-30T23:40:24.553Z'
- name: Sweet Slug
- organization: 4554792498561024
+ date_added: '2024-10-02T23:17:49.533Z'
+ date_updated: '2024-10-02T23:17:49.533Z'
+ name: Maximum Wren
+ organization: 4554803734380544
when_condition_group: null
model: workflow_engine.workflow
pk: 1
- fields:
condition_group: 1
- date_added: '2024-09-30T23:40:24.561Z'
- date_updated: '2024-09-30T23:40:24.561Z'
+ date_added: '2024-10-02T23:17:49.542Z'
+ date_updated: '2024-10-02T23:17:49.542Z'
workflow: 1
model: workflow_engine.workflowdataconditiongroup
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.556Z'
- date_updated: '2024-09-30T23:40:24.556Z'
+ date_added: '2024-10-02T23:17:49.536Z'
+ date_updated: '2024-10-02T23:17:49.536Z'
detector: 1
workflow: 1
model: workflow_engine.detectorworkflow
pk: 1
- fields:
data_source: 1
- date_added: '2024-09-30T23:40:24.563Z'
- date_updated: '2024-09-30T23:40:24.563Z'
+ date_added: '2024-10-02T23:17:49.544Z'
+ date_updated: '2024-10-02T23:17:49.544Z'
detector: 1
model: workflow_engine.datasourcedetector
pk: 1
@@ -484,8 +485,8 @@ source: tests/sentry/backup/test_releases.py
condition: eq
condition_group: 1
condition_result: 'True'
- date_added: '2024-09-30T23:40:24.560Z'
- date_updated: '2024-09-30T23:40:24.560Z'
+ date_added: '2024-10-02T23:17:49.541Z'
+ date_updated: '2024-10-02T23:17:49.541Z'
type: WorkflowCondition
model: workflow_engine.datacondition
pk: 1
@@ -494,16 +495,16 @@ source: tests/sentry/backup/test_releases.py
condition: eq
condition_group: 2
condition_result: 'True'
- date_added: '2024-09-30T23:40:24.567Z'
- date_updated: '2024-09-30T23:40:24.567Z'
+ date_added: '2024-10-02T23:17:49.548Z'
+ date_updated: '2024-10-02T23:17:49.548Z'
type: DetectorCondition
model: workflow_engine.datacondition
pk: 2
- fields:
- date_added: '2024-09-30T23:40:24.523Z'
+ date_added: '2024-10-02T23:17:49.505Z'
is_global: false
name: Saved query for test-org
- organization: 4554792498561024
+ organization: 4554803734380544
owner_id: 2
query: saved query for test-org
sort: date
@@ -512,9 +513,9 @@ source: tests/sentry/backup/test_releases.py
model: sentry.savedsearch
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.522Z'
- last_seen: '2024-09-30T23:40:24.522Z'
- organization: 4554792498561024
+ date_added: '2024-10-02T23:17:49.504Z'
+ last_seen: '2024-10-02T23:17:49.504Z'
+ organization: 4554803734380544
query: some query for test-org
query_hash: 7c69362cd42207b83f80087bc15ebccb
type: 0
@@ -522,82 +523,82 @@ source: tests/sentry/backup/test_releases.py
model: sentry.recentsearch
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.226Z'
+ date_added: '2024-10-02T23:17:49.236Z'
first_event: null
flags: '10'
forced_color: null
name: project-test-org
- organization: 4554792498561024
+ organization: 4554803734380544
platform: null
public: false
slug: project-test-org
status: 0
template: null
model: sentry.project
- pk: 4554792498626561
+ pk: 4554803734446081
- fields:
- date_added: '2024-09-30T23:40:24.424Z'
+ date_added: '2024-10-02T23:17:49.406Z'
first_event: null
flags: '10'
forced_color: null
name: other-project-test-org
- organization: 4554792498561024
+ organization: 4554803734380544
platform: null
public: false
slug: other-project-test-org
status: 0
template: null
model: sentry.project
- pk: 4554792498626562
+ pk: 4554803734446082
- fields:
- date_added: '2024-09-30T23:40:24.658Z'
+ date_added: '2024-10-02T23:17:49.641Z'
first_event: null
flags: '10'
forced_color: null
- name: Unique Bluebird
- organization: 4554792498561024
+ name: Neat Crab
+ organization: 4554803734380544
platform: null
public: false
- slug: unique-bluebird
+ slug: neat-crab
status: 0
template: null
model: sentry.project
- pk: 4554792498626564
+ pk: 4554803734446084
- fields:
- date_added: '2024-09-30T23:40:24.859Z'
+ date_added: '2024-10-02T23:17:49.859Z'
first_event: null
flags: '10'
forced_color: null
- name: Intent Wildcat
- organization: 4554792498561024
+ name: Trusty Mastodon
+ organization: 4554803734380544
platform: null
public: false
- slug: intent-wildcat
+ slug: trusty-mastodon
status: 0
template: null
model: sentry.project
- pk: 4554792498626565
+ pk: 4554803734446085
- fields:
created_by: 2
- date_added: '2024-09-30T23:40:24.341Z'
+ date_added: '2024-10-02T23:17:49.351Z'
date_deactivated: null
date_last_used: null
name: token 1 for test-org
- organization_id: 4554792498561024
- project_last_used_id: 4554792498626561
+ organization_id: 4554803734380544
+ project_last_used_id: 4554803734446081
scope_list: '[''org:ci'']'
token_hashed: ABCDEFtest-org
token_last_characters: xyz1
model: sentry.orgauthtoken
pk: 1
- fields:
- date_added: '2024-09-30T23:40:23.757Z'
+ date_added: '2024-10-02T23:17:48.775Z'
email: null
flags: '0'
has_global_access: true
invite_status: 0
inviter_id: null
- organization: 4554792498561024
+ organization: 4554803734380544
role: owner
token: null
token_expires_at: null
@@ -608,13 +609,13 @@ source: tests/sentry/backup/test_releases.py
model: sentry.organizationmember
pk: 1
- fields:
- date_added: '2024-09-30T23:40:23.825Z'
+ date_added: '2024-10-02T23:17:48.842Z'
email: null
flags: '0'
has_global_access: true
invite_status: 0
inviter_id: null
- organization: 4554792498561024
+ organization: 4554803734380544
role: member
token: null
token_expires_at: null
@@ -625,13 +626,13 @@ source: tests/sentry/backup/test_releases.py
model: sentry.organizationmember
pk: 2
- fields:
- date_added: '2024-09-30T23:40:23.890Z'
+ date_added: '2024-10-02T23:17:48.906Z'
email: invited-by-superadmin-not-in-org@example.com
flags: '0'
has_global_access: true
invite_status: 1
inviter_id: 1
- organization: 4554792498561024
+ organization: 4554803734380544
role: member
token: null
token_expires_at: null
@@ -642,13 +643,13 @@ source: tests/sentry/backup/test_releases.py
model: sentry.organizationmember
pk: 3
- fields:
- date_added: '2024-09-30T23:40:23.914Z'
+ date_added: '2024-10-02T23:17:48.930Z'
email: invited-by-org-owner@example.com
flags: '0'
has_global_access: true
invite_status: 1
inviter_id: 2
- organization: 4554792498561024
+ organization: 4554803734380544
role: member
token: null
token_expires_at: null
@@ -659,13 +660,13 @@ source: tests/sentry/backup/test_releases.py
model: sentry.organizationmember
pk: 4
- fields:
- date_added: '2024-09-30T23:40:23.940Z'
+ date_added: '2024-10-02T23:17:48.955Z'
email: invited-by-org-member@example.com
flags: '0'
has_global_access: true
invite_status: 1
inviter_id: 3
- organization: 4554792498561024
+ organization: 4554803734380544
role: member
token: null
token_expires_at: null
@@ -676,13 +677,13 @@ source: tests/sentry/backup/test_releases.py
model: sentry.organizationmember
pk: 5
- fields:
- date_added: '2024-09-30T23:40:23.963Z'
+ date_added: '2024-10-02T23:17:48.978Z'
email: null
flags: '0'
has_global_access: true
invite_status: 0
inviter_id: 1
- organization: 4554792498561024
+ organization: 4554803734380544
role: member
token: null
token_expires_at: null
@@ -693,13 +694,13 @@ source: tests/sentry/backup/test_releases.py
model: sentry.organizationmember
pk: 6
- fields:
- date_added: '2024-09-30T23:40:24.031Z'
+ date_added: '2024-10-02T23:17:49.044Z'
email: null
flags: '0'
has_global_access: true
invite_status: 0
inviter_id: 2
- organization: 4554792498561024
+ organization: 4554803734380544
role: member
token: null
token_expires_at: null
@@ -710,13 +711,13 @@ source: tests/sentry/backup/test_releases.py
model: sentry.organizationmember
pk: 7
- fields:
- date_added: '2024-09-30T23:40:24.096Z'
+ date_added: '2024-10-02T23:17:49.108Z'
email: null
flags: '0'
has_global_access: true
invite_status: 0
inviter_id: 3
- organization: 4554792498561024
+ organization: 4554803734380544
role: member
token: null
token_expires_at: null
@@ -729,31 +730,31 @@ source: tests/sentry/backup/test_releases.py
- fields:
member: 2
requester_id: 2
- team: 4554792498626560
+ team: 4554803734446080
model: sentry.organizationaccessrequest
pk: 1
- fields:
config:
schedule: '* * * * *'
schedule_type: 1
- date_added: '2024-09-30T23:40:24.420Z'
- guid: d33ba321-9e78-41ef-85c1-ada207bb7a7c
+ date_added: '2024-10-02T23:17:49.401Z'
+ guid: 16e04b37-827b-4087-920d-1b56d4320f60
is_muted: false
name: ''
- organization_id: 4554792498561024
+ organization_id: 4554803734380544
owner_team_id: null
owner_user_id: 2
- project_id: 4554792498626561
- slug: a9d580f1b745
+ project_id: 4554803734446081
+ slug: c3810f64719b
status: 0
type: 3
model: sentry.monitor
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.544Z'
- date_updated: '2024-09-30T23:40:24.544Z'
+ date_added: '2024-10-02T23:17:49.522Z'
+ date_updated: '2024-10-02T23:17:49.522Z'
name: View 1 for test-org
- organization: 4554792498561024
+ organization: 4554803734380544
position: 0
query: some query for test-org
query_sort: date
@@ -761,116 +762,116 @@ source: tests/sentry/backup/test_releases.py
model: sentry.groupsearchview
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.411Z'
- name: happily star hornet
- organization_id: 4554792498561024
+ date_added: '2024-10-02T23:17:49.398Z'
+ name: truly super civet
+ organization_id: 4554803734380544
model: sentry.environment
pk: 1
- fields:
- date_added: '2024-09-30T23:40:23.541Z'
+ date_added: '2024-10-02T23:17:48.555Z'
email: superadmin
model: sentry.email
pk: 1
- fields:
- date_added: '2024-09-30T23:40:23.611Z'
+ date_added: '2024-10-02T23:17:48.627Z'
email: owner
model: sentry.email
pk: 2
- fields:
- date_added: '2024-09-30T23:40:23.628Z'
+ date_added: '2024-10-02T23:17:48.647Z'
email: member
model: sentry.email
pk: 3
- fields:
- date_added: '2024-09-30T23:40:23.649Z'
+ date_added: '2024-10-02T23:17:48.665Z'
email: added-by-superadmin-not-in-org
model: sentry.email
pk: 4
- fields:
- date_added: '2024-09-30T23:40:23.666Z'
+ date_added: '2024-10-02T23:17:48.683Z'
email: added-by-org-owner
model: sentry.email
pk: 5
- fields:
- date_added: '2024-09-30T23:40:23.683Z'
+ date_added: '2024-10-02T23:17:48.702Z'
email: added-by-org-member
model: sentry.email
pk: 6
- fields:
- date_added: '2024-09-30T23:40:24.471Z'
+ date_added: '2024-10-02T23:17:49.453Z'
email: admin@localhost
model: sentry.email
pk: 7
- fields:
- date_added: '2024-09-30T23:40:24.572Z'
- email: 6f1e4a94e49d4f258e33a5b0bc62ad57@example.com
+ date_added: '2024-10-02T23:17:49.554Z'
+ email: ab2de483556e4012a3d8434fa945fae2@example.com
model: sentry.email
pk: 8
- fields:
- date_added: '2024-09-30T23:40:24.642Z'
+ date_added: '2024-10-02T23:17:49.625Z'
email: ''
model: sentry.email
pk: 9
- fields:
- date_added: '2024-09-30T23:40:24.853Z'
- email: a33698fb604d4840a0c740d2483b01fa@example.com
+ date_added: '2024-10-02T23:17:49.852Z'
+ email: 6fc445fcc0eb4ac789ca5761c1949479@example.com
model: sentry.email
pk: 10
- fields:
- access_end: '2024-10-01T23:40:24.552Z'
- access_start: '2024-09-30T23:40:24.552Z'
- date_added: '2024-09-30T23:40:24.552Z'
- date_updated: '2024-09-30T23:40:24.552Z'
- organization: 4554792498561024
+ access_end: '2024-10-03T23:17:49.531Z'
+ access_start: '2024-10-02T23:17:49.531Z'
+ date_added: '2024-10-02T23:17:49.531Z'
+ date_updated: '2024-10-02T23:17:49.531Z'
+ organization: 4554803734380544
zendesk_tickets: '[]'
model: sentry.datasecrecywaiver
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.521Z'
- organization: 4554792498561024
+ date_added: '2024-10-02T23:17:49.503Z'
+ organization: 4554803734380544
slug: test-tombstone-in-test-org
model: sentry.dashboardtombstone
pk: 1
- fields:
created_by_id: 2
- date_added: '2024-09-30T23:40:24.517Z'
+ date_added: '2024-10-02T23:17:49.499Z'
filters: null
- last_visited: '2024-09-30T23:40:24.517Z'
- organization: 4554792498561024
+ last_visited: '2024-10-02T23:17:49.499Z'
+ organization: 4554803734380544
title: Dashboard 1 for test-org
visits: 1
model: sentry.dashboard
pk: 1
- fields:
condition: '{"op":"equals","name":"environment","value":"prod"}'
- condition_hash: 23ee44811554162c88b3635b76fd22e6a4c4fc53
+ condition_hash: b1bed9afbb60d76d92844345b51712689b4f59b9
created_by_id: 2
- date_added: '2024-09-30T23:40:24.397Z'
- end_date: '2024-10-01T00:40:24.392Z'
+ date_added: '2024-10-02T23:17:49.392Z'
+ end_date: '2024-10-03T00:17:49.388Z'
is_active: true
is_org_level: false
notification_sent: false
num_samples: 100
- organization: 4554792498561024
+ organization: 4554803734380544
query: environment:prod event.type:transaction
rule_id: 1
sample_rate: 0.5
- start_date: '2024-09-30T23:40:24.392Z'
+ start_date: '2024-10-02T23:17:49.388Z'
model: sentry.customdynamicsamplingrule
pk: 1
- fields:
- project: 4554792498626561
+ project: 4554803734446081
value: 2
model: sentry.counter
pk: 1
- fields:
config: {}
- date_added: '2024-09-30T23:40:24.288Z'
+ date_added: '2024-10-02T23:17:49.297Z'
default_global_access: true
default_role: 50
flags: '0'
last_sync: null
- organization_id: 4554792498561024
+ organization_id: 4554803734380544
provider: sentry
sync_time: null
model: sentry.authprovider
@@ -886,16 +887,16 @@ source: tests/sentry/backup/test_releases.py
- 3
key4:
nested_key: nested_value
- date_added: '2024-09-30T23:40:24.314Z'
+ date_added: '2024-10-02T23:17:49.324Z'
ident: 123456789test-org
- last_synced: '2024-09-30T23:40:24.314Z'
- last_verified: '2024-09-30T23:40:24.314Z'
+ last_synced: '2024-10-02T23:17:49.324Z'
+ last_verified: '2024-10-02T23:17:49.324Z'
user: 2
model: sentry.authidentity
pk: 1
- fields:
config: '""'
- created_at: '2024-09-30T23:40:23.553Z'
+ created_at: '2024-10-02T23:17:48.567Z'
last_used_at: null
type: 1
user: 1
@@ -903,7 +904,7 @@ source: tests/sentry/backup/test_releases.py
pk: 1
- fields:
config: '""'
- created_at: '2024-09-30T23:40:23.619Z'
+ created_at: '2024-10-02T23:17:48.638Z'
last_used_at: null
type: 1
user: 2
@@ -911,7 +912,7 @@ source: tests/sentry/backup/test_releases.py
pk: 2
- fields:
config: '""'
- created_at: '2024-09-30T23:40:23.640Z'
+ created_at: '2024-10-02T23:17:48.656Z'
last_used_at: null
type: 1
user: 3
@@ -919,7 +920,7 @@ source: tests/sentry/backup/test_releases.py
pk: 3
- fields:
config: '""'
- created_at: '2024-09-30T23:40:23.657Z'
+ created_at: '2024-10-02T23:17:48.675Z'
last_used_at: null
type: 1
user: 4
@@ -927,7 +928,7 @@ source: tests/sentry/backup/test_releases.py
pk: 4
- fields:
config: '""'
- created_at: '2024-09-30T23:40:23.674Z'
+ created_at: '2024-10-02T23:17:48.693Z'
last_used_at: null
type: 1
user: 5
@@ -935,7 +936,7 @@ source: tests/sentry/backup/test_releases.py
pk: 5
- fields:
config: '""'
- created_at: '2024-09-30T23:40:23.692Z'
+ created_at: '2024-10-02T23:17:48.710Z'
last_used_at: null
type: 1
user: 6
@@ -943,10 +944,10 @@ source: tests/sentry/backup/test_releases.py
pk: 6
- fields:
allowed_origins: null
- date_added: '2024-09-30T23:40:24.265Z'
- key: a8a17ee8deda4ad8a0a4d21f5db9e1d8
+ date_added: '2024-10-02T23:17:49.274Z'
+ key: 86359a2a640b4f36af0fb04be29b4af1
label: Default
- organization_id: 4554792498561024
+ organization_id: 4554803734380544
scope_list: '[]'
scopes: '0'
status: 0
@@ -954,11 +955,11 @@ source: tests/sentry/backup/test_releases.py
pk: 1
- fields:
allowed_origins: ''
- client_id: e1e6f75d0741584da1ea8fc325f53547e04271036865e2bf893211c085242287
- client_secret: f2f3b600e528811e11923d780f02937f135f060f589e4b1b9948eaf2b69ac370
- date_added: '2024-09-30T23:40:24.648Z'
+ client_id: fb443bb937cdfaa2814bdb02a820fb09635d597de72902d62b3d163b1dbad157
+ client_secret: cc2dac10c964e3c5e97bea34eaf1fdc8ba4438063732108ae193fb6d624917af
+ date_added: '2024-10-02T23:17:49.630Z'
homepage_url: null
- name: Improved Hippo
+ name: Classic Javelin
owner: 9
privacy_url: null
redirect_uris: ''
@@ -1017,89 +1018,89 @@ source: tests/sentry/backup/test_releases.py
model: sentry.useroption
pk: 6
- fields:
- date_hash_added: '2024-09-30T23:40:23.539Z'
+ date_hash_added: '2024-10-02T23:17:48.552Z'
email: superadmin
is_verified: true
user: 1
- validation_hash: epT28rQXxJXIIraPjJyGFVEW6mp5rKl5
+ validation_hash: UmkyjwJl4mKrT79ssnrVXOcgyGH4XbYw
model: sentry.useremail
pk: 1
- fields:
- date_hash_added: '2024-09-30T23:40:23.608Z'
+ date_hash_added: '2024-10-02T23:17:48.625Z'
email: owner
is_verified: true
user: 2
- validation_hash: ESzRdVKLjm0I2TpSCSZa0ZiwnPVaRoh9
+ validation_hash: 5APqERShNOxqZnnRU59sjUp7G5GSA1fV
model: sentry.useremail
pk: 2
- fields:
- date_hash_added: '2024-09-30T23:40:23.626Z'
+ date_hash_added: '2024-10-02T23:17:48.644Z'
email: member
is_verified: true
user: 3
- validation_hash: DEOyMHGAZGicNYqkbole7nnByVjcFJHZ
+ validation_hash: hb9UZ6kikC9E2kHIQmqZs85TS6hwLYCJ
model: sentry.useremail
pk: 3
- fields:
- date_hash_added: '2024-09-30T23:40:23.646Z'
+ date_hash_added: '2024-10-02T23:17:48.663Z'
email: added-by-superadmin-not-in-org
is_verified: true
user: 4
- validation_hash: y85iTQsdaqX4Lel5QMLqzoypOyY8WGk3
+ validation_hash: eCOYYZdJg6TZGLbYobGnlk78RioMIaiN
model: sentry.useremail
pk: 4
- fields:
- date_hash_added: '2024-09-30T23:40:23.663Z'
+ date_hash_added: '2024-10-02T23:17:48.681Z'
email: added-by-org-owner
is_verified: true
user: 5
- validation_hash: CpfIfCNqogHmmzWZ72xlLkWHH28c7waf
+ validation_hash: B83uJPj0NT7ZfFFoIssmj265Zsz8GXew
model: sentry.useremail
pk: 5
- fields:
- date_hash_added: '2024-09-30T23:40:23.680Z'
+ date_hash_added: '2024-10-02T23:17:48.699Z'
email: added-by-org-member
is_verified: true
user: 6
- validation_hash: PyQBcRFTpLct3Xi0YDg3mjYhMk8hycmc
+ validation_hash: 458fTgDeUv45VqqskoBxnTCZYFmoB8JP
model: sentry.useremail
pk: 6
- fields:
- date_hash_added: '2024-09-30T23:40:24.468Z'
+ date_hash_added: '2024-10-02T23:17:49.450Z'
email: admin@localhost
is_verified: true
user: 7
- validation_hash: diTob90xxOTu2O7OwoSRwOU4DuXBbj3y
+ validation_hash: xfvJHdBO0E00iQDqQgIha7iFKGMNQ46B
model: sentry.useremail
pk: 7
- fields:
- date_hash_added: '2024-09-30T23:40:24.570Z'
- email: 6f1e4a94e49d4f258e33a5b0bc62ad57@example.com
+ date_hash_added: '2024-10-02T23:17:49.552Z'
+ email: ab2de483556e4012a3d8434fa945fae2@example.com
is_verified: true
user: 8
- validation_hash: KTnBdBnGJrb0evIIIZP0pLmrLBgcabKm
+ validation_hash: KkUHqeUKNr1e8H7H3p8tyLANvaWpYIfP
model: sentry.useremail
pk: 8
- fields:
- date_hash_added: '2024-09-30T23:40:24.639Z'
+ date_hash_added: '2024-10-02T23:17:49.623Z'
email: ''
is_verified: false
user: 9
- validation_hash: wOJV2aIrESH19Gorx3FOBjfFDGIMm8wt
+ validation_hash: GwymkPM0k4jufmvPDigOpDKyHSbGG26y
model: sentry.useremail
pk: 9
- fields:
- date_hash_added: '2024-09-30T23:40:24.850Z'
- email: a33698fb604d4840a0c740d2483b01fa@example.com
+ date_hash_added: '2024-10-02T23:17:49.849Z'
+ email: 6fc445fcc0eb4ac789ca5761c1949479@example.com
is_verified: true
user: 10
- validation_hash: 3cd8S4EpHAy13kvIoT6eGFnrmT2ouQXO
+ validation_hash: GAN95mzDjiYFpWHSMQa2k8sXWKudFy9M
model: sentry.useremail
pk: 10
- fields:
aggregate: count()
dataset: events
- date_added: '2024-09-30T23:40:24.445Z'
+ date_added: '2024-10-02T23:17:49.427Z'
environment: null
query: level:error
resolution: 60
@@ -1110,7 +1111,7 @@ source: tests/sentry/backup/test_releases.py
- fields:
aggregate: count()
dataset: events
- date_added: '2024-09-30T23:40:24.479Z'
+ date_added: '2024-10-02T23:17:49.461Z'
environment: null
query: level:error
resolution: 60
@@ -1121,7 +1122,7 @@ source: tests/sentry/backup/test_releases.py
- fields:
aggregate: count()
dataset: events
- date_added: '2024-09-30T23:40:24.496Z'
+ date_added: '2024-10-02T23:17:49.480Z'
environment: null
query: test query
resolution: 60
@@ -1132,18 +1133,18 @@ source: tests/sentry/backup/test_releases.py
- fields:
application: 1
author: A Company
- creator_label: 6f1e4a94e49d4f258e33a5b0bc62ad57@example.com
+ creator_label: ab2de483556e4012a3d8434fa945fae2@example.com
creator_user: 8
- date_added: '2024-09-30T23:40:24.649Z'
+ date_added: '2024-10-02T23:17:49.632Z'
date_deleted: null
date_published: null
- date_updated: '2024-09-30T23:40:24.808Z'
+ date_updated: '2024-10-02T23:17:49.801Z'
events: '[]'
is_alertable: false
metadata: {}
name: test app
overview: A sample description
- owner_id: 4554792498561024
+ owner_id: 4554803734380544
popularity: 1
proxy_user: 9
redirect_url: https://example.com/sentry-app/redirect/
@@ -1184,27 +1185,27 @@ source: tests/sentry/backup/test_releases.py
scopes: '0'
slug: test-app
status: 0
- uuid: 4fdcdca6-1579-4db7-85d8-979ca6fa1dde
+ uuid: f315ef58-3efa-41df-a502-f9f395d8f87f
verify_install: true
webhook_url: https://example.com/sentry-app/webhook/
model: sentry.sentryapp
pk: 1
- fields:
data: '{"conditions":[{"id":"sentry.rules.conditions.first_seen_event.FirstSeenEventCondition"},{"id":"sentry.rules.conditions.every_event.EveryEventCondition"}],"action_match":"all","filter_match":"all","actions":[{"id":"sentry.rules.actions.notify_event.NotifyEventAction"},{"id":"sentry.rules.actions.notify_event_service.NotifyEventServiceAction","service":"mail"}]}'
- date_added: '2024-09-30T23:40:24.371Z'
+ date_added: '2024-10-02T23:17:49.383Z'
environment_id: null
label: ''
owner_team: null
owner_user_id: 2
- project: 4554792498626561
+ project: 4554803734446081
source: 0
status: 0
model: sentry.rule
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.455Z'
- date_updated: '2024-09-30T23:40:24.455Z'
- project: 4554792498626561
+ date_added: '2024-10-02T23:17:49.437Z'
+ date_updated: '2024-10-02T23:17:49.437Z'
+ project: 4554803734446081
query_extra: null
snuba_query: 1
status: 1
@@ -1213,9 +1214,9 @@ source: tests/sentry/backup/test_releases.py
model: sentry.querysubscription
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.486Z'
- date_updated: '2024-09-30T23:40:24.486Z'
- project: 4554792498626561
+ date_added: '2024-10-02T23:17:49.470Z'
+ date_updated: '2024-10-02T23:17:49.470Z'
+ project: 4554803734446081
query_extra: null
snuba_query: 2
status: 1
@@ -1224,9 +1225,9 @@ source: tests/sentry/backup/test_releases.py
model: sentry.querysubscription
pk: 2
- fields:
- date_added: '2024-09-30T23:40:24.501Z'
- date_updated: '2024-09-30T23:40:24.501Z'
- project: 4554792498626561
+ date_added: '2024-10-02T23:17:49.485Z'
+ date_updated: '2024-10-02T23:17:49.485Z'
+ project: 4554803734446081
query_extra: null
snuba_query: 3
status: 1
@@ -1235,9 +1236,9 @@ source: tests/sentry/backup/test_releases.py
model: sentry.querysubscription
pk: 3
- fields:
- date_added: '2024-09-30T23:40:24.662Z'
- date_updated: '2024-09-30T23:40:24.662Z'
- project: 4554792498626564
+ date_added: '2024-10-02T23:17:49.645Z'
+ date_updated: '2024-10-02T23:17:49.645Z'
+ project: 4554803734446084
query_extra: null
snuba_query: 1
status: 1
@@ -1246,9 +1247,9 @@ source: tests/sentry/backup/test_releases.py
model: sentry.querysubscription
pk: 4
- fields:
- date_added: '2024-09-30T23:40:24.863Z'
- date_updated: '2024-09-30T23:40:24.863Z'
- project: 4554792498626565
+ date_added: '2024-10-02T23:17:49.862Z'
+ date_updated: '2024-10-02T23:17:49.862Z'
+ project: 4554803734446085
query_extra: null
snuba_query: 1
status: 1
@@ -1257,30 +1258,30 @@ source: tests/sentry/backup/test_releases.py
model: sentry.querysubscription
pk: 5
- fields:
- project: 4554792498626561
- team: 4554792498626560
+ project: 4554803734446081
+ team: 4554803734446080
model: sentry.projectteam
pk: 1
- fields:
- project: 4554792498626562
- team: 4554792498626560
+ project: 4554803734446082
+ team: 4554803734446080
model: sentry.projectteam
pk: 2
- fields:
- date_added: '2024-09-30T23:40:24.257Z'
- organization: 4554792498561024
- project: 4554792498626561
+ date_added: '2024-10-02T23:17:49.265Z'
+ organization: 4554803734380544
+ project: 4554803734446081
redirect_slug: project_slug_in_test-org
model: sentry.projectredirect
pk: 1
- fields:
auto_assignment: true
codeowners_auto_sync: true
- date_created: '2024-09-30T23:40:24.252Z'
+ date_created: '2024-10-02T23:17:49.260Z'
fallthrough: true
is_active: true
- last_updated: '2024-09-30T23:40:24.252Z'
- project: 4554792498626561
+ last_updated: '2024-10-02T23:17:49.260Z'
+ project: 4554803734446081
raw: '{"hello":"hello"}'
schema:
hello: hello
@@ -1289,25 +1290,25 @@ source: tests/sentry/backup/test_releases.py
pk: 1
- fields:
key: sentry:option-epoch
- project: 4554792498626561
+ project: 4554803734446081
value: 13
model: sentry.projectoption
pk: 1
- fields:
key: sentry:option-epoch
- project: 4554792498626562
+ project: 4554803734446082
value: 13
model: sentry.projectoption
pk: 2
- fields:
key: sentry:option-epoch
- project: 4554792498626564
+ project: 4554803734446084
value: 13
model: sentry.projectoption
pk: 3
- fields:
key: sentry:option-epoch
- project: 4554792498626565
+ project: 4554803734446085
value: 13
model: sentry.projectoption
pk: 4
@@ -1316,14 +1317,14 @@ source: tests/sentry/backup/test_releases.py
dynamicSdkLoaderOptions:
hasPerformance: true
hasReplay: true
- date_added: '2024-09-30T23:40:24.237Z'
+ date_added: '2024-10-02T23:17:49.247Z'
label: Default
- project: 4554792498626561
- public_key: 6e47cf844088948c4b5d9bebe6a14c1c
+ project: 4554803734446081
+ public_key: 1663e88e67761b985a9eb58579b101e6
rate_limit_count: null
rate_limit_window: null
roles: '1'
- secret_key: 0e330706c16915db29c264b35fc087d9
+ secret_key: 58aed72fa451cf995b0d97176a30a37c
status: 0
use_case: user
model: sentry.projectkey
@@ -1333,14 +1334,14 @@ source: tests/sentry/backup/test_releases.py
dynamicSdkLoaderOptions:
hasPerformance: true
hasReplay: true
- date_added: '2024-09-30T23:40:24.435Z'
+ date_added: '2024-10-02T23:17:49.416Z'
label: Default
- project: 4554792498626562
- public_key: 97589de91babe28eb51ba89ac20a2085
+ project: 4554803734446082
+ public_key: ba559262682194f5139876c2d1c10134
rate_limit_count: null
rate_limit_window: null
roles: '1'
- secret_key: bf221131beafd6efc4cb836a5e3926a4
+ secret_key: 190e88fb1b60deae9c8623d5f3417bb5
status: 0
use_case: user
model: sentry.projectkey
@@ -1350,14 +1351,14 @@ source: tests/sentry/backup/test_releases.py
dynamicSdkLoaderOptions:
hasPerformance: true
hasReplay: true
- date_added: '2024-09-30T23:40:24.670Z'
+ date_added: '2024-10-02T23:17:49.654Z'
label: Default
- project: 4554792498626564
- public_key: 2dd9297615816b3c17977bbe9e932ea5
+ project: 4554803734446084
+ public_key: 57266a8f3159a89f462d84eb258e519c
rate_limit_count: null
rate_limit_window: null
roles: '1'
- secret_key: 36599114a3599598647de7fcff57cbe4
+ secret_key: bee6f86b7bd47575eb049c82cf320ecb
status: 0
use_case: user
model: sentry.projectkey
@@ -1367,14 +1368,14 @@ source: tests/sentry/backup/test_releases.py
dynamicSdkLoaderOptions:
hasPerformance: true
hasReplay: true
- date_added: '2024-09-30T23:40:24.872Z'
+ date_added: '2024-10-02T23:17:49.871Z'
label: Default
- project: 4554792498626565
- public_key: 34c6dc66553bba755bf0046c71dee93f
+ project: 4554803734446085
+ public_key: f36a07bfcf1388ad02327d2abc1994a9
rate_limit_count: null
rate_limit_window: null
roles: '1'
- secret_key: edbfd00c715371a9bbf8948cffc9bde3
+ secret_key: 86842a700ceb5b1c38e25338070c68c1
status: 0
use_case: user
model: sentry.projectkey
@@ -1383,12 +1384,12 @@ source: tests/sentry/backup/test_releases.py
config:
hello: hello
integration_id: 1
- project: 4554792498626561
+ project: 4554803734446081
model: sentry.projectintegration
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.251Z'
- project: 4554792498626561
+ date_added: '2024-10-02T23:17:49.259Z'
+ project: 4554803734446081
user_id: 2
model: sentry.projectbookmark
pk: 1
@@ -1396,12 +1397,12 @@ source: tests/sentry/backup/test_releases.py
is_active: true
organizationmember: 1
role: null
- team: 4554792498626560
+ team: 4554803734446080
model: sentry.organizationmemberteam
pk: 1
- fields:
integration_id: null
- organization: 4554792498561024
+ organization: 4554803734380544
sentry_app_id: null
target_display: Sentry User
target_identifier: '1'
@@ -1412,7 +1413,7 @@ source: tests/sentry/backup/test_releases.py
pk: 1
- fields:
integration_id: null
- organization: 4554792498561024
+ organization: 4554803734380544
sentry_app_id: 1
target_display: Sentry User
target_identifier: '1'
@@ -1422,24 +1423,24 @@ source: tests/sentry/backup/test_releases.py
model: sentry.notificationaction
pk: 2
- fields:
- disable_date: '2024-09-30T23:40:24.387Z'
+ disable_date: '2024-10-02T23:17:49.387Z'
opted_out: false
- organization: 4554792498561024
+ organization: 4554803734380544
rule: 1
- sent_final_email_date: '2024-09-30T23:40:24.387Z'
- sent_initial_email_date: '2024-09-30T23:40:24.387Z'
+ sent_final_email_date: '2024-10-02T23:17:49.387Z'
+ sent_initial_email_date: '2024-10-02T23:17:49.387Z'
model: sentry.neglectedrule
pk: 1
- fields:
environment: 1
is_hidden: null
- project: 4554792498626561
+ project: 4554803734446081
model: sentry.environmentproject
pk: 1
- fields:
dashboard: 1
dataset_source: 0
- date_added: '2024-09-30T23:40:24.518Z'
+ date_added: '2024-10-02T23:17:49.500Z'
description: null
detail: null
discover_widget_split: null
@@ -1454,63 +1455,63 @@ source: tests/sentry/backup/test_releases.py
pk: 1
- fields:
custom_dynamic_sampling_rule: 1
- project: 4554792498626561
+ project: 4554803734446081
model: sentry.customdynamicsamplingruleproject
pk: 1
- fields:
application: 1
- date_added: '2024-09-30T23:40:24.752Z'
- expires_at: '2024-10-01T07:40:24.752Z'
- hashed_refresh_token: 8903f7478b33c0f449e9384ac1fc303d9c95dcae34b7fed9c162ac5ebf70712a
- hashed_token: 14679f49467550f295e19d79b34741ca4609ab33c3df834eabadd8655c82a92c
+ date_added: '2024-10-02T23:17:49.736Z'
+ expires_at: '2024-10-03T07:17:49.735Z'
+ hashed_refresh_token: 96ca6697830f8cf9e6acc53496a61ad853796784fd633c0fb109b96cd41a60d7
+ hashed_token: 285ed30eea5f2ef6537e12d59b7b97340a149cab005e53183a834533474f9e68
name: null
- refresh_token: e00e8651421243055511c2ec640556f363cdc35eec63c10679d51c7dc90d088d
+ refresh_token: cc0b29d7590b6d070d9125ae76e7c9a32226a5e941b0e05db1997bcd9e3efdfc
scope_list: '[]'
scopes: '0'
scoping_organization_id: null
- token: 7696f8c5e05e5ada2b838c2afe028ea6d61c8e714d0709bc2418a1f49948e7e9
- token_last_characters: e7e9
+ token: 18abe9e706a62394946026e1a279c1b99888ca5b7e9d0402288f79481e5a1959
+ token_last_characters: '1959'
token_type: null
user: 9
model: sentry.apitoken
pk: 1
- fields:
application: 1
- date_added: '2024-09-30T23:40:24.823Z'
+ date_added: '2024-10-02T23:17:49.820Z'
expires_at: null
- hashed_refresh_token: 9160dca7f348cf3e6e32bd0ec23a5a87b719ca85231c3f72190723f835afdd73
- hashed_token: abfc02fe0b04285dab820af6a7661d0ec228c919bc12fff29403d278963f28c0
+ hashed_refresh_token: 75a932c34e15b9c200de5fdaa4fa638cb7f172ea9475312d20a2f6ba5ded5227
+ hashed_token: a58aa3e8a98d44e3a3f6546e50db10aa45a6ef080d43ba6dae55384874e77ef4
name: create_exhaustive_sentry_app
- refresh_token: a43c16090aba5f0cabf72b78d1456f416a7fea9bc146e53536042f7c2041e872
+ refresh_token: 8cc21374f0ce88505b6c08f41a4df9cbc7f9ff00e84e23658de10bf704af00dd
scope_list: '[]'
scopes: '0'
scoping_organization_id: null
- token: f2766a4143f4c43501e654d2c4f60e869912662ae79ddefa35ba43d94cf13deb
- token_last_characters: 3deb
+ token: d3ca2fc27353c6d928efb215edfd85468ab9ce8492e3eb5e06c0ed9d0f7ea11a
+ token_last_characters: a11a
token_type: null
user: 2
model: sentry.apitoken
pk: 2
- fields:
application: null
- date_added: '2024-09-30T23:40:24.891Z'
+ date_added: '2024-10-02T23:17:49.890Z'
expires_at: null
hashed_refresh_token: null
- hashed_token: 182476f2ef5790896bc39c584d2af8c632628419fd6f59b11d3fef438a05556c
+ hashed_token: 2769c494571aa2ddce170d84b8622b4a6cbdc006da5301b2ecf7096e58c1d40a
name: create_exhaustive_global_configs_for_
refresh_token: null
scope_list: '[]'
scopes: '0'
scoping_organization_id: null
- token: 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
- token_last_characters: '4221'
+ token: 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
+ token_last_characters: '8385'
token_type: sntryu_
user: 2
model: sentry.apitoken
pk: 3
- fields:
application: 1
- code: ce1501ad0ac88308338a11659ad141ee7d07151742c4569b1335f859fafdf126
+ code: f35e522f29e0ca741a6de0a0104e21e26ecee99e7374b4ab09f2e07953469560
expires_at: '2022-01-01T11:11:00.000Z'
organization_id: null
redirect_uri: https://example.com
@@ -1521,7 +1522,7 @@ source: tests/sentry/backup/test_releases.py
pk: 2
- fields:
application: 1
- date_added: '2024-09-30T23:40:24.821Z'
+ date_added: '2024-10-02T23:17:49.818Z'
organization_id: null
scope_list: '[]'
scopes: '0'
@@ -1530,7 +1531,7 @@ source: tests/sentry/backup/test_releases.py
pk: 1
- fields:
application: null
- date_added: '2024-09-30T23:40:24.890Z'
+ date_added: '2024-10-02T23:17:49.889Z'
organization_id: null
scope_list: '[]'
scopes: '0'
@@ -1539,14 +1540,14 @@ source: tests/sentry/backup/test_releases.py
pk: 2
- fields:
comparison_delta: null
- date_added: '2024-09-30T23:40:24.447Z'
- date_modified: '2024-09-30T23:40:24.447Z'
+ date_added: '2024-10-02T23:17:49.429Z'
+ date_modified: '2024-10-02T23:17:49.429Z'
description: null
detection_type: static
include_all_projects: true
monitor_type: 0
- name: Tops Werewolf
- organization: 4554792498561024
+ name: Meet Labrador
+ organization: 4554803734380544
resolve_threshold: null
seasonality: null
sensitivity: null
@@ -1560,14 +1561,14 @@ source: tests/sentry/backup/test_releases.py
pk: 1
- fields:
comparison_delta: null
- date_added: '2024-09-30T23:40:24.482Z'
- date_modified: '2024-09-30T23:40:24.482Z'
+ date_added: '2024-10-02T23:17:49.466Z'
+ date_modified: '2024-10-02T23:17:49.466Z'
description: null
detection_type: static
include_all_projects: false
monitor_type: 1
- name: Stirred Mink
- organization: 4554792498561024
+ name: Awake Raptor
+ organization: 4554803734380544
resolve_threshold: null
seasonality: null
sensitivity: null
@@ -1581,14 +1582,14 @@ source: tests/sentry/backup/test_releases.py
pk: 2
- fields:
comparison_delta: null
- date_added: '2024-09-30T23:40:24.498Z'
- date_modified: '2024-09-30T23:40:24.498Z'
+ date_added: '2024-10-02T23:17:49.482Z'
+ date_modified: '2024-10-02T23:17:49.482Z'
description: null
detection_type: static
include_all_projects: false
monitor_type: 0
- name: Nearby Calf
- organization: 4554792498561024
+ name: Free Midge
+ organization: 4554803734380544
resolve_threshold: null
seasonality: null
sensitivity: null
@@ -1618,13 +1619,13 @@ source: tests/sentry/backup/test_releases.py
- fields:
api_grant: null
api_token: 1
- date_added: '2024-09-30T23:40:24.682Z'
+ date_added: '2024-10-02T23:17:49.666Z'
date_deleted: null
- date_updated: '2024-09-30T23:40:24.721Z'
- organization_id: 4554792498561024
+ date_updated: '2024-10-02T23:17:49.707Z'
+ organization_id: 4554803734380544
sentry_app: 1
status: 1
- uuid: aeace1cb-e02d-46ae-8e10-41e72817793c
+ uuid: 87d08793-5123-49f0-8a61-d4735efeb710
model: sentry.sentryappinstallation
pk: 1
- fields:
@@ -1662,12 +1663,12 @@ source: tests/sentry/backup/test_releases.py
type: alert-rule-action
sentry_app: 1
type: alert-rule-action
- uuid: 59b2f303-c333-4ad9-acc1-51b4477319de
+ uuid: 362cdce7-dc3b-4a4a-b257-1c012bd91122
model: sentry.sentryappcomponent
pk: 1
- fields:
alert_rule: null
- date_added: '2024-09-30T23:40:24.376Z'
+ date_added: '2024-10-02T23:17:49.385Z'
owner_id: 2
rule: 1
until: null
@@ -1675,7 +1676,7 @@ source: tests/sentry/backup/test_releases.py
model: sentry.rulesnooze
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.373Z'
+ date_added: '2024-10-02T23:17:49.384Z'
rule: 1
type: 1
user_id: 2
@@ -1683,20 +1684,20 @@ source: tests/sentry/backup/test_releases.py
pk: 1
- fields:
action: 1
- project: 4554792498626561
+ project: 4554803734446081
model: sentry.notificationactionproject
pk: 1
- fields:
action: 2
- project: 4554792498626561
+ project: 4554803734446081
model: sentry.notificationactionproject
pk: 2
- fields:
aggregates: null
columns: null
conditions: ''
- date_added: '2024-09-30T23:40:24.519Z'
- date_modified: '2024-09-30T23:40:24.519Z'
+ date_added: '2024-10-02T23:17:49.501Z'
+ date_modified: '2024-10-02T23:17:49.501Z'
field_aliases: null
fields: '[]'
is_hidden: false
@@ -1710,8 +1711,8 @@ source: tests/sentry/backup/test_releases.py
- fields:
alert_rule: 1
alert_threshold: 100.0
- date_added: '2024-09-30T23:40:24.463Z'
- label: Amazing Mammoth
+ date_added: '2024-10-02T23:17:49.445Z'
+ label: Sought Owl
resolve_threshold: null
threshold_type: null
model: sentry.alertruletrigger
@@ -1719,39 +1720,39 @@ source: tests/sentry/backup/test_releases.py
- fields:
alert_rule: 2
alert_threshold: 100.0
- date_added: '2024-09-30T23:40:24.492Z'
- label: Well Antelope
+ date_added: '2024-10-02T23:17:49.477Z'
+ label: Smashing Mayfly
resolve_threshold: null
threshold_type: null
model: sentry.alertruletrigger
pk: 2
- fields:
alert_rule: 1
- date_added: '2024-09-30T23:40:24.454Z'
- project: 4554792498626561
+ date_added: '2024-10-02T23:17:49.436Z'
+ project: 4554803734446081
model: sentry.alertruleprojects
pk: 1
- fields:
alert_rule: 2
- date_added: '2024-09-30T23:40:24.483Z'
- project: 4554792498626561
+ date_added: '2024-10-02T23:17:49.468Z'
+ project: 4554803734446081
model: sentry.alertruleprojects
pk: 2
- fields:
alert_rule: 3
- date_added: '2024-09-30T23:40:24.499Z'
- project: 4554792498626561
+ date_added: '2024-10-02T23:17:49.483Z'
+ project: 4554803734446081
model: sentry.alertruleprojects
pk: 3
- fields:
alert_rule: 1
- date_added: '2024-09-30T23:40:24.450Z'
- project: 4554792498626562
+ date_added: '2024-10-02T23:17:49.432Z'
+ project: 4554803734446082
model: sentry.alertruleexcludedprojects
pk: 1
- fields:
alert_rule: 1
- date_added: '2024-09-30T23:40:24.456Z'
+ date_added: '2024-10-02T23:17:49.438Z'
previous_alert_rule: null
type: 1
user_id: 2
@@ -1759,7 +1760,7 @@ source: tests/sentry/backup/test_releases.py
pk: 1
- fields:
alert_rule: 2
- date_added: '2024-09-30T23:40:24.485Z'
+ date_added: '2024-10-02T23:17:49.469Z'
previous_alert_rule: null
type: 1
user_id: null
@@ -1767,7 +1768,7 @@ source: tests/sentry/backup/test_releases.py
pk: 2
- fields:
alert_rule: 3
- date_added: '2024-09-30T23:40:24.502Z'
+ date_added: '2024-10-02T23:17:49.486Z'
previous_alert_rule: null
type: 1
user_id: null
@@ -1776,20 +1777,20 @@ source: tests/sentry/backup/test_releases.py
- fields:
alert_rule: 2
condition_type: 0
- date_added: '2024-09-30T23:40:24.483Z'
+ date_added: '2024-10-02T23:17:49.467Z'
label: ''
model: sentry.alertruleactivationcondition
pk: 1
- fields:
actor_id: 1
application_id: 1
- date_added: '2024-09-30T23:40:24.717Z'
+ date_added: '2024-10-02T23:17:49.703Z'
events: '[]'
- guid: 0df49f6920754fd2b675b1c143f0b37a
+ guid: f240cee2159d49e3b9376fc08552690b
installation_id: 1
- organization_id: 4554792498561024
+ organization_id: 4554803734380544
project_id: null
- secret: fc2e019b8f1914262f53b55fccca3bde190114e10b29c9b5858ada4add5bdf65
+ secret: b5cb095cee45416f06af0278b4eed4460bc122d301d7010d3d5fae4dd1b6a1a9
status: 0
url: https://example.com/sentry-app/webhook/
version: 0
@@ -1798,13 +1799,13 @@ source: tests/sentry/backup/test_releases.py
- fields:
actor_id: 10
application_id: 1
- date_added: '2024-09-30T23:40:24.880Z'
+ date_added: '2024-10-02T23:17:49.879Z'
events: '[''event.created'']'
- guid: 1602a1ec0f894e1680f1015619a17f43
+ guid: 2583120ed5f041ac90f4b7659cd53169
installation_id: 1
- organization_id: 4554792498561024
- project_id: 4554792498626565
- secret: 30141ef7d798ccc784a229a5faadd432547e99650d8e91ce40b384c8b974070b
+ organization_id: 4554803734380544
+ project_id: 4554803734446085
+ secret: 966d2cd710a5ff8d1f7944d9800f66d723c76158d37260204b677e2aa7e81b47
status: 0
url: https://example.com/sentry/webhook
version: 0
@@ -1813,24 +1814,24 @@ source: tests/sentry/backup/test_releases.py
- fields:
activation: null
alert_rule: 3
- date_added: '2024-09-30T23:40:24.506Z'
+ date_added: '2024-10-02T23:17:49.489Z'
date_closed: null
- date_detected: '2024-09-30T23:40:24.504Z'
- date_started: '2024-09-30T23:40:24.504Z'
+ date_detected: '2024-10-02T23:17:49.488Z'
+ date_started: '2024-10-02T23:17:49.488Z'
detection_uuid: null
identifier: 1
- organization: 4554792498561024
+ organization: 4554803734380544
status: 1
status_method: 3
subscription: null
- title: Aware Vervet
+ title: Brief Racer
type: 2
model: sentry.incident
pk: 1
- fields:
dashboard_widget_query: 1
- date_added: '2024-09-30T23:40:24.520Z'
- date_modified: '2024-09-30T23:40:24.520Z'
+ date_added: '2024-10-02T23:17:49.502Z'
+ date_modified: '2024-10-02T23:17:49.502Z'
extraction_state: disabled:not-applicable
spec_hashes: '[]'
spec_version: null
@@ -1838,13 +1839,13 @@ source: tests/sentry/backup/test_releases.py
pk: 1
- fields:
alert_rule_trigger: 1
- date_added: '2024-09-30T23:40:24.464Z'
+ date_added: '2024-10-02T23:17:49.446Z'
query_subscription: 1
model: sentry.alertruletriggerexclusion
pk: 1
- fields:
alert_rule_trigger: 1
- date_added: '2024-09-30T23:40:24.478Z'
+ date_added: '2024-10-02T23:17:49.460Z'
integration_id: null
sentry_app_config: null
sentry_app_id: null
@@ -1857,7 +1858,7 @@ source: tests/sentry/backup/test_releases.py
pk: 1
- fields:
alert_rule_trigger: 2
- date_added: '2024-09-30T23:40:24.494Z'
+ date_added: '2024-10-02T23:17:49.478Z'
integration_id: null
sentry_app_config: null
sentry_app_id: null
@@ -1869,35 +1870,35 @@ source: tests/sentry/backup/test_releases.py
model: sentry.alertruletriggeraction
pk: 2
- fields:
- date_added: '2024-09-30T23:40:24.512Z'
- end: '2024-09-30T23:40:24.512Z'
+ date_added: '2024-10-02T23:17:49.494Z'
+ end: '2024-10-02T23:17:49.494Z'
period: 1
- start: '2024-09-29T23:40:24.512Z'
+ start: '2024-10-01T23:17:49.494Z'
values: '[[1.0, 2.0, 3.0], [1.5, 2.5, 3.5]]'
model: sentry.timeseriessnapshot
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.517Z'
+ date_added: '2024-10-02T23:17:49.498Z'
incident: 1
- target_run_date: '2024-10-01T03:40:24.516Z'
+ target_run_date: '2024-10-03T03:17:49.498Z'
model: sentry.pendingincidentsnapshot
pk: 1
- fields:
alert_rule_trigger: 1
- date_added: '2024-09-30T23:40:24.515Z'
- date_modified: '2024-09-30T23:40:24.515Z'
+ date_added: '2024-10-02T23:17:49.497Z'
+ date_modified: '2024-10-02T23:17:49.497Z'
incident: 1
status: 1
model: sentry.incidenttrigger
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.514Z'
+ date_added: '2024-10-02T23:17:49.496Z'
incident: 1
user_id: 2
model: sentry.incidentsubscription
pk: 1
- fields:
- date_added: '2024-09-30T23:40:24.513Z'
+ date_added: '2024-10-02T23:17:49.495Z'
event_stats_snapshot: 1
incident: 1
total_events: 1
@@ -1906,7 +1907,7 @@ source: tests/sentry/backup/test_releases.py
pk: 1
- fields:
comment: hello test-org
- date_added: '2024-09-30T23:40:24.511Z'
+ date_added: '2024-10-02T23:17:49.493Z'
incident: 1
notification_uuid: null
previous_value: null
From b0c9b0aee0bcc9b558ef7ca9b3df9a0eea5e3cb5 Mon Sep 17 00:00:00 2001
From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com>
Date: Thu, 3 Oct 2024 13:40:04 -0400
Subject: [PATCH 077/139] ref: fix typing for sentry.features (#78557)
---
pyproject.toml | 3 --
src/sentry/features/handler.py | 27 +++++++++++------
src/sentry/features/manager.py | 32 ++++++++++----------
src/sentry/testutils/helpers/features.py | 22 ++++++++------
tests/sentry/tasks/test_on_demand_metrics.py | 4 +--
5 files changed, 49 insertions(+), 39 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index d9bbffceab27ff..334f7d1b81c55f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -205,8 +205,6 @@ module = [
"sentry.db.router",
"sentry.discover.endpoints.discover_key_transactions",
"sentry.eventstore.models",
- "sentry.features.handler",
- "sentry.features.manager",
"sentry.grouping.strategies.legacy",
"sentry.identity.bitbucket.provider",
"sentry.identity.github_enterprise.provider",
@@ -348,7 +346,6 @@ module = [
"sentry.templatetags.sentry_plugins",
"sentry.testutils.cases",
"sentry.testutils.fixtures",
- "sentry.testutils.helpers.features",
"sentry.testutils.helpers.notifications",
"sentry.testutils.helpers.slack",
"sentry.utils.auth",
diff --git a/src/sentry/features/handler.py b/src/sentry/features/handler.py
index 846626c158a947..4239e49506a106 100644
--- a/src/sentry/features/handler.py
+++ b/src/sentry/features/handler.py
@@ -1,9 +1,7 @@
from __future__ import annotations
-__all__ = ["FeatureHandler", "BatchFeatureHandler"]
-
import abc
-from collections.abc import Mapping, MutableSet, Sequence
+from collections.abc import Sequence
from typing import TYPE_CHECKING
if TYPE_CHECKING:
@@ -17,6 +15,9 @@
from sentry.users.services.user import RpcUser
+__all__ = ["FeatureHandler", "BatchFeatureHandler"]
+
+
class FeatureHandler:
"""
Base class for defining custom logic for feature decisions.
@@ -28,7 +29,7 @@ class FeatureHandler:
as we don't programatically release features in self-hosted.
"""
- features: MutableSet[str] = set()
+ features: set[str] = set()
def __call__(self, feature: Feature, actor: User) -> bool | None:
if feature.name not in self.features:
@@ -45,7 +46,7 @@ def has(
) -> bool | None:
raise NotImplementedError
- def has_for_batch(self, batch: FeatureCheckBatch) -> Mapping[Project, bool | None]:
+ def has_for_batch(self, batch: FeatureCheckBatch) -> dict[Project, bool | None]:
# If not overridden, iterate over objects in the batch individually.
return {
obj: self.has(feature, batch.actor)
@@ -60,7 +61,7 @@ def batch_has(
projects: Sequence[Project] | None = None,
organization: Organization | None = None,
batch: bool = True,
- ) -> Mapping[str, Mapping[str, bool | None]] | None:
+ ) -> dict[str, dict[str, bool | None]] | None:
raise NotImplementedError
@@ -80,13 +81,21 @@ class BatchFeatureHandler(FeatureHandler):
@abc.abstractmethod
def _check_for_batch(
- self, feature_name: str, entity: Organization | User, actor: User
+ self,
+ feature_name: str,
+ entity: Organization | User | None,
+ actor: User | RpcUser | AnonymousUser | None,
) -> bool | None:
raise NotImplementedError
- def has(self, feature: Feature, actor: User, skip_entity: bool | None = False) -> bool | None:
+ def has(
+ self,
+ feature: Feature,
+ actor: User | RpcUser | AnonymousUser | None,
+ skip_entity: bool | None = False,
+ ) -> bool | None:
return self._check_for_batch(feature.name, feature.get_subject(), actor)
- def has_for_batch(self, batch: FeatureCheckBatch) -> Mapping[Project, bool | None]:
+ def has_for_batch(self, batch: FeatureCheckBatch) -> dict[Project, bool | None]:
flag = self._check_for_batch(batch.feature_name, batch.subject, batch.actor)
return {obj: flag for obj in batch.objects}
diff --git a/src/sentry/features/manager.py b/src/sentry/features/manager.py
index 4e045f4d8eaa91..de8d4f79000a25 100644
--- a/src/sentry/features/manager.py
+++ b/src/sentry/features/manager.py
@@ -6,7 +6,7 @@
import abc
from collections import defaultdict
-from collections.abc import Iterable, Mapping, MutableMapping, MutableSet, Sequence
+from collections.abc import Iterable, Sequence
from typing import TYPE_CHECKING, Any
import sentry_sdk
@@ -44,7 +44,7 @@ class RegisteredFeatureManager:
"""
def __init__(self) -> None:
- self._handler_registry: MutableMapping[str, list[FeatureHandler]] = defaultdict(list)
+ self._handler_registry: dict[str, list[FeatureHandler]] = defaultdict(list)
def add_handler(self, handler: FeatureHandler) -> None:
"""
@@ -78,7 +78,7 @@ def has_for_batch(
organization: Organization,
objects: Sequence[Project],
actor: User | None = None,
- ) -> Mapping[Project, bool]:
+ ) -> dict[Project, bool | None]:
"""
Determine if a feature is enabled for a batch of objects.
@@ -100,7 +100,7 @@ def has_for_batch(
>>> FeatureManager.has_for_batch('projects:feature', organization, [project1, project2], actor=request.user)
"""
- result = dict()
+ result: dict[Project, bool | None] = {}
remaining = set(objects)
handlers = self._handler_registry[name]
@@ -143,17 +143,17 @@ def has_for_batch(
class FeatureManager(RegisteredFeatureManager):
def __init__(self) -> None:
super().__init__()
- self._feature_registry: MutableMapping[str, type[Feature]] = {}
+ self._feature_registry: dict[str, type[Feature]] = {}
# Deprecated: Remove entity_features once flagr has been removed.
- self.entity_features: MutableSet[str] = set()
- self.exposed_features: MutableSet[str] = set()
- self.option_features: MutableSet[str] = set()
- self.flagpole_features: MutableSet[str] = set()
+ self.entity_features: set[str] = set()
+ self.exposed_features: set[str] = set()
+ self.option_features: set[str] = set()
+ self.flagpole_features: set[str] = set()
self._entity_handler: FeatureHandler | None = None
def all(
self, feature_type: type[Feature] = Feature, api_expose_only: bool = False
- ) -> Mapping[str, type[Feature]]:
+ ) -> dict[str, type[Feature]]:
"""
Get a mapping of feature name -> feature class, optionally specific to a
particular feature type.
@@ -328,7 +328,7 @@ def batch_has(
actor: User | RpcUser | AnonymousUser | None = None,
projects: Sequence[Project] | None = None,
organization: Organization | None = None,
- ) -> Mapping[str, Mapping[str, bool | None]] | None:
+ ) -> dict[str, dict[str, bool | None]] | None:
"""
Determine if multiple features are enabled. Unhandled flags will not be in
the results if they cannot be handled.
@@ -346,7 +346,7 @@ def batch_has(
# Fall back to default handler if no entity handler available.
project_features = [name for name in feature_names if name.startswith("projects:")]
if projects and project_features:
- results: MutableMapping[str, Mapping[str, bool]] = {}
+ results: dict[str, dict[str, bool | None]] = {}
for project in projects:
proj_results = results[f"project:{project.id}"] = {}
for feature_name in project_features:
@@ -357,7 +357,7 @@ def batch_has(
org_features = filter(lambda name: name.startswith("organizations:"), feature_names)
if organization and org_features:
- org_results = {}
+ org_results: dict[str, bool | None] = {}
for feature_name in org_features:
org_results[feature_name] = self.has(
feature_name, organization, actor=actor
@@ -370,7 +370,7 @@ def batch_has(
feature_names,
)
if unscoped_features:
- unscoped_results = {}
+ unscoped_results: dict[str, bool | None] = {}
for feature_name in unscoped_features:
unscoped_results[feature_name] = self.has(feature_name, actor=actor)
return {"unscoped": unscoped_results}
@@ -417,7 +417,7 @@ def __init__(
self.objects = objects
self.actor = actor
- def get_feature_objects(self) -> Mapping[Project, Feature]:
+ def get_feature_objects(self) -> dict[Project, Feature]:
"""
Iterate over individual Feature objects.
@@ -429,5 +429,5 @@ def get_feature_objects(self) -> Mapping[Project, Feature]:
return {obj: cls(self.feature_name, obj) for obj in self.objects}
@property
- def subject(self) -> Organization | User:
+ def subject(self) -> Organization | User | None:
return self.organization or self.actor
diff --git a/src/sentry/testutils/helpers/features.py b/src/sentry/testutils/helpers/features.py
index a7cf343f21c719..13df0462f3479d 100644
--- a/src/sentry/testutils/helpers/features.py
+++ b/src/sentry/testutils/helpers/features.py
@@ -2,7 +2,7 @@
import functools
import logging
-from collections.abc import Generator, Mapping
+from collections.abc import Generator, Mapping, Sequence
from contextlib import contextmanager
from unittest.mock import patch
@@ -24,7 +24,7 @@
@contextmanager
-def Feature(names):
+def Feature(names: str | Sequence[str] | dict[str, bool]) -> Generator[None]:
"""
Control whether a feature is enabled.
@@ -102,14 +102,18 @@ def features_override(name, *args, **kwargs):
logger.info("Flag defaulting to %s: %s", default_value, repr(name))
return default_value
- def batch_features_override(_feature_names, projects=None, organization=None, *args, **kwargs):
+ def batch_features_override(
+ _feature_names: Sequence[str], projects=None, organization=None, *args, **kwargs
+ ):
feature_results = {name: names[name] for name in _feature_names if name in names}
default_feature_names = [name for name in _feature_names if name not in names]
- default_feature_results = {}
+ default_feature_results: dict[str, dict[str, bool | None]] = {}
if default_feature_names:
- default_feature_results = default_batch_has(
+ defaults = default_batch_has(
default_feature_names, projects=projects, organization=organization, **kwargs
)
+ if defaults:
+ default_feature_results.update(defaults)
if projects:
results = {}
@@ -122,13 +126,13 @@ def batch_features_override(_feature_names, projects=None, organization=None, *a
return results
elif organization:
result_key = f"organization:{organization.id}"
- results = {**feature_results, **default_feature_results[result_key]}
- results = {
+ results_for_org = {**feature_results, **default_feature_results[result_key]}
+ results_for_org = {
name: resolve_feature_name_value_for_org(organization, val)
- for name, val in results.items()
+ for name, val in results_for_org.items()
if name.startswith("organization")
}
- return {result_key: results}
+ return {result_key: results_for_org}
with patch("sentry.features.has") as features_has:
features_has.side_effect = features_override
diff --git a/tests/sentry/tasks/test_on_demand_metrics.py b/tests/sentry/tasks/test_on_demand_metrics.py
index 659efc06fd7e45..5fd62ceca5cc2b 100644
--- a/tests/sentry/tasks/test_on_demand_metrics.py
+++ b/tests/sentry/tasks/test_on_demand_metrics.py
@@ -364,7 +364,7 @@ def project(organization: Organization) -> Project:
)
@django_db_all
def test_schedule_on_demand_check(
- feature_flags: set[str],
+ feature_flags: dict[str, bool],
option_enable: bool,
option_rollout: bool,
option_batch_size: float,
@@ -460,7 +460,7 @@ def test_schedule_on_demand_check(
def test_process_widget_specs(
raw_snql_query: Any,
_set_cardinality_cache: Any,
- feature_flags: set[str],
+ feature_flags: dict[str, bool],
option_enable: bool,
widget_query_ids: Sequence[int],
set_high_cardinality: bool,
From 6f8cccbd97b5edeceb76e4a3de011f1338a51ec7 Mon Sep 17 00:00:00 2001
From: Dan Fuller
Date: Thu, 3 Oct 2024 10:54:22 -0700
Subject: [PATCH 078/139] chore(registry): Formalize registry pattern in the
codebase (#78542)
We use registries everywhere, but there's no standard way to implement
them, so they usually end up copy/pasted. This PR implements a standard
registry class that maps a string key to a generic object.
Usually the registered object will be a Callable, but it can be any type
you want to register. One nice advantage of formalizing the registry is
that you can declare each registry with a unique name, which makes it
much clearer what is registered where and easier to search for
registrations.
As an example, we define our registry somewhere like this:
```
workflow_engine_registry = Registry[DetectorHandler]()
```
Then decorate like:
```
@workflow_engine_registry.register("metric")
class MetricDetectorHandler(DetectorHandler):
pass
```
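Lookups then go through `get`, which raises `NoRegistrationExistsError`
for unknown keys (see the implementation below). A minimal usage sketch,
assuming the `workflow_engine_registry` and handler from the example
above:
```
from sentry.utils.registry import NoRegistrationExistsError

# Resolve the handler class registered under the "metric" key.
handler_cls = workflow_engine_registry.get("metric")
assert handler_cls is MetricDetectorHandler

# Unknown keys raise instead of silently returning None.
try:
    workflow_engine_registry.get("does-not-exist")
except NoRegistrationExistsError:
    pass
```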
---
src/sentry/utils/registry.py | 35 +++++++++++++++++++++++++++++
tests/sentry/utils/test_registry.py | 28 +++++++++++++++++++++++
2 files changed, 63 insertions(+)
create mode 100644 src/sentry/utils/registry.py
create mode 100644 tests/sentry/utils/test_registry.py
diff --git a/src/sentry/utils/registry.py b/src/sentry/utils/registry.py
new file mode 100644
index 00000000000000..1174cb876ac86f
--- /dev/null
+++ b/src/sentry/utils/registry.py
@@ -0,0 +1,35 @@
+from __future__ import annotations
+
+from typing import Generic, TypeVar
+
+
+class AlreadyRegisteredError(ValueError):
+ pass
+
+
+class NoRegistrationExistsError(ValueError):
+ pass
+
+
+T = TypeVar("T")
+
+
+class Registry(Generic[T]):
+ def __init__(self):
+ self.registrations: dict[str, T] = {}
+
+ def register(self, key: str):
+ def inner(item: T) -> T:
+ if key in self.registrations:
+ raise AlreadyRegisteredError(
+ f"A registration already exists for {key}: {self.registrations[key]}"
+ )
+ self.registrations[key] = item
+ return item
+
+ return inner
+
+ def get(self, key: str) -> T:
+ if key not in self.registrations:
+ raise NoRegistrationExistsError(f"No registration exists for {key}")
+ return self.registrations[key]
diff --git a/tests/sentry/utils/test_registry.py b/tests/sentry/utils/test_registry.py
new file mode 100644
index 00000000000000..a922fb65e8c52b
--- /dev/null
+++ b/tests/sentry/utils/test_registry.py
@@ -0,0 +1,28 @@
+from collections.abc import Callable
+
+import pytest
+
+from sentry.testutils.cases import TestCase
+from sentry.utils.registry import AlreadyRegisteredError, NoRegistrationExistsError, Registry
+
+
+class RegistryTest(TestCase):
+ def test(self):
+ test_registry = Registry[Callable]()
+
+ @test_registry.register("something")
+ def registered_func():
+ pass
+
+ def unregistered_func():
+ pass
+
+ assert test_registry.get("something") == registered_func
+ with pytest.raises(NoRegistrationExistsError):
+ test_registry.get("something else")
+
+ with pytest.raises(AlreadyRegisteredError):
+ test_registry.register("something")(unregistered_func)
+
+ test_registry.register("something else")(unregistered_func)
+ assert test_registry.get("something else") == unregistered_func
From 29ebb3306245c02d7ba4928a12b6a6402e7eeff7 Mon Sep 17 00:00:00 2001
From: Harshitha Durai <76853136+harshithadurai@users.noreply.github.com>
Date: Thu, 3 Oct 2024 14:10:48 -0400
Subject: [PATCH 079/139] feat(dashboards): Add ff for access protected editing
of dashboards (#78551)
Feature flag for:
[#78550](https://github.com/getsentry/sentry/issues/78550)
Co-authored-by: harshithadurai
---
src/sentry/features/temporary.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py
index 62626ae2071af9..c3f0e6f7d211f3 100644
--- a/src/sentry/features/temporary.py
+++ b/src/sentry/features/temporary.py
@@ -101,6 +101,8 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:dashboards-releases-on-charts", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable equations for Big Number widgets
manager.add("organizations:dashboards-bignumber-equations", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
+ # Enable access protected editing of dashboards
+ manager.add("organizations:dashboards-edit-access", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable the dev toolbar PoC code for employees
# Data Secrecy
manager.add("organizations:data-secrecy", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
From 379e83af11f5fb1a5fee2421fd2a647208f9a499 Mon Sep 17 00:00:00 2001
From: Malachi Willey
Date: Thu, 3 Oct 2024 11:25:48 -0700
Subject: [PATCH 080/139] chore(api): Remove 'helpful' from allowed group event
values (#78530)
---
src/sentry/api/urls.py | 2 +-
.../issues/endpoints/group_event_details.py | 6 ++---
.../endpoints/test_group_event_details.py | 22 +++++++++----------
3 files changed, 15 insertions(+), 15 deletions(-)
diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py
index 1110a8d33ec45f..f92acdc39da6ab 100644
--- a/src/sentry/api/urls.py
+++ b/src/sentry/api/urls.py
@@ -721,7 +721,7 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
name=f"{name_prefix}-group-events",
),
re_path(
- r"^(?P[^\/]+)/events/(?P(?:latest|oldest|helpful|recommended|\d+|[A-Fa-f0-9-]{32,36}))/$",
+ r"^(?P[^\/]+)/events/(?P(?:latest|oldest|recommended|\d+|[A-Fa-f0-9-]{32,36}))/$",
GroupEventDetailsEndpoint.as_view(),
name=f"{name_prefix}-group-event-details",
),
diff --git a/src/sentry/issues/endpoints/group_event_details.py b/src/sentry/issues/endpoints/group_event_details.py
index bd8521b6df6c85..0d5d690db4b2c1 100644
--- a/src/sentry/issues/endpoints/group_event_details.py
+++ b/src/sentry/issues/endpoints/group_event_details.py
@@ -115,10 +115,10 @@ class GroupEventDetailsEndpoint(GroupEndpoint):
def get(self, request: Request, group: Group, event_id: str) -> Response:
"""
- Retrieve the latest(most recent), oldest, or most helpful Event for an Issue
+ Retrieve the latest(most recent), oldest, or recommended Event for an Issue
``````````````````````````````````````
- Retrieves the details of the latest/oldest/most-helpful event for an issue.
+ Retrieves the details of the latest/oldest/recommended event for an issue.
:pparam string group_id: the ID of the issue
"""
@@ -133,7 +133,7 @@ def get(self, request: Request, group: Group, event_id: str) -> Response:
elif event_id == "oldest":
with metrics.timer("api.endpoints.group_event_details.get", tags={"type": "oldest"}):
event = group.get_oldest_event_for_environments(environment_names)
- elif event_id in ("helpful", "recommended"):
+ elif event_id == "recommended":
query = request.GET.get("query")
if query:
with metrics.timer(
diff --git a/tests/sentry/issues/endpoints/test_group_event_details.py b/tests/sentry/issues/endpoints/test_group_event_details.py
index dd7a26bb209d54..977b3120f1eaeb 100644
--- a/tests/sentry/issues/endpoints/test_group_event_details.py
+++ b/tests/sentry/issues/endpoints/test_group_event_details.py
@@ -157,7 +157,7 @@ def test_get_simple_helpful(self):
},
project_id=self.project_1.id,
)
- url = f"/api/0/issues/{self.event_a.group.id}/events/helpful/"
+ url = f"/api/0/issues/{self.event_a.group.id}/events/recommended/"
response = self.client.get(url, format="json")
assert response.status_code == 200, response.content
@@ -193,7 +193,7 @@ def test_get_helpful_event_id(self):
},
project_id=self.project_1.id,
)
- url = f"/api/0/issues/{self.event_a.group.id}/events/helpful/"
+ url = f"/api/0/issues/{self.event_a.group.id}/events/recommended/"
response = self.client.get(url, format="json")
assert response.status_code == 200, response.content
@@ -241,7 +241,7 @@ def test_get_helpful_replay_id_order(self):
project_id=self.project_1.id,
)
- url = f"/api/0/issues/{self.event_d.group.id}/events/helpful/"
+ url = f"/api/0/issues/{self.event_d.group.id}/events/recommended/"
response = self.client.get(url, format="json")
assert response.status_code == 200, response.content
@@ -250,7 +250,7 @@ def test_get_helpful_replay_id_order(self):
assert response.data["nextEventID"] == str(self.event_f.event_id)
def test_with_empty_query(self):
- url = f"/api/0/issues/{self.event_a.group.id}/events/helpful/"
+ url = f"/api/0/issues/{self.event_a.group.id}/events/recommended/"
response = self.client.get(url, {"query": ""}, format="json")
assert response.status_code == 200, response.content
@@ -259,7 +259,7 @@ def test_with_empty_query(self):
assert response.data["nextEventID"] is None
def test_issue_filter_query_ignored(self):
- url = f"/api/0/issues/{self.event_a.group.id}/events/helpful/"
+ url = f"/api/0/issues/{self.event_a.group.id}/events/recommended/"
response = self.client.get(url, {"query": "is:unresolved"}, format="json")
assert response.status_code == 200, response.content
@@ -268,7 +268,7 @@ def test_issue_filter_query_ignored(self):
assert response.data["nextEventID"] is None
def test_event_release_query(self):
- url = f"/api/0/issues/{self.event_a.group.id}/events/helpful/"
+ url = f"/api/0/issues/{self.event_a.group.id}/events/recommended/"
response = self.client.get(url, {"query": f"release:{self.release_version}"}, format="json")
assert response.status_code == 200, response.content
@@ -292,7 +292,7 @@ def test_event_release_semver_query(self):
assert release.version == "test@1.2.3"
assert release.is_semver_release
- url = f"/api/0/issues/{event_g.group.id}/events/helpful/"
+ url = f"/api/0/issues/{event_g.group.id}/events/recommended/"
response = self.client.get(url, {"query": f"{SEMVER_ALIAS}:1.2.3"}, format="json")
assert response.status_code == 200, response.content
@@ -301,7 +301,7 @@ def test_event_release_semver_query(self):
assert response.data["nextEventID"] is None
def test_has_environment(self):
- url = f"/api/0/issues/{self.event_a.group.id}/events/helpful/"
+ url = f"/api/0/issues/{self.event_a.group.id}/events/recommended/"
response = self.client.get(url, {"query": "has:environment"}, format="json")
assert response.status_code == 200, response.content
@@ -344,7 +344,7 @@ def test_skipped_snuba_fields_ignored(self):
group.substatus = None
group.save(update_fields=["status", "substatus"])
- url = f"/api/0/issues/{group.id}/events/helpful/"
+ url = f"/api/0/issues/{group.id}/events/recommended/"
response = self.client.get(url, {"query": "is:unresolved has:environment"}, format="json")
assert response.status_code == 200, response.content
@@ -365,7 +365,7 @@ def test_query_title(self):
project_id=self.project_1.id,
)
- url = f"/api/0/issues/{event_e.group.id}/events/helpful/"
+ url = f"/api/0/issues/{event_e.group.id}/events/recommended/"
response = self.client.get(url, {"query": f'title:"{title}"'}, format="json")
assert response.status_code == 200, response.content
@@ -382,7 +382,7 @@ def test_query_issue_platform_title(self):
)
assert group_info is not None
- url = f"/api/0/issues/{group_info.group.id}/events/helpful/"
+ url = f"/api/0/issues/{group_info.group.id}/events/recommended/"
response = self.client.get(url, {"query": f'title:"{issue_title}"'}, format="json")
assert response.status_code == 200, response.content
From 6037740c3e1238072e523ddb7e0b785c24dd34ae Mon Sep 17 00:00:00 2001
From: Malachi Willey
Date: Thu, 3 Oct 2024 11:27:14 -0700
Subject: [PATCH 081/139] chore(api-docs): Add post-processing hook to fix
issue endpoint paths (#78483)
The way we define `/issues/` paths causes some problems with
drf-spectacular:
- The path is defined twice, with the
`/organizations/{organization_id_slug}` prefix and without it. This
causes errors that look like: `Warning: operationId "List an Issue's
Events" has collisions`
- The `/issues/` part of the path is defined as `issues|groups` for
compatibility reasons, but we only want to use `issues` in the docs.
Without these changes, the URL would look like
`/{issues_or_groups}/{id}`, but we want it to just show `/issues/{id}`.
It would also show `issues_or_groups` in the list of path parameters,
which is unwanted (a toy sketch of the intended rewrite follows this list).
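A toy sketch of the path rewrite described above; the `paths` dict here
is hypothetical and stands in for the generated OpenAPI `result["paths"]`
mapping:
```
# Illustrative only: drop the duplicate org-prefixed entry and rewrite
# the "{var}/{issue_id}" form to "issues/{issue_id}".
paths = {
    "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/events/": {"get": {}},
    "/api/0/{var}/{issue_id}/events/": {"get": {}},
}

fixed = {}
for path, endpoint in paths.items():
    if "{var}/{issue_id}" in path:
        if path.startswith("/api/0/organizations/"):
            continue  # duplicate of the shorter path; drop it
        path = path.replace("{var}/{issue_id}", "issues/{issue_id}")
    fixed[path] = endpoint

assert list(fixed) == ["/api/0/issues/{issue_id}/events/"]
```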
---
api-docs/openapi.json | 14 ++---
api-docs/paths/events/issue-events.json | 9 ---
api-docs/paths/events/issue-hashes.json | 9 ---
api-docs/paths/events/latest-event.json | 9 ---
api-docs/paths/events/oldest-event.json | 9 ---
api-docs/paths/events/tag-details.json | 9 ---
api-docs/paths/events/tag-values.json | 9 ---
src/sentry/apidocs/hooks.py | 40 ++++++++++++
.../endpoints/events/test_group_events.py | 10 +--
.../endpoints/events/test_group_hashes.py | 2 +-
.../events/test_group_issue_details.py | 2 +-
.../events/test_group_tagkey_values.py | 2 +-
tests/apidocs/test_hooks.py | 62 +++++++++++++++++++
13 files changed, 115 insertions(+), 71 deletions(-)
create mode 100644 tests/apidocs/test_hooks.py
diff --git a/api-docs/openapi.json b/api-docs/openapi.json
index fb16c302223e16..9d2c845c4984cd 100644
--- a/api-docs/openapi.json
+++ b/api-docs/openapi.json
@@ -135,25 +135,25 @@
"/api/0/projects/{organization_id_or_slug}/{project_id_or_slug}/issues/": {
"$ref": "paths/events/project-issues.json"
},
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/tags/{key}/values/": {
+ "/api/0/issues/{issue_id}/tags/{key}/values/": {
"$ref": "paths/events/tag-values.json"
},
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/tags/{key}/": {
+ "/api/0/issues/{issue_id}/tags/{key}/": {
"$ref": "paths/events/tag-details.json"
},
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/hashes/": {
+ "/api/0/issues/{issue_id}/hashes/": {
"$ref": "paths/events/issue-hashes.json"
},
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/events/oldest/": {
+ "/api/0/issues/{issue_id}/events/oldest/": {
"$ref": "paths/events/oldest-event.json"
},
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/events/latest/": {
+ "/api/0/issues/{issue_id}/events/latest/": {
"$ref": "paths/events/latest-event.json"
},
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/events/": {
+ "/api/0/issues/{issue_id}/events/": {
"$ref": "paths/events/issue-events.json"
},
- "/api/0/organizations/{organization_id_or_slug}/issues/{issue_id}/": {
+ "/api/0/issues/{issue_id}/": {
"$ref": "paths/events/issue-details.json"
},
"/api/0/organizations/{organization_id_or_slug}/releases/": {
diff --git a/api-docs/paths/events/issue-events.json b/api-docs/paths/events/issue-events.json
index 3e498fe26c7e99..63332e213fd146 100644
--- a/api-docs/paths/events/issue-events.json
+++ b/api-docs/paths/events/issue-events.json
@@ -4,15 +4,6 @@
"description": "This endpoint lists an issue's events.",
"operationId": "List an Issue's Events",
"parameters": [
- {
- "name": "organization_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the organization the issues belongs to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
{
"name": "issue_id",
"in": "path",
diff --git a/api-docs/paths/events/issue-hashes.json b/api-docs/paths/events/issue-hashes.json
index 6b3ba88548b6de..77acd0241f8cb9 100644
--- a/api-docs/paths/events/issue-hashes.json
+++ b/api-docs/paths/events/issue-hashes.json
@@ -4,15 +4,6 @@
"description": "This endpoint lists an issue's hashes, which are the generated checksums used to aggregate individual events.",
"operationId": "List an Issue's Hashes",
"parameters": [
- {
- "name": "organization_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the organization the issue belong to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
{
"name": "issue_id",
"in": "path",
diff --git a/api-docs/paths/events/latest-event.json b/api-docs/paths/events/latest-event.json
index 42f6adba6b1391..ebc69936fda854 100644
--- a/api-docs/paths/events/latest-event.json
+++ b/api-docs/paths/events/latest-event.json
@@ -4,15 +4,6 @@
"description": "Retrieves the details of the latest event for an issue.",
"operationId": "Retrieve the Latest Event for an Issue",
"parameters": [
- {
- "name": "organization_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the organization the issue belong to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
{
"name": "issue_id",
"in": "path",
diff --git a/api-docs/paths/events/oldest-event.json b/api-docs/paths/events/oldest-event.json
index bcc625688907ee..78c27789bb5721 100644
--- a/api-docs/paths/events/oldest-event.json
+++ b/api-docs/paths/events/oldest-event.json
@@ -4,15 +4,6 @@
"description": "Retrieves the details of the oldest event for an issue.",
"operationId": "Retrieve the Oldest Event for an Issue",
"parameters": [
- {
- "name": "organization_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the organization the issue belong to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
{
"name": "issue_id",
"in": "path",
diff --git a/api-docs/paths/events/tag-details.json b/api-docs/paths/events/tag-details.json
index bd286ac3fc57c3..d2b1cc93ba44e3 100644
--- a/api-docs/paths/events/tag-details.json
+++ b/api-docs/paths/events/tag-details.json
@@ -4,15 +4,6 @@
"description": "Returns details for given tag key related to an issue.",
"operationId": "Retrieve Tag Details",
"parameters": [
- {
- "name": "organization_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the organization the issue belongs to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
{
"name": "issue_id",
"in": "path",
diff --git a/api-docs/paths/events/tag-values.json b/api-docs/paths/events/tag-values.json
index f25210f9053421..323b3d33bc8f8d 100644
--- a/api-docs/paths/events/tag-values.json
+++ b/api-docs/paths/events/tag-values.json
@@ -4,15 +4,6 @@
"description": "Returns details for given tag key related to an issue. \n\nWhen [paginated](/api/pagination) can return at most 1000 values.",
"operationId": "List a Tag's Values Related to an Issue",
"parameters": [
- {
- "name": "organization_id_or_slug",
- "in": "path",
- "description": "The ID or slug of the organization the issue belongs to.",
- "required": true,
- "schema": {
- "type": "string"
- }
- },
{
"name": "issue_id",
"in": "path",
diff --git a/src/sentry/apidocs/hooks.py b/src/sentry/apidocs/hooks.py
index 364ca5e1444095..18ed0e66162daa 100644
--- a/src/sentry/apidocs/hooks.py
+++ b/src/sentry/apidocs/hooks.py
@@ -224,6 +224,8 @@ def _validate_request_body(
def custom_postprocessing_hook(result: Any, generator: Any, **kwargs: Any) -> Any:
+ _fix_issue_paths(result)
+
# Fetch schema component references
schema_components = result["components"]["schemas"]
@@ -281,3 +283,41 @@ def _check_tag(method_info: Mapping[str, Any], endpoint_name: str) -> None:
def _check_description(json_body: Mapping[str, Any], err_str: str) -> None:
if json_body.get("description") is None:
raise SentryApiBuildError(err_str)
+
+
+def _fix_issue_paths(result: Any) -> Any:
+ """
+ The way we define `/issues/` paths causes some problems with drf-spectacular:
+    - The path is defined twice, with the `/organizations/{organization_id_or_slug}` prefix and without
+ - The `/issues/` part of the path is defined as `issues|groups` for compatibility reasons,
+ but we only want to use `issues` in the docs
+
+ This function removes duplicate paths, removes the `issues|groups` path parameter and
+ replaces it with `issues` in the path.
+ """
+ items = list(result["paths"].items())
+
+ deleted_paths = []
+ modified_paths = []
+
+ for path, endpoint in items:
+ if "{var}/{issue_id}" in path:
+ if path.startswith("/api/0/organizations/{organization_id_or_slug}/"):
+ deleted_paths.append(path)
+ else:
+ modified_paths.append(path)
+
+ for path in deleted_paths:
+ del result["paths"][path]
+
+ for path in modified_paths:
+ updated_path = path.replace("{var}/{issue_id}", "issues/{issue_id}")
+ endpoint = result["paths"][path]
+ for method in endpoint.keys():
+ endpoint[method]["parameters"] = [
+ param
+ for param in endpoint[method]["parameters"]
+ if not (param["in"] == "path" and param["name"] == "var")
+ ]
+ result["paths"][updated_path] = endpoint
+ del result["paths"][path]
diff --git a/tests/apidocs/endpoints/events/test_group_events.py b/tests/apidocs/endpoints/events/test_group_events.py
index f92be9c82ecead..052dc2ddb76748 100644
--- a/tests/apidocs/endpoints/events/test_group_events.py
+++ b/tests/apidocs/endpoints/events/test_group_events.py
@@ -30,7 +30,7 @@ def setUp(self):
class ProjectGroupEventsDocs(ProjectGroupEventBase):
def setUp(self):
super().setUp()
- self.url = f"/api/0/organizations/{self.organization.slug}/issues/{self.group_id}/events/"
+ self.url = f"/api/0/issues/{self.group_id}/events/"
def test_get(self):
response = self.client.get(self.url)
@@ -42,9 +42,7 @@ def test_get(self):
class ProjectGroupEventsLatestDocs(ProjectGroupEventBase):
def setUp(self):
super().setUp()
- self.url = (
- f"/api/0/organizations/{self.organization.slug}/issues/{self.group_id}/events/latest/"
- )
+ self.url = f"/api/0/issues/{self.group_id}/events/latest/"
def test_get(self):
response = self.client.get(self.url)
@@ -56,9 +54,7 @@ def test_get(self):
class ProjectGroupEventsOldestDocs(ProjectGroupEventBase):
def setUp(self):
super().setUp()
- self.url = (
- f"/api/0/organizations/{self.organization.slug}/issues/{self.group_id}/events/oldest/"
- )
+ self.url = f"/api/0/issues/{self.group_id}/events/oldest/"
def test_get(self):
response = self.client.get(self.url)
diff --git a/tests/apidocs/endpoints/events/test_group_hashes.py b/tests/apidocs/endpoints/events/test_group_hashes.py
index 78df9f8507fda2..e4601bf57c4393 100644
--- a/tests/apidocs/endpoints/events/test_group_hashes.py
+++ b/tests/apidocs/endpoints/events/test_group_hashes.py
@@ -8,7 +8,7 @@ def setUp(self):
self.create_event("a")
event = self.create_event("b")
- self.url = f"/api/0/organizations/{self.organization.slug}/issues/{event.group_id}/hashes/"
+ self.url = f"/api/0/issues/{event.group_id}/hashes/"
self.login_as(user=self.user)
diff --git a/tests/apidocs/endpoints/events/test_group_issue_details.py b/tests/apidocs/endpoints/events/test_group_issue_details.py
index 0fcaff5822edd5..595b9bc48cd227 100644
--- a/tests/apidocs/endpoints/events/test_group_issue_details.py
+++ b/tests/apidocs/endpoints/events/test_group_issue_details.py
@@ -24,7 +24,7 @@ def setUp(self):
for timestamp in last_release.values():
event = self.create_event("c", release="1.0a", timestamp=iso_format(timestamp))
- self.url = f"/api/0/organizations/{self.organization.slug}/issues/{event.group.id}/"
+ self.url = f"/api/0/issues/{event.group.id}/"
self.login_as(user=self.user)
diff --git a/tests/apidocs/endpoints/events/test_group_tagkey_values.py b/tests/apidocs/endpoints/events/test_group_tagkey_values.py
index 001404c99520c1..fbace8fac0193f 100644
--- a/tests/apidocs/endpoints/events/test_group_tagkey_values.py
+++ b/tests/apidocs/endpoints/events/test_group_tagkey_values.py
@@ -10,7 +10,7 @@ def setUp(self):
self.login_as(user=self.user)
- self.url = f"/api/0/organizations/{self.organization.slug}/issues/{event.group_id}/tags/{key}/values/"
+ self.url = f"/api/0/issues/{event.group_id}/tags/{key}/values/"
def test_get(self):
response = self.client.get(self.url)
diff --git a/tests/apidocs/test_hooks.py b/tests/apidocs/test_hooks.py
new file mode 100644
index 00000000000000..656af17528896c
--- /dev/null
+++ b/tests/apidocs/test_hooks.py
@@ -0,0 +1,62 @@
+from sentry.apidocs.hooks import custom_postprocessing_hook
+from sentry.testutils.cases import TestCase
+
+
+class FixIssueRoutesTest(TestCase):
+ def test_issue_route_fixes(self):
+ BEFORE = {
+ "components": {"schemas": {}},
+ "paths": {
+ "/api/0/organizations/{organization_id_or_slug}/{var}/{issue_id}/": {
+ "get": {
+ "tags": ["Events"],
+ "description": "Get issues",
+ "operationId": "get issue",
+ "parameters": [],
+ }
+ },
+ "/api/0/{var}/{issue_id}/": {
+ "get": {
+ "tags": ["Events"],
+ "description": "Get issues",
+ "operationId": "get issue",
+ "parameters": [],
+ }
+ },
+ "/api/0/some/path/": {
+ "get": {
+ "tags": ["Events"],
+ "description": "Something else",
+ "operationId": "get something",
+ "parameters": [],
+ }
+ },
+ },
+ }
+
+ # Issue route with /organizations/{organization_id_or_slug}/ should be removed
+ # Issue route with /{var}/{issue_id}/ should be renamed to /issues/{issue_id}/
+ # "var" path parameter should be removed
+ AFTER = {
+ "paths": {
+ "/api/0/issues/{issue_id}/": {
+ "get": {
+ "tags": ["Events"],
+ "description": "Get issues",
+ "operationId": "get issue",
+ "parameters": [],
+ }
+ },
+ "/api/0/some/path/": {
+ "get": {
+ "tags": ["Events"],
+ "description": "Something else",
+ "operationId": "get something",
+ "parameters": [],
+ }
+ },
+ },
+ "components": {"schemas": {}},
+ }
+
+ assert custom_postprocessing_hook(BEFORE, None) == AFTER
From a8bac0c9ea986808dbf50590131042d741496378 Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Thu, 3 Oct 2024 11:29:20 -0700
Subject: [PATCH 082/139] fix(ui): Restrict imports from 'moment' for
'moment-timezone' (#78561)
---
.eslintrc.js | 4 ++++
static/app/utils/getDaysSinceDate.spec.tsx | 2 +-
static/app/views/organizationStats/mapSeriesToChart.ts | 2 +-
static/app/views/performance/trends/index.spec.tsx | 2 +-
4 files changed, 7 insertions(+), 3 deletions(-)
diff --git a/.eslintrc.js b/.eslintrc.js
index 00fbe53f8a53e7..18f6abda6d7e8b 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -93,6 +93,10 @@ module.exports = {
name: 'qs',
message: 'Please use query-string instead of qs',
},
+ {
+ name: 'moment',
+ message: 'Please import moment-timezone instead of moment',
+ },
],
},
],
diff --git a/static/app/utils/getDaysSinceDate.spec.tsx b/static/app/utils/getDaysSinceDate.spec.tsx
index 52d4d596789bae..b9667ac9f4cfdb 100644
--- a/static/app/utils/getDaysSinceDate.spec.tsx
+++ b/static/app/utils/getDaysSinceDate.spec.tsx
@@ -1,7 +1,7 @@
import getDaysSinceDate from 'sentry/utils/getDaysSinceDate';
jest.mock('moment-timezone', () => {
- const moment = jest.requireActual('moment');
+ const moment = jest.requireActual('moment-timezone');
// Jun 06 2022
moment.now = jest.fn().mockReturnValue(1654492173000);
return moment;
diff --git a/static/app/views/organizationStats/mapSeriesToChart.ts b/static/app/views/organizationStats/mapSeriesToChart.ts
index 32b17a9a017e10..8a852bc56c8e01 100644
--- a/static/app/views/organizationStats/mapSeriesToChart.ts
+++ b/static/app/views/organizationStats/mapSeriesToChart.ts
@@ -1,6 +1,6 @@
import * as Sentry from '@sentry/react';
import startCase from 'lodash/startCase';
-import moment from 'moment';
+import moment from 'moment-timezone';
import type {TooltipSubLabel} from 'sentry/components/charts/components/tooltip';
import type {DataCategoryInfo, IntervalPeriod} from 'sentry/types/core';
diff --git a/static/app/views/performance/trends/index.spec.tsx b/static/app/views/performance/trends/index.spec.tsx
index 5aba00dd869c42..313c7079844014 100644
--- a/static/app/views/performance/trends/index.spec.tsx
+++ b/static/app/views/performance/trends/index.spec.tsx
@@ -32,7 +32,7 @@ const trendsViewQuery = {
};
jest.mock('moment-timezone', () => {
- const moment = jest.requireActual('moment');
+ const moment = jest.requireActual('moment-timezone');
moment.now = jest.fn().mockReturnValue(1601251200000);
return moment;
});
From 7c0af7ce6ab44cc88dc3f77c0582286c1f8e8f22 Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Thu, 3 Oct 2024 11:31:40 -0700
Subject: [PATCH 083/139] feat(issues): Switch tag dropdown to links (#78424)
---
.../events/eventTags/eventTagsTreeRow.tsx | 68 ++++++++-----------
1 file changed, 28 insertions(+), 40 deletions(-)
diff --git a/static/app/components/events/eventTags/eventTagsTreeRow.tsx b/static/app/components/events/eventTags/eventTagsTreeRow.tsx
index 158368da7708ff..286ae659993d9f 100644
--- a/static/app/components/events/eventTags/eventTagsTreeRow.tsx
+++ b/static/app/components/events/eventTags/eventTagsTreeRow.tsx
@@ -3,7 +3,6 @@ import styled from '@emotion/styled';
import * as qs from 'query-string';
import {openNavigateToExternalLinkModal} from 'sentry/actionCreators/modal';
-import {navigateTo} from 'sentry/actionCreators/navigation';
import {hasEveryAccess} from 'sentry/components/acl/access';
import {DropdownMenu} from 'sentry/components/dropdownMenu';
import type {TagTreeContent} from 'sentry/components/events/eventTags/eventTagsTree';
@@ -24,7 +23,6 @@ import {isUrl} from 'sentry/utils/string/isUrl';
import useCopyToClipboard from 'sentry/utils/useCopyToClipboard';
import useMutateProject from 'sentry/utils/useMutateProject';
import useOrganization from 'sentry/utils/useOrganization';
-import useRouter from 'sentry/utils/useRouter';
interface EventTagTreeRowConfig {
// Omits the dropdown of actions applicable to this tag
@@ -120,7 +118,6 @@ function EventTagsTreeRowDropdown({
project,
}: Pick) {
const organization = useOrganization();
- const router = useRouter();
const {onClick: handleCopy} = useCopyToClipboard({
text: content.value,
});
@@ -140,7 +137,6 @@ function EventTagsTreeRowDropdown({
// Skip tags already highlighted
highlightTagSet.has(originalTag.key);
const query = generateQueryWithTag({referrer}, originalTag);
- const searchQuery = `?${qs.stringify(query)}`;
const isProjectAdmin = hasEveryAccess(['project:admin'], {
organization,
project,
@@ -164,21 +160,17 @@ function EventTagsTreeRowDropdown({
key: 'view-events',
label: t('View other events with this tag value'),
hidden: !event.groupID,
- onAction: () => {
- navigateTo(
- `/organizations/${organization.slug}/issues/${event.groupID}/events/${searchQuery}`,
- router
- );
+ to: {
+ pathname: `/organizations/${organization.slug}/issues/${event.groupID}/events/`,
+ query,
},
},
{
key: 'view-issues',
label: t('View issues with this tag value'),
- onAction: () => {
- navigateTo(
- `/organizations/${organization.slug}/issues/${searchQuery}`,
- router
- );
+ to: {
+ pathname: `/organizations/${organization.slug}/issues/`,
+ query,
},
},
{
@@ -200,42 +192,38 @@ function EventTagsTreeRowDropdown({
key: 'release',
label: t('View this release'),
hidden: originalTag.key !== 'release',
- onAction: () => {
- navigateTo(
- `/organizations/${organization.slug}/releases/${encodeURIComponent(
- content.value
- )}/`,
- router
- );
- },
+ to:
+ originalTag.key === 'release'
+ ? `/organizations/${organization.slug}/releases/${encodeURIComponent(content.value)}/`
+ : undefined,
},
{
key: 'transaction',
label: t('View this transaction'),
hidden: originalTag.key !== 'transaction',
- onAction: () => {
- const transactionQuery = qs.stringify({
- project: event.projectID,
- transaction: content.value,
- referrer,
- });
- navigateTo(
- `/organizations/${organization.slug}/performance/summary/?${transactionQuery}`,
- router
- );
- },
+ to:
+ originalTag.key === 'transaction'
+ ? {
+ pathname: `/organizations/${organization.slug}/performance/summary/`,
+ query: {
+ project: event.projectID,
+ transaction: content.value,
+ referrer,
+ },
+ }
+ : undefined,
},
{
key: 'replay',
label: t('View this replay'),
hidden: originalTag.key !== 'replay_id' && originalTag.key !== 'replayId',
- onAction: () => {
- const replayQuery = qs.stringify({referrer});
- navigateTo(
- `/organizations/${organization.slug}/replays/${encodeURIComponent(content.value)}/?${replayQuery}`,
- router
- );
- },
+ to:
+ originalTag.key === 'replay_id' || originalTag.key === 'replayId'
+ ? {
+ pathname: `/organizations/${organization.slug}/replays/${encodeURIComponent(content.value)}/`,
+ query: {referrer},
+ }
+ : undefined,
},
{
key: 'external-link',
From 02692eae2b91c93b01647e4712e60a9b4abc30d3 Mon Sep 17 00:00:00 2001
From: Andrew Liu <159852527+aliu39@users.noreply.github.com>
Date: Thu, 3 Oct 2024 11:35:54 -0700
Subject: [PATCH 084/139] fix(feedback): limit user report related queries by
retention period (#77074)
Fixes https://github.com/getsentry/sentry/issues/76985
Combined with https://github.com/getsentry/sentry/pull/76691, this will fix
all the time-range errors we've been getting for user reports. TL;DR: we
should explicitly respect event retention for all UserReport-related
queries. **Reports older than the retention period won't be returned by
the endpoints.**
In the rare case where a report is in retention but its associated event
is not, the serialized report will exclude the user object, because we
can't query the event.
I think these are all resolved, but I'm linking them for reference:
Fixes [SENTRY-18JF](https://sentry.sentry.io/issues/4620140170/)
Fixes [SENTRY-163P](https://sentry.sentry.io/issues/4502227101/)
Fixes [SENTRY-3CAB](https://sentry.sentry.io/issues/5635234171/)
Fixes [SENTRY-189J](https://sentry.sentry.io/issues/4613821445/)
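For reference, a minimal sketch of the retention cutoff these endpoint changes
apply (the helper name is illustrative; the imports and query calls mirror the
diff below):

```python
from datetime import UTC, datetime, timedelta

from sentry import quotas
from sentry.models.userreport import UserReport
from sentry.utils.dates import epoch


def reports_within_retention(project):
    # Resolve the organization's event retention in days; when no retention is
    # configured, fall back to the epoch, i.e. no lower bound on date_added.
    retention = quotas.backend.get_event_retention(organization=project.organization)
    start = datetime.now(UTC) - timedelta(days=retention) if retention else epoch

    # Only return reports newer than the cutoff, mirroring the project and
    # organization user-report endpoints below.
    return UserReport.objects.filter(project_id=project.id, date_added__gte=start)
```

The same cutoff is also passed as `start` to the eventstore filter in the
serializer and clamped into the `update_user_reports` task's event query, so
Snuba is never queried outside retention.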
---
.../endpoints/organization_user_reports.py | 7 ++
.../api/endpoints/project_user_reports.py | 9 ++-
.../api/serializers/models/userreport.py | 8 +-
src/sentry/tasks/update_user_reports.py | 15 +++-
.../test_organization_user_reports.py | 40 ++++++++++
.../endpoints/test_project_user_reports.py | 76 +++++++++++++++----
.../sentry/tasks/test_update_user_reports.py | 34 +++++++--
7 files changed, 163 insertions(+), 26 deletions(-)
diff --git a/src/sentry/api/endpoints/organization_user_reports.py b/src/sentry/api/endpoints/organization_user_reports.py
index a083d32db550c7..705f072e3b885e 100644
--- a/src/sentry/api/endpoints/organization_user_reports.py
+++ b/src/sentry/api/endpoints/organization_user_reports.py
@@ -1,8 +1,10 @@
+from datetime import UTC, datetime, timedelta
from typing import NotRequired, TypedDict
from rest_framework.request import Request
from rest_framework.response import Response
+from sentry import quotas
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
@@ -13,6 +15,7 @@
from sentry.api.serializers import serialize
from sentry.api.serializers.models import UserReportWithGroupSerializer
from sentry.models.userreport import UserReport
+from sentry.utils.dates import epoch
class _PaginateKwargs(TypedDict):
@@ -56,6 +59,10 @@ def get(self, request: Request, organization) -> Response:
queryset = queryset.filter(
date_added__range=(filter_params["start"], filter_params["end"])
)
+ else:
+ retention = quotas.backend.get_event_retention(organization=organization)
+ start = datetime.now(UTC) - timedelta(days=retention) if retention else epoch
+ queryset = queryset.filter(date_added__gte=start)
status = request.GET.get("status", "unresolved")
paginate_kwargs: _PaginateKwargs = {}
diff --git a/src/sentry/api/endpoints/project_user_reports.py b/src/sentry/api/endpoints/project_user_reports.py
index 3660faf94254d2..b84e2c65b0959f 100644
--- a/src/sentry/api/endpoints/project_user_reports.py
+++ b/src/sentry/api/endpoints/project_user_reports.py
@@ -1,9 +1,11 @@
+from datetime import UTC, datetime, timedelta
from typing import NotRequired, TypedDict
from rest_framework import serializers
from rest_framework.request import Request
from rest_framework.response import Response
+from sentry import quotas
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.authentication import DSNAuthentication
@@ -17,6 +19,7 @@
from sentry.models.environment import Environment
from sentry.models.projectkey import ProjectKey
from sentry.models.userreport import UserReport
+from sentry.utils.dates import epoch
class UserReportSerializer(serializers.ModelSerializer):
@@ -61,7 +64,11 @@ def get(self, request: Request, project) -> Response:
except Environment.DoesNotExist:
queryset = UserReport.objects.none()
else:
- queryset = UserReport.objects.filter(project_id=project.id, group_id__isnull=False)
+ retention = quotas.backend.get_event_retention(organization=project.organization)
+ start = datetime.now(UTC) - timedelta(days=retention) if retention else epoch
+ queryset = UserReport.objects.filter(
+ project_id=project.id, group_id__isnull=False, date_added__gte=start
+ )
if environment is not None:
queryset = queryset.filter(environment_id=environment.id)
diff --git a/src/sentry/api/serializers/models/userreport.py b/src/sentry/api/serializers/models/userreport.py
index 6ec62d5051a79e..56da26e5258ddc 100644
--- a/src/sentry/api/serializers/models/userreport.py
+++ b/src/sentry/api/serializers/models/userreport.py
@@ -1,4 +1,8 @@
-from sentry import eventstore
+from datetime import timedelta
+
+from django.utils import timezone
+
+from sentry import eventstore, quotas
from sentry.api.serializers import Serializer, register, serialize
from sentry.eventstore.models import Event
from sentry.models.group import Group
@@ -14,11 +18,13 @@ def get_attrs(self, item_list, user, **kwargs):
attrs = {}
project = Project.objects.get(id=item_list[0].project_id)
+ retention = quotas.backend.get_event_retention(organization=project.organization)
events = eventstore.backend.get_events(
filter=eventstore.Filter(
event_ids=[item.event_id for item in item_list],
project_ids=[project.id],
+ start=timezone.now() - timedelta(days=retention) if retention else None,
),
referrer="UserReportSerializer.get_attrs",
dataset=Dataset.Events,
diff --git a/src/sentry/tasks/update_user_reports.py b/src/sentry/tasks/update_user_reports.py
index 20faa44af937a2..fbdcaca4de187e 100644
--- a/src/sentry/tasks/update_user_reports.py
+++ b/src/sentry/tasks/update_user_reports.py
@@ -5,7 +5,7 @@
import sentry_sdk
from django.utils import timezone
-from sentry import eventstore, features
+from sentry import eventstore, features, quotas
from sentry.feedback.usecases.create_feedback import FeedbackCreationSource, shim_to_feedback
from sentry.models.project import Project
from sentry.models.userreport import UserReport
@@ -24,8 +24,12 @@
)
def update_user_reports(**kwargs: Any) -> None:
now = timezone.now()
- end = kwargs.get("end", now + timedelta(minutes=5)) # +5 minutes just to catch clock skew
start = kwargs.get("start", now - timedelta(days=1))
+ end = kwargs.get("end", now + timedelta(minutes=5)) # +5 minutes just to catch clock skew
+
+ # The event query time range is [start - event_lookback, end].
+ event_lookback = kwargs.get("event_lookback", timedelta(days=1))
+
# Filter for user reports where there was no event associated with them at
# ingestion time
user_reports = UserReport.objects.filter(
@@ -54,11 +58,16 @@ def update_user_reports(**kwargs: Any) -> None:
event_ids = [r.event_id for r in reports]
report_by_event = {r.event_id: r for r in reports}
events = []
+
+ event_start = start - event_lookback
+ if retention := quotas.backend.get_event_retention(organization=project.organization):
+ event_start = max(event_start, now - timedelta(days=retention))
+
for event_id_chunk in chunked(event_ids, MAX_EVENTS):
snuba_filter = eventstore.Filter(
project_ids=[project_id],
event_ids=event_id_chunk,
- start=start - timedelta(days=1), # we go one extra day back for events
+ start=event_start,
end=end,
)
try:
diff --git a/tests/sentry/api/endpoints/test_organization_user_reports.py b/tests/sentry/api/endpoints/test_organization_user_reports.py
index acbcee1ef4a143..6d4d494289734f 100644
--- a/tests/sentry/api/endpoints/test_organization_user_reports.py
+++ b/tests/sentry/api/endpoints/test_organization_user_reports.py
@@ -1,10 +1,12 @@
from datetime import UTC, datetime, timedelta
+from unittest.mock import patch
from sentry.feedback.usecases.create_feedback import FeedbackCreationSource
from sentry.ingest.userreport import save_userreport
from sentry.models.group import GroupStatus
from sentry.models.userreport import UserReport
from sentry.testutils.cases import APITestCase, SnubaTestCase
+from sentry.testutils.helpers.datetime import iso_format
class OrganizationUserReportListTest(APITestCase, SnubaTestCase):
@@ -145,3 +147,41 @@ def test_with_event_user(self):
assert response.data[0]["comments"] == "It broke"
assert response.data[0]["user"]["name"] == "Alice"
assert response.data[0]["user"]["email"] == "alice@example.com"
+
+ @patch("sentry.quotas.backend.get_event_retention")
+ def test_retention(self, mock_get_event_retention):
+ retention_days = 21
+ mock_get_event_retention.return_value = retention_days
+ UserReport.objects.create(
+ project_id=self.project_1.id,
+ event_id="f" * 32,
+ group_id=self.group_1.id,
+ environment_id=self.env_1.id,
+ date_added=datetime.now(UTC) - timedelta(days=retention_days + 1),
+ )
+ self.run_test([self.report_1, self.report_2]) # old report is not returned
+
+ @patch("sentry.quotas.backend.get_event_retention")
+ def test_event_retention(self, mock_get_event_retention):
+ retention_days = 21
+ mock_get_event_retention.return_value = retention_days
+
+ old_event = self.store_event(
+ data={
+ "event_id": "f" * 32,
+ "timestamp": iso_format(datetime.now(UTC) - timedelta(days=retention_days + 1)),
+ "environment": self.environment.name,
+ },
+ project_id=self.project_1.id,
+ )
+ UserReport.objects.create(
+ project_id=self.project_1.id,
+ event_id=old_event.event_id,
+ environment_id=self.environment.id,
+ group_id=old_event.group.id,
+ date_added=datetime.now(UTC) - timedelta(days=1),
+ )
+
+ # We don't care what is returned here, only that no QueryOutsideRetentionError is thrown.
+ response = self.get_response(self.project_1.organization.slug)
+ assert response.status_code == 200
diff --git a/tests/sentry/api/endpoints/test_project_user_reports.py b/tests/sentry/api/endpoints/test_project_user_reports.py
index 89135abe7d6925..982866e6218f81 100644
--- a/tests/sentry/api/endpoints/test_project_user_reports.py
+++ b/tests/sentry/api/endpoints/test_project_user_reports.py
@@ -5,11 +5,16 @@
from django.utils import timezone
from sentry.models.group import GroupStatus
+from sentry.models.project import Project
from sentry.models.userreport import UserReport
from sentry.testutils.cases import APITestCase, SnubaTestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
+def _make_url(project: Project):
+ return f"/api/0/projects/{project.organization.slug}/{project.slug}/user-feedback/"
+
+
class ProjectUserReportListTest(APITestCase, SnubaTestCase):
def setUp(self):
super().setUp()
@@ -104,7 +109,7 @@ def test_simple(self):
group_id=group2.id,
)
- url = f"/api/0/projects/{project.organization.slug}/{project.slug}/user-feedback/"
+ url = _make_url(project)
response = self.client.get(url, format="json")
@@ -116,7 +121,7 @@ def test_cannot_access_with_dsn_auth(self):
project = self.create_project()
project_key = self.create_project_key(project=project)
- url = f"/api/0/projects/{project.organization.slug}/{project.slug}/user-feedback/"
+ url = _make_url(project)
response = self.client.get(url, HTTP_AUTHORIZATION=f"DSN {project_key.dsn_public}")
@@ -148,7 +153,7 @@ def test_all_reports(self):
group.substatus = None
group.save()
- url = f"/api/0/projects/{project.organization.slug}/{project.slug}/user-feedback/"
+ url = _make_url(project)
response = self.client.get(f"{url}?status=", format="json")
@@ -159,9 +164,7 @@ def test_all_reports(self):
def test_environments(self):
self.login_as(user=self.user)
- base_url = (
- f"/api/0/projects/{self.project.organization.slug}/{self.project.slug}/user-feedback/"
- )
+ base_url = _make_url(self.project)
# Specify environment
response = self.client.get(base_url + "?environment=production")
@@ -187,6 +190,49 @@ def test_environments(self):
assert response.status_code == 200
assert response.data == []
+ @patch("sentry.quotas.backend.get_event_retention")
+ def test_retention(self, mock_get_event_retention):
+ self.login_as(user=self.user)
+ retention_days = 21
+ mock_get_event_retention.return_value = retention_days
+
+ UserReport.objects.all().delete() # clear reports saved in setup
+ UserReport.objects.create(
+ project_id=self.project.id,
+ event_id="f" * 32,
+ environment_id=self.environment.id,
+ group_id=123,
+ date_added=before_now(days=retention_days + 1),
+ )
+ response = self.client.get(_make_url(self.project))
+ assert response.status_code == 200
+ assert len(response.data) == 0
+
+ @patch("sentry.quotas.backend.get_event_retention")
+ def test_event_retention(self, mock_get_event_retention):
+ self.login_as(user=self.user)
+ retention_days = 21
+ mock_get_event_retention.return_value = retention_days
+
+ old_event = self.store_event(
+ data={
+ "event_id": "f" * 32,
+ "timestamp": iso_format(before_now(days=retention_days + 1)),
+ "environment": self.environment.name,
+ },
+ project_id=self.project.id,
+ )
+ UserReport.objects.create(
+ project_id=self.project.id,
+ event_id=old_event.event_id,
+ environment_id=self.environment.id,
+ group_id=old_event.group.id,
+ date_added=before_now(days=1),
+ )
+ response = self.client.get(_make_url(self.project))
+ # We don't care what is returned here, only that no QueryOutsideRetentionError is thrown.
+ assert response.status_code == 200
+
class CreateProjectUserReportTest(APITestCase, SnubaTestCase):
def setUp(self):
@@ -212,7 +258,7 @@ def setUp(self):
def test_simple(self):
self.login_as(user=self.user)
- url = f"/api/0/projects/{self.project.organization.slug}/{self.project.slug}/user-feedback/"
+ url = _make_url(self.project)
response = self.client.post(
url,
@@ -235,7 +281,7 @@ def test_simple(self):
def test_with_dsn_auth(self):
project_key = self.create_project_key(project=self.project)
- url = f"/api/0/projects/{self.project.organization.slug}/{self.project.slug}/user-feedback/"
+ url = _make_url(self.project)
response = self.client.post(
url,
@@ -256,7 +302,7 @@ def test_with_dsn_auth_invalid_project(self):
project2 = self.create_project()
project_key = self.create_project_key(project=self.project)
- url = f"/api/0/projects/{project2.organization.slug}/{project2.slug}/user-feedback/"
+ url = _make_url(project2)
response = self.client.post(
url,
@@ -283,7 +329,7 @@ def test_already_present(self):
comments="",
)
- url = f"/api/0/projects/{self.project.organization.slug}/{self.project.slug}/user-feedback/"
+ url = _make_url(self.project)
response = self.client.post(
url,
@@ -317,7 +363,7 @@ def test_already_present_after_deadline(self):
date_added=timezone.now() - timedelta(minutes=10),
)
- url = f"/api/0/projects/{self.project.organization.slug}/{self.project.slug}/user-feedback/"
+ url = _make_url(self.project)
response = self.client.post(
url,
@@ -334,7 +380,7 @@ def test_already_present_after_deadline(self):
def test_after_event_deadline(self):
self.login_as(user=self.user)
- url = f"/api/0/projects/{self.project.organization.slug}/{self.project.slug}/user-feedback/"
+ url = _make_url(self.project)
response = self.client.post(
url,
@@ -351,7 +397,7 @@ def test_after_event_deadline(self):
def test_environments(self):
self.login_as(user=self.user)
- url = f"/api/0/projects/{self.project.organization.slug}/{self.project.slug}/user-feedback/"
+ url = _make_url(self.project)
response = self.client.post(
url,
@@ -384,7 +430,7 @@ def test_simple_shim_to_feedback(self, mock_produce_occurrence_to_kafka):
)
self.login_as(user=self.user)
- url = f"/api/0/projects/{self.project.organization.slug}/{self.project.slug}/user-feedback/"
+ url = _make_url(self.project)
with self.feature("organizations:user-feedback-ingest"):
response = self.client.post(
@@ -432,7 +478,7 @@ def test_simple_shim_to_feedback_no_event_should_not_call(
):
self.login_as(user=self.user)
- url = f"/api/0/projects/{self.project.organization.slug}/{self.project.slug}/user-feedback/"
+ url = _make_url(self.project)
event_id = uuid4().hex
with self.feature("organizations:user-feedback-ingest"):
response = self.client.post(
diff --git a/tests/sentry/tasks/test_update_user_reports.py b/tests/sentry/tasks/test_update_user_reports.py
index 5712f2022f1f68..9e2b07b55f3fc9 100644
--- a/tests/sentry/tasks/test_update_user_reports.py
+++ b/tests/sentry/tasks/test_update_user_reports.py
@@ -8,6 +8,7 @@
from sentry.testutils.cases import TestCase
from sentry.testutils.helpers.datetime import iso_format
from sentry.testutils.skips import requires_snuba
+from sentry.utils.dates import epoch
pytestmark = [requires_snuba]
@@ -44,7 +45,7 @@ def test_simple(self):
assert report3.group_id is None
assert report3.environment_id is None
- def test_start_end_reports(self):
+ def test_report_timerange(self):
# The task should only update UserReports added in the given time range.
now = timezone.now()
start = now - timedelta(days=3)
@@ -80,9 +81,9 @@ def test_start_end_reports(self):
assert report4.group_id is None
assert report4.environment_id is None
- def test_start_end_events(self):
+ def test_event_timerange(self):
# The task should only query associated events from the given time range, or up to 1 day older.
- event_start_offset = timedelta(days=1)
+ event_lookback = timedelta(days=1)
now = timezone.now()
start = now - timedelta(days=3)
@@ -92,11 +93,11 @@ def test_start_end_events(self):
project = self.create_project()
event1, _ = self.create_event_and_report(
project.id,
- event_dt=start - event_start_offset - timedelta(hours=1),
+ event_dt=start - event_lookback - timedelta(hours=1),
report_dt=report_dt,
)
event2, _ = self.create_event_and_report(
- project.id, event_dt=start - event_start_offset, report_dt=report_dt
+ project.id, event_dt=start - event_lookback, report_dt=report_dt
)
event3, _ = self.create_event_and_report(
project.id, event_dt=start + timedelta(hours=1), report_dt=report_dt
@@ -106,7 +107,7 @@ def test_start_end_events(self):
)
with self.tasks():
- update_user_reports(start=start, end=end)
+ update_user_reports(start=start, end=end, event_lookback=event_lookback)
report1 = UserReport.objects.get(project_id=project.id, event_id=event1.event_id)
report2 = UserReport.objects.get(project_id=project.id, event_id=event2.event_id)
@@ -161,3 +162,24 @@ def test_simple_calls_feedback_shim_if_ff_enabled(self, mock_produce_occurrence_
assert mock_event_data["platform"] == "other"
assert mock_event_data["contexts"]["feedback"]["associated_event_id"] == event1.event_id
assert mock_event_data["level"] == "error"
+
+ @patch("sentry.quotas.backend.get_event_retention")
+ def test_event_retention(self, mock_get_event_retention):
+ retention_days = 21
+ mock_get_event_retention.return_value = retention_days
+ project = self.create_project()
+ now = timezone.now()
+
+ event_dt = now - timedelta(days=retention_days + 1)
+ report_dt = now - timedelta(days=retention_days - 1)
+ event_lookback = timedelta(days=3)
+
+ self.create_event_and_report(project.id, event_dt=event_dt, report_dt=report_dt)
+
+ with self.tasks():
+ update_user_reports(start=epoch, end=now, event_lookback=event_lookback)
+
+ assert mock_get_event_retention.call_count > 0
+ report = UserReport.objects.get()
+ assert report.group_id is None
+ assert report.environment_id is None
From 13be3a845247d0d11f8e43ccbb9c9f436c9eced7 Mon Sep 17 00:00:00 2001
From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com>
Date: Thu, 3 Oct 2024 11:55:32 -0700
Subject: [PATCH 085/139] fix(issue-views): Add undefined check for viewId
(#78563)
Fixes JAVASCRIPT-2W5R
---
static/app/views/issueList/customViewsHeader.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/static/app/views/issueList/customViewsHeader.tsx b/static/app/views/issueList/customViewsHeader.tsx
index 61249942cf958d..82dc51e1888217 100644
--- a/static/app/views/issueList/customViewsHeader.tsx
+++ b/static/app/views/issueList/customViewsHeader.tsx
@@ -333,7 +333,7 @@ function CustomViewsIssueListHeaderTabsContent({
});
});
- if (viewId.startsWith('_') && currentView) {
+ if (viewId?.startsWith('_') && currentView) {
const matchingView = newlyCreatedViews.find(
view =>
view.id &&
From 490cea74b952b523b3b4a79b8b696ee0e199f870 Mon Sep 17 00:00:00 2001
From: George Gritsouk <989898+gggritso@users.noreply.github.com>
Date: Thu, 3 Oct 2024 15:02:41 -0400
Subject: [PATCH 086/139] feat(dashboards): Big Number Widget error state
improvements (#78556)
This is a small feature and a refactor that enables it. In short, all
the code that handled value errors, loading states, and error states is
lifted out of `BigNumberWidgetVisualization` into `BigNumberWidget`.
This is a lot simpler because this way we don't have lower components
detect errors that might affect higher components. Now the visualization
component just visualizes good data. Everything else is higher, and/or
moved into `WidgetFrame` so it can be shared between different widgets.
Once that's done, I improved the layout of the error panel somewhat and
added support for "Retry" buttons. There's also a bit of cleanup to go
with this.
---
.../bigNumberWidget/bigNumberWidget.spec.tsx | 39 ++++++
.../bigNumberWidget.stories.tsx | 10 +-
.../bigNumberWidget/bigNumberWidget.tsx | 52 +++++++-
.../bigNumberWidgetVisualization.tsx | 119 ++++++------------
...sx => differenceToPreviousPeriodValue.tsx} | 21 ++--
.../dashboards/widgets/common/errorPanel.tsx | 15 ++-
.../dashboards/widgets/common/settings.tsx | 9 ++
.../views/dashboards/widgets/common/types.tsx | 13 +-
.../dashboards/widgets/common/widgetFrame.tsx | 38 ++++--
9 files changed, 201 insertions(+), 115 deletions(-)
rename static/app/views/dashboards/widgets/bigNumberWidget/{differenceToPreviousPeriodData.tsx => differenceToPreviousPeriodValue.tsx} (86%)
create mode 100644 static/app/views/dashboards/widgets/common/settings.tsx
diff --git a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.spec.tsx b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.spec.tsx
index a6a8f97f9863a3..cd6cc2bb79f0fe 100644
--- a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.spec.tsx
+++ b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.spec.tsx
@@ -140,11 +140,50 @@ describe('BigNumberWidget', () => {
expect(screen.getByText('—')).toBeInTheDocument();
});
+ it('Loading state takes precedence over error state', () => {
+ render(
+
+ );
+
+ expect(screen.getByText('—')).toBeInTheDocument();
+ });
+
it('Shows an error message', () => {
render( );
expect(screen.getByText('Error: Uh oh')).toBeInTheDocument();
});
+
+ it('Shows a retry button', async () => {
+ const onRetry = jest.fn();
+
+ render( );
+
+ await userEvent.click(screen.getByRole('button', {name: 'Retry'}));
+ expect(onRetry).toHaveBeenCalledTimes(1);
+ });
+
+ it('Hides other actions if there is an error and a retry handler', () => {
+ const onRetry = jest.fn();
+
+ render(
+
+ );
+
+ expect(screen.getByRole('button', {name: 'Retry'})).toBeInTheDocument();
+ expect(
+ screen.queryByRole('link', {name: 'Open in Discover'})
+ ).not.toBeInTheDocument();
+ });
});
describe('Previous Period Data', () => {
diff --git a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.stories.tsx b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.stories.tsx
index aff28d5312c085..45c49cac3b98a1 100644
--- a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.stories.tsx
+++ b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.stories.tsx
@@ -124,7 +124,8 @@ export default storyBook(BigNumberWidget, story => {
supports the usual loading and error states.
- The loading state shows a simple placeholder.
+ The loading state shows a simple placeholder. The error state also shows an
+ optional "Retry" button.
@@ -146,6 +147,13 @@ export default storyBook(BigNumberWidget, story => {
error={new Error('Something went wrong!')}
/>
+
+ {}}
+ />
+
);
diff --git a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.tsx b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.tsx
index 5c6556fe580c00..b7c09c890dd7bd 100644
--- a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.tsx
+++ b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.tsx
@@ -1,6 +1,7 @@
import styled from '@emotion/styled';
import {space} from 'sentry/styles/space';
+import {defined} from 'sentry/utils';
import {
BigNumberWidgetVisualization,
type Props as BigNumberWidgetVisualizationProps,
@@ -10,26 +11,60 @@ import {
WidgetFrame,
} from 'sentry/views/dashboards/widgets/common/widgetFrame';
+import {MISSING_DATA_MESSAGE, NON_FINITE_NUMBER_MESSAGE} from '../common/settings';
+import type {DataProps, StateProps} from '../common/types';
+
+import {DEEMPHASIS_COLOR_NAME, LOADING_PLACEHOLDER} from './settings';
+
interface Props
- extends Omit,
- BigNumberWidgetVisualizationProps {}
+ extends DataProps,
+ StateProps,
+ Omit,
+ Omit {}
export function BigNumberWidget(props: Props) {
+ const {data, previousPeriodData} = props;
+
+ // TODO: Instrument getting more than one data key back as an error
+  // e.g., with data that looks like `[{'apdex()': 0.8}]`, this pulls out `"apdex()"` or `undefined`
+ const field = Object.keys(data?.[0] ?? {})[0];
+ const value = data?.[0]?.[field];
+ const previousPeriodValue = previousPeriodData?.[0]?.[field];
+
+ if (props.isLoading) {
+ return (
+
+ {LOADING_PLACEHOLDER}
+
+ );
+ }
+
+ let parsingError: string | undefined = undefined;
+
+ if (!defined(value)) {
+ parsingError = MISSING_DATA_MESSAGE;
+ } else if (!Number.isFinite(value) || Number.isNaN(value)) {
+ parsingError = NON_FINITE_NUMBER_MESSAGE;
+ }
+
+ const error = props.error ?? parsingError;
+
return (
@@ -41,3 +76,8 @@ const BigNumberResizeWrapper = styled('div')`
flex-grow: 1;
margin-top: ${space(1)};
`;
+
+const LoadingPlaceholder = styled('span')`
+ color: ${p => p.theme[DEEMPHASIS_COLOR_NAME]};
+ font-size: ${p => p.theme.fontSizeLarge};
+`;
diff --git a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidgetVisualization.tsx b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidgetVisualization.tsx
index a4c12c1824a850..b1f2821d056751 100644
--- a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidgetVisualization.tsx
+++ b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidgetVisualization.tsx
@@ -2,90 +2,47 @@ import styled from '@emotion/styled';
import type {Polarity} from 'sentry/components/percentChange';
import {Tooltip} from 'sentry/components/tooltip';
-import {t} from 'sentry/locale';
import {defined} from 'sentry/utils';
import type {MetaType} from 'sentry/utils/discover/eventView';
import {getFieldRenderer} from 'sentry/utils/discover/fieldRenderers';
import {useLocation} from 'sentry/utils/useLocation';
import useOrganization from 'sentry/utils/useOrganization';
import {AutoSizedText} from 'sentry/views/dashboards/widgetCard/autoSizedText';
-import {DifferenceToPreviousPeriodData} from 'sentry/views/dashboards/widgets/bigNumberWidget/differenceToPreviousPeriodData';
-import {
- DEEMPHASIS_COLOR_NAME,
- LOADING_PLACEHOLDER,
- NO_DATA_PLACEHOLDER,
-} from 'sentry/views/dashboards/widgets/bigNumberWidget/settings';
-import {ErrorPanel} from 'sentry/views/dashboards/widgets/common/errorPanel';
-import type {
- Meta,
- StateProps,
- TableData,
-} from 'sentry/views/dashboards/widgets/common/types';
-
-export interface Props extends StateProps {
- data?: TableData;
+import {DifferenceToPreviousPeriodValue} from 'sentry/views/dashboards/widgets/bigNumberWidget/differenceToPreviousPeriodValue';
+import type {Meta, TableData} from 'sentry/views/dashboards/widgets/common/types';
+
+import {DEFAULT_FIELD} from '../common/settings';
+
+export interface Props {
+ value: number;
+ field?: string;
maximumValue?: number;
meta?: Meta;
preferredPolarity?: Polarity;
- previousPeriodData?: TableData;
+ previousPeriodValue?: number;
}
export function BigNumberWidgetVisualization(props: Props) {
const {
- data,
- previousPeriodData,
+ field = DEFAULT_FIELD,
+ value,
+ previousPeriodValue,
maximumValue = Number.MAX_VALUE,
preferredPolarity,
meta,
- isLoading,
- error,
} = props;
const location = useLocation();
const organization = useOrganization();
- if (error) {
- return ;
- }
-
- // TODO: Instrument getting more than one data key back as an error
- // e.g., with data that looks like `[{'apdex()': 0.8}] this pulls out `"apdex()"` or `undefined`
- const field = Object.keys(data?.[0] ?? {})[0];
- const value = data?.[0]?.[field];
-
- if (isLoading) {
- return (
-
- {LOADING_PLACEHOLDER}
-
- );
- }
-
- if (!defined(value)) {
- return (
-
- {NO_DATA_PLACEHOLDER}
-
- );
- }
-
- if (!Number.isFinite(value) || error) {
- return ;
- }
-
- const parsedValue = Number(value);
-
// TODO: meta as MetaType is a white lie. `MetaType` doesn't know that types can be null, but they can!
- const fieldRenderer = meta
- ? getFieldRenderer(field, meta as MetaType, false)
- : renderableValue => renderableValue.toString();
-
- const doesValueHitMaximum = maximumValue ? parsedValue >= maximumValue : false;
- const clampedValue = Math.min(parsedValue, maximumValue);
+ const fieldRenderer =
+ meta && field
+ ? getFieldRenderer(field, meta as MetaType, false)
+ : renderableValue => renderableValue.toString();
- const datum = {
- [field]: clampedValue,
- };
+ const doesValueHitMaximum = maximumValue ? value >= maximumValue : false;
+ const clampedValue = Math.min(value, maximumValue);
const unit = meta?.units?.[field];
@@ -95,35 +52,41 @@ export function BigNumberWidgetVisualization(props: Props) {
unit: unit ?? undefined, // TODO: Field formatters think units can't be null but they can
};
- const rendered = fieldRenderer(datum, baggage);
-
return (
{doesValueHitMaximum ? '>' : ''}
- {rendered}
+ {fieldRenderer(
+ {
+ [field]: clampedValue,
+ },
+ baggage
+ )}
- {data && previousPeriodData && !doesValueHitMaximum && (
-
- fieldRenderer(previousDatum, baggage)
- }
- field={field}
- />
- )}
+ {defined(previousPeriodValue) &&
+ Number.isFinite(previousPeriodValue) &&
+ !Number.isNaN(previousPeriodValue) &&
+ !doesValueHitMaximum && (
+
+ fieldRenderer(previousDatum, baggage)
+ }
+ />
+ )}
);
@@ -167,7 +130,3 @@ const NumberContainerOverride = styled('div')`
white-space: nowrap;
}
`;
-
-const Deemphasize = styled('span')`
- color: ${p => p.theme[DEEMPHASIS_COLOR_NAME]};
-`;
diff --git a/static/app/views/dashboards/widgets/bigNumberWidget/differenceToPreviousPeriodData.tsx b/static/app/views/dashboards/widgets/bigNumberWidget/differenceToPreviousPeriodValue.tsx
similarity index 86%
rename from static/app/views/dashboards/widgets/bigNumberWidget/differenceToPreviousPeriodData.tsx
rename to static/app/views/dashboards/widgets/bigNumberWidget/differenceToPreviousPeriodValue.tsx
index 9fe293cdfcd1a3..ba7f15cb134375 100644
--- a/static/app/views/dashboards/widgets/bigNumberWidget/differenceToPreviousPeriodData.tsx
+++ b/static/app/views/dashboards/widgets/bigNumberWidget/differenceToPreviousPeriodValue.tsx
@@ -15,24 +15,23 @@ import {
} from 'sentry/views/dashboards/widgets/bigNumberWidget/settings';
import type {TableData} from 'sentry/views/dashboards/widgets/common/types';
+import {DEFAULT_FIELD} from '../common/settings';
+
interface Props {
- data: TableData;
- field: string;
- previousPeriodData: TableData;
+ previousPeriodValue: number;
renderer: (datum: TableData[number]) => React.ReactNode;
+ value: number;
+ field?: string;
preferredPolarity?: Polarity;
}
-export function DifferenceToPreviousPeriodData({
- data,
- previousPeriodData,
+export function DifferenceToPreviousPeriodValue({
+ value: currentValue,
+ previousPeriodValue: previousValue,
preferredPolarity = '',
- field,
+ field = DEFAULT_FIELD,
renderer,
}: Props) {
- const currentValue = data[0][field];
- const previousValue = previousPeriodData[0][field];
-
if (!isNumber(currentValue) || !isNumber(previousValue)) {
return {LOADING_PLACEHOLDER} ;
}
@@ -45,7 +44,7 @@ export function DifferenceToPreviousPeriodData({
// Create a fake data row so we can pass it to field renderers. Omit the +/- sign since the direction marker will indicate it
const differenceAsDatum = {
- [field]: Math.abs(difference),
+ [field ?? 'unknown']: Math.abs(difference),
};
return (
diff --git a/static/app/views/dashboards/widgets/common/errorPanel.tsx b/static/app/views/dashboards/widgets/common/errorPanel.tsx
index 9027a79495705f..640940d066cf1f 100644
--- a/static/app/views/dashboards/widgets/common/errorPanel.tsx
+++ b/static/app/views/dashboards/widgets/common/errorPanel.tsx
@@ -12,24 +12,27 @@ interface Props {
export function ErrorPanel({error}: Props) {
return (
-
+
{error?.toString()}
);
}
+const NonShrinkingWarningIcon = styled(IconWarning)`
+ flex-shrink: 0;
+`;
+
const Panel = styled('div')<{height?: string}>`
position: absolute;
inset: 0;
+ padding: ${space(0.5)} 0;
+
display: flex;
- flex-direction: column;
- justify-content: center;
- align-items: center;
- gap: ${space(0.5)};
+ gap: ${space(1)};
overflow: hidden;
color: ${p => p.theme[DEEMPHASIS_COLOR_NAME]};
- font-size: ${p => p.theme.fontSizeExtraLarge};
+ font-size: ${p => p.theme.fontSizeLarge};
`;
diff --git a/static/app/views/dashboards/widgets/common/settings.tsx b/static/app/views/dashboards/widgets/common/settings.tsx
new file mode 100644
index 00000000000000..eba012ff0ef7eb
--- /dev/null
+++ b/static/app/views/dashboards/widgets/common/settings.tsx
@@ -0,0 +1,9 @@
+import {t} from 'sentry/locale';
+
+export const MIN_WIDTH = 200;
+export const MIN_HEIGHT = 120;
+
+export const DEFAULT_FIELD = 'unknown'; // Numeric data might, in theory, have a missing field. In this case we need a fallback to provide to the field rendering pipeline. `'unknown'` will result in rendering as a string
+
+export const MISSING_DATA_MESSAGE = t('No Data');
+export const NON_FINITE_NUMBER_MESSAGE = t('Value is not a finite number.');
diff --git a/static/app/views/dashboards/widgets/common/types.tsx b/static/app/views/dashboards/widgets/common/types.tsx
index eca048bdb0f445..c7b0b653c96dac 100644
--- a/static/app/views/dashboards/widgets/common/types.tsx
+++ b/static/app/views/dashboards/widgets/common/types.tsx
@@ -3,9 +3,18 @@ export type Meta = {
units?: Record;
};
-export type TableData = Record[];
+type TableRow = Record;
+export type TableData = TableRow[];
+
+export interface DataProps {
+ data?: TableData;
+ previousPeriodData?: TableData;
+}
+
+export type ErrorProp = Error | string;
export interface StateProps {
- error?: Error | string;
+ error?: ErrorProp;
isLoading?: boolean;
+ onRetry?: () => void;
}
diff --git a/static/app/views/dashboards/widgets/common/widgetFrame.tsx b/static/app/views/dashboards/widgets/common/widgetFrame.tsx
index 2437699680dfe5..80764a0d465ff8 100644
--- a/static/app/views/dashboards/widgets/common/widgetFrame.tsx
+++ b/static/app/views/dashboards/widgets/common/widgetFrame.tsx
@@ -9,7 +9,11 @@ import {IconEllipsis} from 'sentry/icons';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
-export interface Props {
+import {ErrorPanel} from './errorPanel';
+import {MIN_HEIGHT, MIN_WIDTH} from './settings';
+import type {StateProps} from './types';
+
+export interface Props extends StateProps {
actions?: MenuItemProps[];
children?: React.ReactNode;
description?: string;
@@ -17,19 +21,33 @@ export interface Props {
}
export function WidgetFrame(props: Props) {
- const {title, description, actions, children} = props;
+ const {error} = props;
+
+ // The error state has its own set of available actions
+ const actions =
+ (error
+ ? props.onRetry
+ ? [
+ {
+ key: 'retry',
+ label: t('Retry'),
+ onAction: props.onRetry,
+ },
+ ]
+ : []
+ : props.actions) ?? [];
return (
-
- {title}
+
+ {props.title}
- {description && (
+ {props.description && (
-
+
)}
@@ -59,7 +77,9 @@ export function WidgetFrame(props: Props) {
- {children}
+
+ {props.error ? : props.children}
+
);
}
@@ -70,9 +90,9 @@ const Frame = styled('div')`
flex-direction: column;
height: 100%;
- min-height: 96px;
+ min-height: ${MIN_HEIGHT}px;
width: 100%;
- min-width: 120px;
+ min-width: ${MIN_WIDTH}px;
padding: ${space(2)};
From a41dc3108b68c638a307aee738a7a268ab738f85 Mon Sep 17 00:00:00 2001
From: Malachi Willey
Date: Thu, 3 Oct 2024 12:05:44 -0700
Subject: [PATCH 087/139] chore(query-builder): Remove feature flag checks for
issue search bars (#78395)
---
.../createSavedSearchModal.spec.tsx | 7 +-
.../editSavedSearchModal.spec.tsx | 19 +-
.../savedSearchModalContent.tsx | 10 +-
.../searchQueryBuilder/tokens/combobox.tsx | 3 +
.../searchQueryBuilder/tokens/freeText.tsx | 3 +
static/app/stores/tagStore.spec.tsx | 75 ------
static/app/stores/tagStore.tsx | 229 ------------------
static/app/utils/withIssueTags.spec.tsx | 121 ---------
static/app/utils/withIssueTags.tsx | 126 ----------
.../filterResultsStep/issuesSearchBar.tsx | 54 ++---
.../views/issueDetails/groupEvents.spec.tsx | 34 +--
static/app/views/issueDetails/groupEvents.tsx | 25 +-
.../issueSearchWithSavedSearches.spec.tsx | 10 +-
.../issueSearchWithSavedSearches.tsx | 3 +-
static/app/views/issueList/overview.spec.tsx | 183 ++++++++------
static/app/views/issueList/overview.tsx | 39 +--
.../issueList/savedIssueSearches.spec.tsx | 4 +
static/app/views/issueList/searchBar.spec.tsx | 178 +-------------
static/app/views/issueList/searchBar.tsx | 176 ++------------
.../issueList/utils/useFetchIssueTags.tsx | 74 +++---
tests/acceptance/test_issue_saved_searches.py | 10 +-
21 files changed, 251 insertions(+), 1132 deletions(-)
delete mode 100644 static/app/utils/withIssueTags.spec.tsx
delete mode 100644 static/app/utils/withIssueTags.tsx
diff --git a/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx b/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx
index acf80d95ce1014..00346df0e30079 100644
--- a/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx
+++ b/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx
@@ -83,9 +83,8 @@ describe('CreateSavedSearchModal', function () {
await userEvent.click(screen.getByRole('textbox', {name: /name/i}));
await userEvent.paste('new search name');
- await userEvent.clear(screen.getByRole('textbox', {name: /filter issues/i}));
- await userEvent.click(screen.getByRole('textbox', {name: /filter issues/i}));
- await userEvent.paste('is:resolved');
+ await userEvent.click(screen.getAllByRole('combobox').at(-1)!);
+ await userEvent.paste('event.type:error');
await selectEvent.select(screen.getByText('Last Seen'), 'Trends');
await userEvent.click(screen.getByRole('button', {name: 'Save'}));
@@ -96,7 +95,7 @@ describe('CreateSavedSearchModal', function () {
expect.objectContaining({
data: {
name: 'new search name',
- query: 'is:resolved',
+ query: 'is:unresolved assigned:lyn@sentry.io event.type:error',
sort: IssueSortOptions.TRENDS,
type: 0,
visibility: SavedSearchVisibility.OWNER,
diff --git a/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx b/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx
index 0f40f68a9a9578..919763334c32a1 100644
--- a/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx
+++ b/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx
@@ -60,7 +60,7 @@ describe('EditSavedSearchModal', function () {
body: {
id: 'saved-search-id',
name: 'test',
- query: 'is:unresolved browser:firefox',
+ query: 'is:unresolved browser:firefox event.type:error',
sort: IssueSortOptions.TRENDS,
visibility: SavedSearchVisibility.OWNER,
},
@@ -71,11 +71,13 @@ describe('EditSavedSearchModal', function () {
await userEvent.clear(screen.getByRole('textbox', {name: /name/i}));
await userEvent.paste('new search name');
- await userEvent.clear(screen.getByRole('textbox', {name: /filter issues/i}));
- await userEvent.paste('test');
-
await selectEvent.select(screen.getByText('Last Seen'), 'Trends');
+ await userEvent.click(
+ screen.getAllByRole('combobox', {name: 'Add a search term'}).at(-1)!
+ );
+ await userEvent.paste('event.type:error');
+
await selectEvent.select(screen.getByText('Only me'), 'Users in my organization');
await userEvent.click(screen.getByRole('button', {name: 'Save'}));
@@ -86,7 +88,7 @@ describe('EditSavedSearchModal', function () {
expect.objectContaining({
data: expect.objectContaining({
name: 'new search name',
- query: 'test',
+ query: 'is:unresolved browser:firefox event.type:error',
visibility: SavedSearchVisibility.ORGANIZATION,
}),
})
@@ -119,11 +121,6 @@ describe('EditSavedSearchModal', function () {
await userEvent.clear(screen.getByRole('textbox', {name: /name/i}));
await userEvent.paste('new search name');
- await userEvent.clear(screen.getByTestId('smart-search-input'));
- await userEvent.paste('test');
-
- await selectEvent.select(screen.getByText('Last Seen'), 'Trends');
-
// Hovering over the visibility dropdown shows disabled reason
await userEvent.hover(screen.getByText(/only me/i));
await screen.findByText(/only organization admins can create global saved searches/i);
@@ -136,7 +133,7 @@ describe('EditSavedSearchModal', function () {
expect.objectContaining({
data: expect.objectContaining({
name: 'new search name',
- query: 'test',
+ query: 'is:unresolved browser:firefox',
visibility: SavedSearchVisibility.OWNER,
}),
})
diff --git a/static/app/components/modals/savedSearchModal/savedSearchModalContent.tsx b/static/app/components/modals/savedSearchModal/savedSearchModalContent.tsx
index b6505d703fdb99..c07d25cc906304 100644
--- a/static/app/components/modals/savedSearchModal/savedSearchModalContent.tsx
+++ b/static/app/components/modals/savedSearchModal/savedSearchModalContent.tsx
@@ -55,19 +55,15 @@ export function SavedSearchModalContent({organization}: SavedSearchModalContentP
flexibleControlStateSize
required
>
- {({id, name, onChange, onBlur, disabled, value}) => (
+ {({onChange, onBlur, disabled, value}) => (
{
+ onChange={newValue => {
onChange(newValue, {});
onBlur(newValue, {});
}}
- includeLabel={false}
- useFormWrapper={false}
disabled={disabled}
- query={value}
+ initialQuery={value}
searchSource="saved_searches_modal"
/>
)}
diff --git a/static/app/components/searchQueryBuilder/tokens/combobox.tsx b/static/app/components/searchQueryBuilder/tokens/combobox.tsx
index cf93eb4f493f3a..145530e0029b48 100644
--- a/static/app/components/searchQueryBuilder/tokens/combobox.tsx
+++ b/static/app/components/searchQueryBuilder/tokens/combobox.tsx
@@ -64,6 +64,7 @@ type SearchQueryBuilderComboboxProps;
+ ['data-test-id']?: string;
/**
* If the combobox has additional information to display, passing JSX
* to this prop will display it in an overlay at the top left position.
@@ -334,6 +335,7 @@ function SearchQueryBuilderComboboxInner,
ref: ForwardedRef
) {
@@ -538,6 +540,7 @@ function SearchQueryBuilderComboboxInner onKeyDownCapture?.(e, {state})}
+ data-test-id={dataTestId}
/>
{description ? (
{keyItem =>
itemIsSection(keyItem) ? (
diff --git a/static/app/stores/tagStore.spec.tsx b/static/app/stores/tagStore.spec.tsx
index e3042f1756293e..d73f20f7a5f7aa 100644
--- a/static/app/stores/tagStore.spec.tsx
+++ b/static/app/stores/tagStore.spec.tsx
@@ -1,5 +1,3 @@
-import {OrganizationFixture} from 'sentry-fixture/organization';
-
import TagStore from 'sentry/stores/tagStore';
describe('TagStore', function () {
@@ -37,79 +35,6 @@ describe('TagStore', function () {
});
});
- describe('getIssueAttributes()', function () {
- it('should populate the has tag with values', () => {
- TagStore.loadTagsSuccess([
- {
- key: 'mytag',
- name: 'My Custom Tag',
- },
- {
- key: 'otherkey',
- name: 'My other tag',
- },
- ]);
-
- expect(TagStore.getIssueAttributes(OrganizationFixture()).has).toEqual({
- key: 'has',
- name: 'Has Tag',
- values: ['mytag', 'otherkey'],
- predefined: true,
- });
- });
-
- it('should not overwrite predefined filters', () => {
- TagStore.loadTagsSuccess([
- {
- key: 'is',
- name: 'Custom Assigned To',
- },
- ]);
-
- const tags = TagStore.getIssueAttributes(OrganizationFixture());
- expect(tags.is).toBeTruthy();
- expect(tags.is.key).toBe('is');
- expect(tags.assigned).toBeTruthy();
- });
-
- it('should replace ignore with archive', () => {
- TagStore.loadTagsSuccess([
- {
- key: 'is',
- name: 'Custom Assigned To',
- },
- ]);
-
- const tags = TagStore.getIssueAttributes(OrganizationFixture());
- expect(tags.is.values).toContain('archived');
- });
- });
-
- describe('getIssueTags()', function () {
- it('should have built in, state, and issue attribute tags', () => {
- TagStore.loadTagsSuccess([
- {
- key: 'mytag',
- name: 'My Custom Tag',
- },
- ]);
-
- const tags = TagStore.getIssueTags(OrganizationFixture());
-
- // state
- expect(tags.mytag).toBeTruthy();
- expect(tags.mytag.key).toBe('mytag');
-
- // attribute
- expect(tags.has).toBeTruthy();
- expect(tags.has.key).toBe('has');
-
- // built in
- expect(tags['device.family']).toBeTruthy();
- expect(tags['device.family'].key).toBe('device.family');
- });
- });
-
it('returns a stable reference from getState', () => {
TagStore.loadTagsSuccess([
{
diff --git a/static/app/stores/tagStore.tsx b/static/app/stores/tagStore.tsx
index 190a1594251169..3ab9d15c084e39 100644
--- a/static/app/stores/tagStore.tsx
+++ b/static/app/stores/tagStore.tsx
@@ -1,55 +1,10 @@
import {createStore} from 'reflux';
-import {ItemType, type SearchGroup} from 'sentry/components/smartSearchBar/types';
import type {Tag, TagCollection} from 'sentry/types/group';
-import {
- getIssueTitleFromType,
- IssueCategory,
- IssueType,
- PriorityLevel,
-} from 'sentry/types/group';
-import type {Organization} from 'sentry/types/organization';
-import {SEMVER_TAGS} from 'sentry/utils/discover/fields';
-import {
- FieldKey,
- FieldKind,
- ISSUE_EVENT_PROPERTY_FIELDS,
- ISSUE_FIELDS,
- ISSUE_PROPERTY_FIELDS,
-} from 'sentry/utils/fields';
import type {StrictStoreDefinition} from './types';
-// This list is only used on issues. Events/discover
-// have their own field list that exists elsewhere.
-const BUILTIN_TAGS = ISSUE_FIELDS.reduce((acc, tag) => {
- acc[tag] = {key: tag, name: tag};
- return acc;
-}, {});
-
-// For the new query builder, we need to differentiate between issue and event fields
-const BUILTIN_TAGS_BY_CATEGORY = {
- ...ISSUE_PROPERTY_FIELDS.reduce((acc, tag) => {
- acc[tag] = {key: tag, name: tag, predefined: true, kind: FieldKind.ISSUE_FIELD};
- return acc;
- }, {}),
- ...ISSUE_EVENT_PROPERTY_FIELDS.reduce((acc, tag) => {
- acc[tag] = {key: tag, name: tag, predefined: false, kind: FieldKind.EVENT_FIELD};
- return acc;
- }, {}),
-};
-
-export function getBuiltInTags(organization: Organization) {
- if (organization.features.includes('issue-stream-search-query-builder')) {
- return BUILTIN_TAGS_BY_CATEGORY;
- }
-
- return BUILTIN_TAGS;
-}
-
interface TagStoreDefinition extends StrictStoreDefinition {
- getIssueAttributes(organization: Organization): TagCollection;
- getIssueTags(org: Organization): TagCollection;
loadTagsSuccess(data: Tag[]): void;
reset(): void;
}
@@ -63,190 +18,6 @@ const storeConfig: TagStoreDefinition = {
this.state = {};
},
- /**
- * Gets only predefined issue attributes
- */
- getIssueAttributes(organization: Organization) {
- // TODO(mitsuhiko): what do we do with translations here?
- const isSuggestions = [
- 'resolved',
- 'unresolved',
- ...['archived', 'escalating', 'new', 'ongoing', 'regressed'],
- 'assigned',
- 'unassigned',
- 'for_review',
- 'linked',
- 'unlinked',
- ];
-
- const sortedTagKeys = Object.keys(this.state).sort((a, b) => {
- return a.toLowerCase().localeCompare(b.toLowerCase());
- });
-
- const builtinTags = getBuiltInTags(organization);
-
- const tagCollection = {
- [FieldKey.IS]: {
- ...builtinTags[FieldKey.IS],
- key: FieldKey.IS,
- name: 'Status',
- values: isSuggestions,
- maxSuggestedValues: isSuggestions.length,
- predefined: true,
- },
- [FieldKey.HAS]: {
- ...builtinTags[FieldKey.HAS],
- key: FieldKey.HAS,
- name: 'Has Tag',
- values: sortedTagKeys,
- predefined: true,
- },
- [FieldKey.ASSIGNED]: {
- ...builtinTags[FieldKey.ASSIGNED],
- key: FieldKey.ASSIGNED,
- name: 'Assigned To',
- values: [],
- predefined: true,
- },
- [FieldKey.BOOKMARKS]: {
- ...builtinTags[FieldKey.BOOKMARKS],
- name: 'Bookmarked By',
- values: [],
- predefined: true,
- },
- [FieldKey.ISSUE_CATEGORY]: {
- ...builtinTags[FieldKey.ISSUE_CATEGORY],
- name: 'Issue Category',
- values: [
- IssueCategory.ERROR,
- IssueCategory.PERFORMANCE,
- IssueCategory.REPLAY,
- IssueCategory.CRON,
- IssueCategory.UPTIME,
- ],
- predefined: true,
- },
- [FieldKey.ISSUE_TYPE]: {
- ...builtinTags[FieldKey.ISSUE_TYPE],
- name: 'Issue Type',
- values: [
- IssueType.PERFORMANCE_N_PLUS_ONE_DB_QUERIES,
- IssueType.PERFORMANCE_N_PLUS_ONE_API_CALLS,
- IssueType.PERFORMANCE_CONSECUTIVE_DB_QUERIES,
- IssueType.PERFORMANCE_SLOW_DB_QUERY,
- IssueType.PERFORMANCE_RENDER_BLOCKING_ASSET,
- IssueType.PERFORMANCE_UNCOMPRESSED_ASSET,
- IssueType.PERFORMANCE_ENDPOINT_REGRESSION,
- IssueType.PROFILE_FILE_IO_MAIN_THREAD,
- IssueType.PROFILE_IMAGE_DECODE_MAIN_THREAD,
- IssueType.PROFILE_JSON_DECODE_MAIN_THREAD,
- IssueType.PROFILE_REGEX_MAIN_THREAD,
- IssueType.PROFILE_FUNCTION_REGRESSION,
- ].map(value => ({
- icon: null,
- title: value,
- name: value,
- documentation: getIssueTitleFromType(value),
- value,
- type: ItemType.TAG_VALUE,
- children: [],
- })) as SearchGroup[],
- predefined: true,
- },
- [FieldKey.LAST_SEEN]: {
- ...builtinTags[FieldKey.LAST_SEEN],
- name: 'Last Seen',
- values: [],
- predefined: false,
- },
- [FieldKey.FIRST_SEEN]: {
- ...builtinTags[FieldKey.FIRST_SEEN],
- name: 'First Seen',
- values: [],
- predefined: false,
- },
- [FieldKey.FIRST_RELEASE]: {
- ...builtinTags[FieldKey.FIRST_RELEASE],
- name: 'First Release',
- values: ['latest'],
- predefined: true,
- },
- [FieldKey.EVENT_TIMESTAMP]: {
- ...builtinTags[FieldKey.EVENT_TIMESTAMP],
- name: 'Event Timestamp',
- values: [],
- predefined: true,
- },
- [FieldKey.TIMES_SEEN]: {
- ...builtinTags[FieldKey.TIMES_SEEN],
- name: 'Times Seen',
- isInput: true,
- // Below values are required or else SearchBar will attempt to get values
- // This is required or else SearchBar will attempt to get values
- values: [],
- predefined: true,
- },
- [FieldKey.ASSIGNED_OR_SUGGESTED]: {
- ...builtinTags[FieldKey.ASSIGNED_OR_SUGGESTED],
- name: 'Assigned or Suggested',
- isInput: true,
- values: [],
- predefined: true,
- },
- [FieldKey.ISSUE_PRIORITY]: {
- ...builtinTags[FieldKey.ISSUE_PRIORITY],
- name: 'Issue Priority',
- values: [PriorityLevel.HIGH, PriorityLevel.MEDIUM, PriorityLevel.LOW],
- predefined: true,
- },
- };
-
- // Ony include fields that that are part of the ISSUE_FIELDS. This is
- // because we may sometimes have fields that are turned off by removing
- // them from ISSUE_FIELDS
- const filteredCollection = Object.entries(tagCollection).filter(([key]) =>
- ISSUE_FIELDS.includes(key as FieldKey)
- );
-
- return Object.fromEntries(filteredCollection);
- },
-
- /**
- * Get all tags including builtin issue tags and issue attributes
- */
- getIssueTags(org: Organization) {
- const eventTags = Object.values(this.state).reduce((acc, tag) => {
- return {
- ...acc,
- [tag.key]: {
- ...tag,
- kind: FieldKind.TAG,
- },
- };
- }, {});
-
- const semverFields = Object.values(SEMVER_TAGS).reduce((acc, tag) => {
- return {
- ...acc,
- [tag.key]: {
- predefined: false,
- ...tag,
- kind: org.features.includes('issue-stream-search-query-builder')
- ? FieldKind.EVENT_FIELD
- : FieldKind.FIELD,
- },
- };
- }, {});
-
- const issueTags = {
- ...getBuiltInTags(org),
- ...semverFields,
- ...eventTags,
- ...this.getIssueAttributes(org),
- };
- return issueTags;
- },
-
getState() {
return this.state;
},
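With getIssueAttributes() and getIssueTags() removed, the store only tracks the raw tag collection. A minimal sketch of the remaining shape, assuming loadTagsSuccess simply merges tags into state keyed by tag key (its body is not shown in this hunk, so that detail is an assumption):

import {createStore} from 'reflux';

import type {Tag, TagCollection} from 'sentry/types/group';

import type {StrictStoreDefinition} from './types';

interface TagStoreDefinition extends StrictStoreDefinition<TagCollection> {
  loadTagsSuccess(data: Tag[]): void;
  reset(): void;
}

const storeConfig: TagStoreDefinition = {
  state: {},

  init() {
    this.state = {};
  },

  reset() {
    this.state = {};
  },

  loadTagsSuccess(data) {
    // Merge the loaded tags into the collection, keyed by tag key.
    this.state = {
      ...this.state,
      ...data.reduce<TagCollection>((acc, tag) => {
        acc[tag.key] = tag;
        return acc;
      }, {}),
    };
  },

  getState() {
    return this.state;
  },
};

export default createStore(storeConfig);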
diff --git a/static/app/utils/withIssueTags.spec.tsx b/static/app/utils/withIssueTags.spec.tsx
deleted file mode 100644
index 0766a473120cc2..00000000000000
--- a/static/app/utils/withIssueTags.spec.tsx
+++ /dev/null
@@ -1,121 +0,0 @@
-import {OrganizationFixture} from 'sentry-fixture/organization';
-import {TeamFixture} from 'sentry-fixture/team';
-import {UserFixture} from 'sentry-fixture/user';
-
-import {act, render, screen, waitFor} from 'sentry-test/reactTestingLibrary';
-
-import type {SearchGroup} from 'sentry/components/smartSearchBar/types';
-import MemberListStore from 'sentry/stores/memberListStore';
-import TagStore from 'sentry/stores/tagStore';
-import TeamStore from 'sentry/stores/teamStore';
-import type {WithIssueTagsProps} from 'sentry/utils/withIssueTags';
-import withIssueTags from 'sentry/utils/withIssueTags';
-
-interface MyComponentProps extends WithIssueTagsProps {
- forwardedValue: string;
-}
-function MyComponent(props: MyComponentProps) {
- return (
-
- ForwardedValue: {props.forwardedValue}
- {'is: ' + props.tags?.is?.values?.[0]}
- {'mechanism: ' + props.tags?.mechanism?.values?.join(', ')}
- {'bookmarks: ' + props.tags?.bookmarks?.values?.join(', ')}
- {'assigned: ' +
- (props.tags?.assigned?.values as SearchGroup[])
- .flatMap(x => x.children)
- .map(x => x.desc)
- ?.join(', ')}
- {'stack filename: ' + props.tags?.['stack.filename'].name}
-
- );
-}
-
-describe('withIssueTags HoC', function () {
- beforeEach(() => {
- TeamStore.reset();
- TagStore.reset();
- MemberListStore.loadInitialData([]);
- });
-
- it('forwards loaded tags to the wrapped component', async function () {
- const Container = withIssueTags(MyComponent);
- render( );
-
- // Should forward props.
- expect(await screen.findByText(/ForwardedValue: value/)).toBeInTheDocument();
-
- act(() => {
- TagStore.loadTagsSuccess([
- {name: 'MechanismTag', key: 'mechanism', values: ['MechanismTagValue']},
- ]);
- });
-
- // includes custom tags
- await waitFor(() => {
- expect(screen.getByText(/MechanismTagValue/)).toBeInTheDocument();
- });
-
- // should include special issue and attributes.
- expect(screen.getByText(/is: resolved/)).toBeInTheDocument();
- expect(screen.getByText(/bookmarks: me/)).toBeInTheDocument();
- expect(screen.getByText(/assigned: me/)).toBeInTheDocument();
- expect(screen.getByText(/stack filename: stack.filename/)).toBeInTheDocument();
- });
-
- it('updates the assigned tags with users and teams, and bookmark tags with users', function () {
- const Container = withIssueTags(MyComponent);
- render( );
-
- act(() => {
- TagStore.loadTagsSuccess([
- {name: 'MechanismTag', key: 'mechanism', values: ['MechanismTagValue']},
- ]);
- });
-
- expect(
- screen.getByText(/assigned: me, my_teams, none, \[me, my_teams, none\]/)
- ).toBeInTheDocument();
-
- act(() => {
- TeamStore.loadInitialData([
- TeamFixture({slug: 'best-team-na', name: 'Best Team NA', isMember: true}),
- ]);
- MemberListStore.loadInitialData([
- UserFixture(),
- UserFixture({username: 'joe@example.com'}),
- ]);
- });
-
- expect(
- screen.getByText(
- /assigned: me, my_teams, none, \[me, my_teams, none\], #best-team-na, foo@example.com, joe@example.com/
- )
- ).toBeInTheDocument();
-
- expect(
- screen.getByText(/bookmarks: me, foo@example.com, joe@example.com/)
- ).toBeInTheDocument();
- });
-
- it('groups assignees and puts suggestions first', function () {
- const Container = withIssueTags(MyComponent);
- TeamStore.loadInitialData([
- TeamFixture({id: '1', slug: 'best-team', name: 'Best Team', isMember: true}),
- TeamFixture({id: '2', slug: 'worst-team', name: 'Worst Team', isMember: false}),
- ]);
- MemberListStore.loadInitialData([
- UserFixture(),
- UserFixture({username: 'joe@example.com'}),
- ]);
- const {container} = render(
-
- );
-
- expect(container).toHaveTextContent(
- 'assigned: me, my_teams, none, [me, my_teams, none], #best-team'
- );
- // Has the other teams/members
- expect(container).toHaveTextContent('foo@example.com, joe@example.com, #worst-team');
- });
-});
diff --git a/static/app/utils/withIssueTags.tsx b/static/app/utils/withIssueTags.tsx
deleted file mode 100644
index 974483be4bb595..00000000000000
--- a/static/app/utils/withIssueTags.tsx
+++ /dev/null
@@ -1,126 +0,0 @@
-import {useEffect, useMemo, useState} from 'react';
-
-import type {SearchGroup} from 'sentry/components/smartSearchBar/types';
-import {ItemType} from 'sentry/components/smartSearchBar/types';
-import {escapeTagValue} from 'sentry/components/smartSearchBar/utils';
-import {IconStar, IconUser} from 'sentry/icons';
-import {t} from 'sentry/locale';
-import MemberListStore from 'sentry/stores/memberListStore';
-import TagStore from 'sentry/stores/tagStore';
-import TeamStore from 'sentry/stores/teamStore';
-import {useLegacyStore} from 'sentry/stores/useLegacyStore';
-import type {TagCollection} from 'sentry/types/group';
-import type {Organization} from 'sentry/types/organization';
-import type {User} from 'sentry/types/user';
-import getDisplayName from 'sentry/utils/getDisplayName';
-
-export interface WithIssueTagsProps {
- organization: Organization;
- tags: TagCollection;
-}
-
-type HocProps = {
- organization: Organization;
-};
-
-const uuidPattern = /[0-9a-f]{32}$/;
-const getUsername = ({isManaged, username, email}: User) => {
- // Users created via SAML receive unique UUID usernames. Use
- // their email in these cases, instead.
- if (username && uuidPattern.test(username)) {
- return email;
- }
- return !isManaged && username ? username : email;
-};
-
-function convertToSearchItem(value: string) {
- const escapedValue = escapeTagValue(value);
- return {
- value: escapedValue,
- desc: value,
- type: ItemType.TAG_VALUE,
- };
-}
-
-/**
- * HOC for getting tags and many useful issue attributes as 'tags' for use
- * in autocomplete selectors or condition builders.
- */
-function withIssueTags(
- WrappedComponent: React.ComponentType
-) {
- function ComponentWithTags(props: Omit & HocProps) {
- const {teams} = useLegacyStore(TeamStore);
- const {members} = useLegacyStore(MemberListStore);
- const [tags, setTags] = useState(
- TagStore.getIssueTags(props.organization)
- );
-
- const issueTags = useMemo((): TagCollection => {
- const usernames: string[] = members.map(getUsername);
- const userTeams = teams.filter(team => team.isMember).map(team => `#${team.slug}`);
- const nonMemberTeams = teams
- .filter(team => !team.isMember)
- .map(team => `#${team.slug}`);
-
- const suggestedAssignees: string[] = [
- 'me',
- 'my_teams',
- 'none',
- // New search builder only works with single value suggestions
- ...(props.organization.features.includes('issue-stream-search-query-builder')
- ? []
- : ['[me, my_teams, none]']),
- ...userTeams,
- ];
- const assignedValues: SearchGroup[] | string[] = [
- {
- title: t('Suggested Values'),
- type: 'header',
- icon: ,
- children: suggestedAssignees.map(convertToSearchItem),
- },
- {
- title: t('All Values'),
- type: 'header',
- icon: ,
- children: [
- ...usernames.map(convertToSearchItem),
- ...nonMemberTeams.map(convertToSearchItem),
- ],
- },
- ];
-
- return {
- ...tags,
- assigned: {
- ...tags.assigned,
- values: assignedValues,
- },
- bookmarks: {
- ...tags.bookmarks,
- values: ['me', ...usernames],
- },
- assigned_or_suggested: {
- ...tags.assigned_or_suggested,
- values: assignedValues,
- },
- };
- }, [members, teams, props.organization.features, tags]);
-
- // Listen to tag store updates and cleanup listener on unmount
- useEffect(() => {
- const unsubscribeTags = TagStore.listen(() => {
- setTags(TagStore.getIssueTags(props.organization));
- }, undefined);
-
- return () => unsubscribeTags();
- }, [props.organization, setTags]);
-
- return ;
- }
- ComponentWithTags.displayName = `withIssueTags(${getDisplayName(WrappedComponent)})`;
- return ComponentWithTags;
-}
-
-export default withIssueTags;
diff --git a/static/app/views/dashboards/widgetBuilder/buildSteps/filterResultsStep/issuesSearchBar.tsx b/static/app/views/dashboards/widgetBuilder/buildSteps/filterResultsStep/issuesSearchBar.tsx
index 67308eb2f2b13c..29f137b6ba40c4 100644
--- a/static/app/views/dashboards/widgetBuilder/buildSteps/filterResultsStep/issuesSearchBar.tsx
+++ b/static/app/views/dashboards/widgetBuilder/buildSteps/filterResultsStep/issuesSearchBar.tsx
@@ -1,14 +1,11 @@
-import {ClassNames} from '@emotion/react';
+import {useCallback} from 'react';
import styled from '@emotion/styled';
import type {SearchBarProps} from 'sentry/components/events/searchBar';
+import type {SearchQueryBuilderProps} from 'sentry/components/searchQueryBuilder';
import {t} from 'sentry/locale';
import type {Organization} from 'sentry/types/organization';
import type {WidgetQuery} from 'sentry/views/dashboards/types';
-import {
- MAX_MENU_HEIGHT,
- MAX_SEARCH_ITEMS,
-} from 'sentry/views/dashboards/widgetBuilder/utils';
import IssueListSearchBar from 'sentry/views/issueList/searchBar';
interface Props {
@@ -18,35 +15,21 @@ interface Props {
}
function IssuesSearchBar({onClose, widgetQuery, organization}: Props) {
- if (organization.features.includes('issue-stream-search-query-builder')) {
- return (
-
- );
- }
+ const onChange = useCallback>(
+ (query, state) => {
+ onClose?.(query, {validSearch: state.queryIsValid});
+ },
+ [onClose]
+ );
return (
-
- {({css}) => (
-
- )}
-
+
);
}
@@ -55,10 +38,3 @@ export {IssuesSearchBar};
const StyledIssueListSearchQueryBuilder = styled(IssueListSearchBar)`
flex-grow: 1;
`;
-
-const StyledIssueListSearchBar = styled(IssueListSearchBar)`
- flex-grow: 1;
- button:not([aria-label='Clear search']) {
- display: none;
- }
-`;
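The generic parameter on useCallback in the hunk above was mangled during extraction. A hedged reconstruction of the typed adapter, assuming the generic is NonNullable<SearchQueryBuilderProps['onChange']> (which matches the imported type and the (query, state) signature used in the callback body); the standalone hook is purely illustrative, in the patch this callback lives inside IssuesSearchBar:

import {useCallback} from 'react';

import type {SearchQueryBuilderProps} from 'sentry/components/searchQueryBuilder';

type OnClose = (query: string, state: {validSearch: boolean}) => void;

function useIssuesSearchOnChange(onClose?: OnClose) {
  return useCallback<NonNullable<SearchQueryBuilderProps['onChange']>>(
    (query, state) => {
      // Forward the new query along with whether it parsed as a valid search.
      onClose?.(query, {validSearch: state.queryIsValid});
    },
    [onClose]
  );
}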
diff --git a/static/app/views/issueDetails/groupEvents.spec.tsx b/static/app/views/issueDetails/groupEvents.spec.tsx
index 0bae2ade6aa6d4..3bbea6982c7f69 100644
--- a/static/app/views/issueDetails/groupEvents.spec.tsx
+++ b/static/app/views/issueDetails/groupEvents.spec.tsx
@@ -150,38 +150,9 @@ describe('groupEvents', () => {
expect(screen.getByText('sentry@sentry.sentry')).toBeInTheDocument();
});
- it('handles search', async () => {
+ it('pushes new query parameter when searching', async () => {
render( , {
router,
- organization,
- });
-
- const list = [
- {searchTerm: '', expectedQuery: ''},
- {searchTerm: 'test', expectedQuery: 'test'},
- {searchTerm: 'environment:production test', expectedQuery: 'test'},
- ];
-
- await waitForElementToBeRemoved(() => screen.queryByTestId('loading-indicator'));
- const input = screen.getByPlaceholderText('Search for events, users, tags, and more');
-
- for (const item of list) {
- await userEvent.clear(input);
- await userEvent.paste(`${item.searchTerm}`);
- await userEvent.keyboard('[Enter>]');
-
- expect(browserHistory.push).toHaveBeenCalledWith(
- expect.objectContaining({
- query: {query: item.expectedQuery},
- })
- );
- }
- });
-
- it('pushes new query parameter when searching (issue-stream-search-query-builder)', async () => {
- render( , {
- router,
- organization: {...organization, features: ['issue-stream-search-query-builder']},
});
await waitForElementToBeRemoved(() => screen.queryByTestId('loading-indicator'));
@@ -200,7 +171,7 @@ describe('groupEvents', () => {
});
});
- it('displays event filters and tags (issue-stream-search-query-builder)', async () => {
+ it('displays event filters and tags', async () => {
MockApiClient.addMockResponse({
url: '/organizations/org-slug/issues/1/tags/',
body: [{key: 'custom_tag', name: 'custom_tag', totalValues: 1}],
@@ -208,7 +179,6 @@ describe('groupEvents', () => {
render( , {
router,
- organization: {...organization, features: ['issue-stream-search-query-builder']},
});
await waitForElementToBeRemoved(() => screen.queryByTestId('loading-indicator'));
diff --git a/static/app/views/issueDetails/groupEvents.tsx b/static/app/views/issueDetails/groupEvents.tsx
index 2c175b69a45984..b77db4a5294298 100644
--- a/static/app/views/issueDetails/groupEvents.tsx
+++ b/static/app/views/issueDetails/groupEvents.tsx
@@ -1,7 +1,6 @@
import {useCallback} from 'react';
import styled from '@emotion/styled';
-import EventSearchBar from 'sentry/components/events/searchBar';
import * as Layout from 'sentry/components/layouts/thirds';
import {space} from 'sentry/styles/space';
import type {Group} from 'sentry/types/group';
@@ -55,24 +54,12 @@ function GroupEvents({params, location, group, environments}: Props) {
- {organization.features.includes('issue-stream-search-query-builder') ? (
-
- ) : (
-
- )}
+
{
url: '/organizations/org-slug/tags/',
body: [],
});
+ MockApiClient.addMockResponse({
+ url: '/organizations/org-slug/recent-searches/',
+ body: [],
+ });
});
- it('displays "Custom Search" label when no saved searches are selected', () => {
+ it('displays "Custom Search" label when no saved searches are selected', async () => {
render( );
- expect(screen.getByRole('button', {name: 'Custom Search'})).toBeInTheDocument();
+ expect(
+ await screen.findByRole('button', {name: 'Custom Search'})
+ ).toBeInTheDocument();
});
it('displays selected saved search label when one is selected', async () => {

diff --git a/static/app/views/issueList/issueSearchWithSavedSearches.tsx b/static/app/views/issueList/issueSearchWithSavedSearches.tsx
index 027dcf8ea70487..cec899b7d6b302 100644
--- a/static/app/views/issueList/issueSearchWithSavedSearches.tsx
+++ b/static/app/views/issueList/issueSearchWithSavedSearches.tsx
@@ -45,9 +45,8 @@ export function IssueSearchWithSavedSearches({
diff --git a/static/app/views/issueList/overview.spec.tsx b/static/app/views/issueList/overview.spec.tsx
index 8874621586bc8a..6361332c50b99b 100644
--- a/static/app/views/issueList/overview.spec.tsx
+++ b/static/app/views/issueList/overview.spec.tsx
@@ -63,6 +63,14 @@ const routerProps = {
location: router.location,
};
+function getSearchInput() {
+ const input = screen.getAllByRole('combobox', {name: 'Add a search term'}).at(-1);
+
+ expect(input).toBeInTheDocument();
+
+ return input!;
+}
+
describe('IssueList', function () {
let props;
@@ -76,7 +84,6 @@ describe('IssueList', function () {
name: 'Unresolved TypeErrors',
});
- let fetchTagsRequest: jest.Mock;
let fetchMembersRequest: jest.Mock;
const api = new MockApiClient();
const parseLinkHeaderSpy = jest.spyOn(parseLinkHeader, 'default');
@@ -86,6 +93,7 @@ describe('IssueList', function () {
// It should be safe to ignore this error, but we should remove the mock once we move to react testing library
// eslint-disable-next-line no-console
jest.spyOn(console, 'error').mockImplementation(jest.fn());
+ Object.defineProperty(Element.prototype, 'clientWidth', {value: 1000});
MockApiClient.addMockResponse({
url: '/organizations/org-slug/issues/',
@@ -128,7 +136,7 @@ describe('IssueList', function () {
},
],
});
- fetchTagsRequest = MockApiClient.addMockResponse({
+ MockApiClient.addMockResponse({
url: '/organizations/org-slug/tags/',
method: 'GET',
body: tags,
@@ -209,11 +217,16 @@ describe('IssueList', function () {
await waitForElementToBeRemoved(() => screen.getByTestId('loading-indicator'));
expect(savedSearchesRequest).toHaveBeenCalledTimes(1);
- await userEvent.click(await screen.findByDisplayValue(DEFAULT_QUERY));
+ await screen.findByRole('grid', {name: 'Create a search query'});
+ expect(screen.getByRole('row', {name: 'is:unresolved'})).toBeInTheDocument();
+ expect(screen.getByRole('button', {name: /custom search/i})).toBeInTheDocument();
+
+ await userEvent.click(getSearchInput());
// auxiliary requests being made
- expect(recentSearchesRequest).toHaveBeenCalledTimes(1);
- expect(fetchTagsRequest).toHaveBeenCalledTimes(1);
+ await waitFor(() => {
+ expect(recentSearchesRequest).toHaveBeenCalledTimes(1);
+ });
expect(fetchMembersRequest).toHaveBeenCalledTimes(1);
// primary /issues/ request
@@ -224,10 +237,6 @@ describe('IssueList', function () {
data: expect.stringContaining('is%3Aunresolved'),
})
);
-
- expect(screen.getByDisplayValue(DEFAULT_QUERY)).toBeInTheDocument();
-
- expect(screen.getByRole('button', {name: /custom search/i})).toBeInTheDocument();
});
it('loads with query in URL and pinned queries', async function () {
@@ -261,7 +270,7 @@ describe('IssueList', function () {
);
});
- expect(screen.getByDisplayValue('level:foo')).toBeInTheDocument();
+ expect(screen.getByRole('row', {name: 'level:foo'})).toBeInTheDocument();
// Tab shows "custom search"
expect(screen.getByRole('button', {name: 'Custom Search'})).toBeInTheDocument();
@@ -294,7 +303,7 @@ describe('IssueList', function () {
);
});
- expect(screen.getByDisplayValue('is:resolved')).toBeInTheDocument();
+ expect(screen.getByRole('row', {name: 'is:resolved'})).toBeInTheDocument();
// Organization saved search selector should have default saved search selected
expect(screen.getByRole('button', {name: 'My Default Search'})).toBeInTheDocument();
@@ -334,7 +343,7 @@ describe('IssueList', function () {
);
});
- expect(screen.getByDisplayValue('assigned:me')).toBeInTheDocument();
+ expect(screen.getByRole('row', {name: 'assigned:me'})).toBeInTheDocument();
// Organization saved search selector should have default saved search selected
expect(screen.getByRole('button', {name: 'Assigned to Me'})).toBeInTheDocument();
@@ -371,7 +380,7 @@ describe('IssueList', function () {
);
});
- expect(screen.getByDisplayValue('level:error')).toBeInTheDocument();
+ expect(screen.getByRole('row', {name: 'level:error'})).toBeInTheDocument();
// Organization saved search selector should have default saved search selected
expect(screen.getByRole('button', {name: 'Custom Search'})).toBeInTheDocument();
@@ -408,7 +417,7 @@ describe('IssueList', function () {
);
});
- expect(screen.getByDisplayValue('is:resolved')).toBeInTheDocument();
+ expect(screen.getByRole('row', {name: 'is:resolved'})).toBeInTheDocument();
// Organization saved search selector should have default saved search selected
expect(screen.getByRole('button', {name: 'My Default Search'})).toBeInTheDocument();
@@ -505,9 +514,10 @@ describe('IssueList', function () {
await waitForElementToBeRemoved(() => screen.getByTestId('loading-indicator'));
- const queryInput = screen.getByDisplayValue('is:resolved');
- await userEvent.clear(queryInput);
- await userEvent.type(queryInput, 'dogs{enter}');
+ await screen.findByRole('grid', {name: 'Create a search query'});
+ await userEvent.click(screen.getByRole('button', {name: 'Clear search query'}));
+ await userEvent.click(getSearchInput());
+ await userEvent.keyboard('dogs{Enter}');
expect(browserHistory.push).toHaveBeenLastCalledWith(
expect.objectContaining({
@@ -544,11 +554,13 @@ describe('IssueList', function () {
await waitForElementToBeRemoved(() => screen.getByTestId('loading-indicator'));
- const queryInput = screen.getByDisplayValue(DEFAULT_QUERY);
- await userEvent.clear(queryInput);
- await userEvent.type(queryInput, 'assigned:me level:fatal{enter}');
+ await screen.findByRole('grid', {name: 'Create a search query'});
+ await userEvent.click(screen.getByRole('button', {name: 'Clear search query'}));
+ await userEvent.click(getSearchInput());
+ await userEvent.paste('assigned:me level:fatal');
+ await userEvent.keyboard('{Enter}');
- expect((browserHistory.push as jest.Mock).mock.calls[0][0]).toEqual(
+ expect(browserHistory.push as jest.Mock).toHaveBeenCalledWith(
expect.objectContaining({
query: expect.objectContaining({
query: 'assigned:me level:fatal',
@@ -937,19 +949,22 @@ describe('IssueList', function () {
router,
});
- const queryInput = screen.getByDisplayValue(DEFAULT_QUERY);
- await userEvent.clear(queryInput);
- await userEvent.type(queryInput, 'is:ignored{enter}');
+ await userEvent.click(screen.getByRole('button', {name: 'Clear search query'}));
+ await userEvent.click(getSearchInput());
+ await userEvent.paste('is:ignored');
+ await userEvent.keyboard('{enter}');
- expect(browserHistory.push).toHaveBeenCalledWith({
- pathname: '/organizations/org-slug/issues/',
- query: {
- environment: [],
- project: [parseInt(project.id, 10)],
- query: 'is:ignored',
- statsPeriod: '14d',
- referrer: 'issue-list',
- },
+ await waitFor(() => {
+ expect(browserHistory.push).toHaveBeenCalledWith({
+ pathname: '/organizations/org-slug/issues/',
+ query: {
+ environment: [],
+ project: [parseInt(project.id, 10)],
+ query: 'is:ignored',
+ statsPeriod: '14d',
+ referrer: 'issue-list',
+ },
+ });
});
});
});
@@ -958,7 +973,6 @@ describe('IssueList', function () {
render( , {router});
await waitFor(() => {
- expect(fetchTagsRequest).toHaveBeenCalled();
expect(fetchMembersRequest).toHaveBeenCalled();
});
});
@@ -977,7 +991,7 @@ describe('IssueList', function () {
fetchDataMock.mockReset();
});
- it('fetches data on selection change', function () {
+ it('fetches data on selection change', async function () {
const {rerender} = render( , {
router,
});
@@ -990,10 +1004,12 @@ describe('IssueList', function () {
/>
);
- expect(fetchDataMock).toHaveBeenCalled();
+ await waitFor(() => {
+ expect(fetchDataMock).toHaveBeenCalled();
+ });
});
- it('fetches data on savedSearch change', function () {
+ it('fetches data on savedSearch change', async function () {
const {rerender} = render( , {
router,
});
@@ -1006,33 +1022,38 @@ describe('IssueList', function () {
/>
);
- expect(fetchDataMock).toHaveBeenCalled();
+ await waitFor(() => {
+ expect(fetchDataMock).toHaveBeenCalled();
+ });
});
- it('uses correct statsPeriod when fetching issues list and no datetime given', function () {
+ it('uses correct statsPeriod when fetching issues list and no datetime given', async function () {
const {rerender} = render( , {
router,
});
const selection = {projects: [99], environments: [], datetime: {}};
rerender( );
- expect(fetchDataMock).toHaveBeenLastCalledWith(
- '/organizations/org-slug/issues/',
- expect.objectContaining({
- data: 'collapse=stats&collapse=unhandled&expand=owners&expand=inbox&limit=25&project=99&query=is%3Aunresolved%20issue.priority%3A%5Bhigh%2C%20medium%5D&savedSearch=1&shortIdLookup=1&statsPeriod=14d',
- })
- );
+ await waitFor(() => {
+ expect(fetchDataMock).toHaveBeenLastCalledWith(
+ '/organizations/org-slug/issues/',
+ expect.objectContaining({
+ data: 'collapse=stats&collapse=unhandled&expand=owners&expand=inbox&limit=25&project=99&query=is%3Aunresolved%20issue.priority%3A%5Bhigh%2C%20medium%5D&savedSearch=1&shortIdLookup=1&statsPeriod=14d',
+ })
+ );
+ });
});
});
describe('componentDidUpdate fetching members', function () {
- it('fetches memberlist and tags list on project change', function () {
+ it('fetches memberlist on project change', async function () {
const {rerender} = render( , {
router,
});
// Called during componentDidMount
- expect(fetchMembersRequest).toHaveBeenCalledTimes(1);
- expect(fetchTagsRequest).toHaveBeenCalledTimes(1);
+ await waitFor(() => {
+ expect(fetchMembersRequest).toHaveBeenCalled();
+ });
const selection = {
projects: [99],
@@ -1040,17 +1061,26 @@ describe('IssueList', function () {
datetime: {period: '24h'},
};
rerender( );
- expect(fetchMembersRequest).toHaveBeenCalledTimes(2);
- expect(fetchTagsRequest).toHaveBeenCalledTimes(2);
+
+ await waitFor(() => {
+ expect(fetchMembersRequest).toHaveBeenCalledWith(
+ expect.anything(),
+ expect.objectContaining({
+ query: {
+ project: selection.projects.map(p => p.toString()),
+ },
+ })
+ );
+ });
});
});
describe('render states', function () {
- it('displays the loading icon when saved searches are loading', function () {
+ it('displays the loading icon when saved searches are loading', async function () {
render( , {
router,
});
- expect(screen.getByTestId('loading-indicator')).toBeInTheDocument();
+ expect(await screen.findByTestId('loading-indicator')).toBeInTheDocument();
});
it('displays an error when issues fail to load', async function () {
@@ -1092,10 +1122,9 @@ describe('IssueList', function () {
render( , {router});
- await userEvent.type(
- screen.getByDisplayValue(DEFAULT_QUERY),
- ' level:error{enter}'
- );
+ await screen.findByRole('grid', {name: 'Create a search query'});
+ await userEvent.click(getSearchInput());
+ await userEvent.keyboard('foo{enter}');
expect(
await screen.findByText(/We couldn't find any issues that matched your filters/i)
@@ -1311,7 +1340,7 @@ describe('IssueList', function () {
});
});
- it('displays a count that represents the current page', function () {
+ it('displays a count that represents the current page', async function () {
MockApiClient.addMockResponse({
url: '/organizations/org-slug/issues/',
body: [...new Array(25)].map((_, i) => ({id: i})),
@@ -1349,7 +1378,9 @@ describe('IssueList', function () {
router: newRouter,
});
- expect(screen.getByText(textWithMarkupMatcher('1-25 of 500'))).toBeInTheDocument();
+ await waitFor(() => {
+ expect(screen.getByText(textWithMarkupMatcher('1-25 of 500'))).toBeInTheDocument();
+ });
parseLinkHeaderSpy.mockReturnValue({
next: {
@@ -1365,7 +1396,9 @@ describe('IssueList', function () {
});
rerender( );
- expect(screen.getByText(textWithMarkupMatcher('26-50 of 500'))).toBeInTheDocument();
+ await waitFor(() => {
+ expect(screen.getByText(textWithMarkupMatcher('26-50 of 500'))).toBeInTheDocument();
+ });
});
describe('project low trends queue alert', function () {
@@ -1375,18 +1408,20 @@ describe('IssueList', function () {
act(() => ProjectsStore.reset());
});
- it('does not render event processing alert', function () {
+ it('does not render event processing alert', async function () {
act(() => ProjectsStore.loadInitialData([project]));
render( , {
router: newRouter,
});
- expect(screen.queryByText(/event processing/i)).not.toBeInTheDocument();
+ await waitFor(() => {
+ expect(screen.queryByText(/event processing/i)).not.toBeInTheDocument();
+ });
});
describe('renders alert', function () {
- it('for one project', function () {
+ it('for one project', async function () {
act(() =>
ProjectsStore.loadInitialData([
{...project, eventProcessing: {symbolicationDegraded: true}},
@@ -1395,12 +1430,14 @@ describe('IssueList', function () {
render( , {router});
- expect(
- screen.getByText(/Event Processing for this project is currently degraded/i)
- ).toBeInTheDocument();
+ await waitFor(() => {
+ expect(
+ screen.getByText(/Event Processing for this project is currently degraded/i)
+ ).toBeInTheDocument();
+ });
});
- it('for multiple projects', function () {
+ it('for multiple projects', async function () {
const projectBar = ProjectFixture({
id: '3560',
name: 'Bar Project',
@@ -1435,13 +1472,15 @@ describe('IssueList', function () {
}
);
- expect(
- screen.getByText(
- textWithMarkupMatcher(
- 'Event Processing for the project-slug, project-slug-bar projects is currently degraded.'
+ await waitFor(() => {
+ expect(
+ screen.getByText(
+ textWithMarkupMatcher(
+ 'Event Processing for the project-slug, project-slug-bar projects is currently degraded.'
+ )
)
- )
- ).toBeInTheDocument();
+ ).toBeInTheDocument();
+ });
});
});
});
diff --git a/static/app/views/issueList/overview.tsx b/static/app/views/issueList/overview.tsx
index 269d5cbac75054..515255f6402f4b 100644
--- a/static/app/views/issueList/overview.tsx
+++ b/static/app/views/issueList/overview.tsx
@@ -12,7 +12,6 @@ import * as qs from 'query-string';
import {addMessage} from 'sentry/actionCreators/indicator';
import {fetchOrgMembers, indexMembersByProject} from 'sentry/actionCreators/members';
-import {fetchTagValues, loadOrganizationTags} from 'sentry/actionCreators/tags';
import type {Client} from 'sentry/api';
import ErrorBoundary from 'sentry/components/errorBoundary';
import * as Layout from 'sentry/components/layouts/thirds';
@@ -27,13 +26,7 @@ import IssueListCacheStore from 'sentry/stores/IssueListCacheStore';
import SelectedGroupStore from 'sentry/stores/selectedGroupStore';
import {space} from 'sentry/styles/space';
import type {PageFilters} from 'sentry/types/core';
-import type {
- BaseGroup,
- Group,
- PriorityLevel,
- SavedSearch,
- TagCollection,
-} from 'sentry/types/group';
+import type {BaseGroup, Group, PriorityLevel, SavedSearch} from 'sentry/types/group';
import {GroupStatus, IssueCategory} from 'sentry/types/group';
import type {RouteComponentProps} from 'sentry/types/legacyReactRouter';
import type {Organization} from 'sentry/types/organization';
@@ -51,7 +44,6 @@ import type {WithRouteAnalyticsProps} from 'sentry/utils/routeAnalytics/withRout
import withRouteAnalytics from 'sentry/utils/routeAnalytics/withRouteAnalytics';
import normalizeUrl from 'sentry/utils/url/normalizeUrl';
import withApi from 'sentry/utils/withApi';
-import withIssueTags from 'sentry/utils/withIssueTags';
import withOrganization from 'sentry/utils/withOrganization';
import withPageFilters from 'sentry/utils/withPageFilters';
import withSavedSearches from 'sentry/utils/withSavedSearches';
@@ -98,7 +90,6 @@ type Props = {
savedSearches: SavedSearch[];
selectedSearchId: string;
selection: PageFilters;
- tags: TagCollection;
} & RouteComponentProps<{}, {searchId?: string}> &
WithRouteAnalyticsProps;
@@ -190,7 +181,6 @@ class IssueListOverview extends Component {
this.fetchData();
}
}
- this.fetchTags();
this.fetchMemberList();
this.props.setRouteAnalyticsParams?.({
issue_views_enabled: this.props.organization.features.includes(
@@ -216,7 +206,6 @@ class IssueListOverview extends Component {
if (!isEqual(prevProps.selection.projects, this.props.selection.projects)) {
this.loadFromCache();
this.fetchMemberList();
- this.fetchTags();
}
const selectionChanged = !isEqual(prevProps.selection, this.props.selection);
@@ -464,13 +453,6 @@ class IssueListOverview extends Component {
);
}
- fetchTags() {
- if (!this.props.organization.features.includes('issue-stream-search-query-builder')) {
- const {api, organization, selection} = this.props;
- loadOrganizationTags(api, organization.slug, selection);
- }
- }
-
fetchStats = (groups: string[]) => {
// If we have no groups to fetch, just skip stats
if (!groups.length) {
@@ -1146,21 +1128,6 @@ class IssueListOverview extends Component {
}
};
- tagValueLoader = (key: string, search: string) => {
- const {organization} = this.props;
- const projectIds = this.getSelectedProjectIds();
- const endpointParams = this.getEndpointParams();
-
- return fetchTagValues({
- api: this.props.api,
- orgSlug: organization.slug,
- tagKey: key,
- search,
- projectIds,
- endpointParams: endpointParams as any,
- });
- };
-
getPageCounts = () => {
const {location} = this.props;
const {pageLinks, queryCount, groupIds} = this.state;
@@ -1308,9 +1275,7 @@ class IssueListOverview extends Component {
export default withRouteAnalytics(
withApi(
withPageFilters(
- withSavedSearches(
- withOrganization(withIssueTags(Sentry.withProfiler(IssueListOverview)))
- )
+ withSavedSearches(withOrganization(Sentry.withProfiler(IssueListOverview)))
)
)
);
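With withIssueTags and fetchTags removed from the overview, tag loading now happens where the tags are consumed: the search bar fetches them itself via the useFetchIssueTags hook (see searchBar.tsx below). A minimal sketch of that pattern, with parameters taken from the hook's call site in this patch; the wrapper hook name is hypothetical:

import useOrganization from 'sentry/utils/useOrganization';
import usePageFilters from 'sentry/utils/usePageFilters';
import {useFetchIssueTags} from 'sentry/views/issueList/utils/useFetchIssueTags';

// Components that previously received `tags` from the withIssueTags HOC can
// read them from the hook instead, scoped to the current page filters.
function useIssueTagCollection() {
  const organization = useOrganization();
  const {selection: pageFilters} = usePageFilters();

  const {tags} = useFetchIssueTags({
    org: organization,
    projectIds: pageFilters.projects.map(id => id.toString()),
    keepPreviousData: true,
  });

  return tags;
}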
diff --git a/static/app/views/issueList/savedIssueSearches.spec.tsx b/static/app/views/issueList/savedIssueSearches.spec.tsx
index cb47fda60df04c..a5992abc203ef8 100644
--- a/static/app/views/issueList/savedIssueSearches.spec.tsx
+++ b/static/app/views/issueList/savedIssueSearches.spec.tsx
@@ -69,6 +69,10 @@ describe('SavedIssueSearches', function () {
url: '/organizations/org-slug/tags/',
body: [],
});
+ MockApiClient.addMockResponse({
+ url: '/organizations/org-slug/recent-searches/',
+ body: [],
+ });
});
it('displays saved searches with correct text and in correct sections', async function () {
diff --git a/static/app/views/issueList/searchBar.spec.tsx b/static/app/views/issueList/searchBar.spec.tsx
index b4fb4250da2d6c..51282b824dfc88 100644
--- a/static/app/views/issueList/searchBar.spec.tsx
+++ b/static/app/views/issueList/searchBar.spec.tsx
@@ -9,22 +9,13 @@ import {IsFieldValues} from 'sentry/utils/fields';
import IssueListSearchBar from 'sentry/views/issueList/searchBar';
describe('IssueListSearchBar', function () {
- let recentSearchMock;
- let defaultProps;
-
- const {router, organization} = initializeOrg();
+ const {organization} = initializeOrg();
beforeEach(function () {
TagStore.reset();
TagStore.loadTagsSuccess(TagsFixture());
- defaultProps = {
- organization,
- query: '',
- onSearch: jest.fn(),
- };
-
- recentSearchMock = MockApiClient.addMockResponse({
+ MockApiClient.addMockResponse({
url: '/organizations/org-slug/recent-searches/',
method: 'GET',
body: [],
@@ -35,145 +26,9 @@ describe('IssueListSearchBar', function () {
MockApiClient.clearMockResponses();
});
- describe('updateAutoCompleteItems()', function () {
- it('sets state with complete tag', async function () {
- const tagValuesMock = MockApiClient.addMockResponse({
- url: '/organizations/org-slug/tags/url/values/',
- method: 'GET',
- body: [],
- });
-
- render( , {
- router,
- });
-
- await userEvent.click(screen.getByRole('textbox'));
- await userEvent.paste('url:"fu"');
-
- expect(tagValuesMock).toHaveBeenLastCalledWith(
- expect.anything(),
- expect.objectContaining({
- query: expect.objectContaining({
- query: 'fu',
- }),
- })
- );
-
- expect(screen.getByTestId('smart-search-dropdown')).toBeInTheDocument();
- });
-
- it('sets state when value has colon', async function () {
- const tagValuesMock = MockApiClient.addMockResponse({
- url: '/organizations/org-slug/tags/url/values/',
- method: 'GET',
- body: [],
- });
-
- render( , {
- router,
- });
-
- await userEvent.click(screen.getByRole('textbox'));
- await userEvent.paste('url:', {delay: null});
-
- expect(tagValuesMock).toHaveBeenCalled();
- });
-
- it('does not request values when tag is `timesSeen`', async function () {
- const tagValuesMock = MockApiClient.addMockResponse({
- url: '/organizations/org-slug/tags/url/values/',
- method: 'GET',
- body: [],
- });
-
- render( , {
- router,
- });
-
- await userEvent.click(screen.getByRole('textbox'));
- await userEvent.paste('timesSeen:', {delay: null});
-
- expect(tagValuesMock).not.toHaveBeenCalled();
- });
- });
-
- describe('Recent Searches', function () {
- it('saves search query as a recent search', async function () {
- const tagValuesMock = MockApiClient.addMockResponse({
- url: '/organizations/org-slug/tags/url/values/',
- method: 'GET',
- body: [],
- });
- const saveRecentSearch = MockApiClient.addMockResponse({
- url: '/organizations/org-slug/recent-searches/',
- method: 'POST',
- body: {},
- });
- const onSearch = jest.fn();
-
- render( , {
- router,
- });
-
- await userEvent.click(screen.getByRole('textbox'));
- await userEvent.paste('url:"fu"');
-
- expect(tagValuesMock).toHaveBeenLastCalledWith(
- expect.anything(),
- expect.objectContaining({
- query: expect.objectContaining({
- query: 'fu',
- }),
- })
- );
-
- expect(screen.getByTestId('smart-search-dropdown')).toBeInTheDocument();
-
- await userEvent.keyboard('{Enter}');
- expect(onSearch).toHaveBeenCalledWith('url:"fu"');
-
- expect(saveRecentSearch).toHaveBeenCalledWith(
- expect.anything(),
- expect.objectContaining({
- data: {
- query: 'url:"fu"',
- type: 0,
- },
- })
- );
- });
-
- it('queries for recent searches', async function () {
- MockApiClient.addMockResponse({
- url: '/organizations/org-slug/tags/url/values/',
- method: 'GET',
- body: [],
- });
-
- render( , {router});
-
- await userEvent.click(screen.getByRole('textbox'));
- await userEvent.paste('is:', {delay: null});
-
- expect(recentSearchMock).toHaveBeenCalledWith(
- expect.anything(),
- expect.objectContaining({
- query: {
- query: 'is:',
- limit: 3,
- type: 0,
- },
- })
- );
- });
- });
-
describe('Tags and Fields', function () {
- const {router: routerWithFlag, organization: orgWithFlag} = initializeOrg();
- orgWithFlag.features = ['issue-stream-search-query-builder'];
-
- const newDefaultProps = {
- organization: orgWithFlag,
+ const defaultProps = {
+ organization,
query: '',
statsPeriod: '7d',
onSearch: jest.fn(),
@@ -185,9 +40,7 @@ describe('IssueListSearchBar', function () {
body: [],
});
- render( , {
- router: routerWithFlag,
- });
+ render( );
await userEvent.click(screen.getByRole('combobox', {name: 'Add a search term'}));
await userEvent.paste('is:', {delay: null});
@@ -206,12 +59,10 @@ describe('IssueListSearchBar', function () {
body: [{key: 'someTag', name: 'Some Tag'}],
});
- render( , {
- router: routerWithFlag,
- });
+ render( );
await userEvent.click(screen.getByRole('combobox', {name: 'Add a search term'}));
- await userEvent.click(screen.getByRole('button', {name: 'Event Tags'}));
+ await userEvent.click(await screen.findByRole('button', {name: 'Event Tags'}));
expect(await screen.findByRole('option', {name: 'someTag'})).toBeInTheDocument();
});
@@ -222,11 +73,7 @@ describe('IssueListSearchBar', function () {
body: [{key: 'someTag', name: 'Some Tag'}],
});
- defaultProps.organization.features = ['issue-stream-search-query-builder'];
-
- render( , {
- router: routerWithFlag,
- });
+ render( );
await userEvent.click(screen.getByRole('combobox', {name: 'Add a search term'}));
await userEvent.paste('has:', {delay: null});
@@ -239,11 +86,8 @@ describe('IssueListSearchBar', function () {
});
describe('Tag Values', function () {
- const {router: routerWithFlag, organization: orgWithFlag} = initializeOrg();
- orgWithFlag.features = ['issue-stream-search-query-builder'];
-
const newDefaultProps = {
- organization: orgWithFlag,
+ organization,
query: '',
statsPeriod: '7d',
onSearch: jest.fn(),
@@ -281,9 +125,7 @@ describe('IssueListSearchBar', function () {
body: tagValueResponse,
});
- render( , {
- router: routerWithFlag,
- });
+ render( );
await userEvent.click(screen.getByRole('combobox', {name: 'Add a search term'}));
await userEvent.paste(tagKey, {delay: null});
diff --git a/static/app/views/issueList/searchBar.tsx b/static/app/views/issueList/searchBar.tsx
index a054fcdc7b039d..40eb9fd73620fe 100644
--- a/static/app/views/issueList/searchBar.tsx
+++ b/static/app/views/issueList/searchBar.tsx
@@ -1,52 +1,26 @@
import {useCallback, useMemo} from 'react';
-import styled from '@emotion/styled';
import orderBy from 'lodash/orderBy';
// eslint-disable-next-line no-restricted-imports
import {fetchTagValues} from 'sentry/actionCreators/tags';
-import {SearchQueryBuilder} from 'sentry/components/searchQueryBuilder';
+import {
+ SearchQueryBuilder,
+ type SearchQueryBuilderProps,
+} from 'sentry/components/searchQueryBuilder';
import type {FilterKeySection} from 'sentry/components/searchQueryBuilder/types';
-import SmartSearchBar from 'sentry/components/smartSearchBar';
-import type {SearchGroup} from 'sentry/components/smartSearchBar/types';
-import {ItemType} from 'sentry/components/smartSearchBar/types';
-import {IconStar} from 'sentry/icons';
import {t} from 'sentry/locale';
-import {space} from 'sentry/styles/space';
import {SavedSearchType, type Tag, type TagCollection} from 'sentry/types/group';
import type {Organization} from 'sentry/types/organization';
import {getUtcDateString} from 'sentry/utils/dates';
-import {FieldKind, getFieldDefinition} from 'sentry/utils/fields';
+import {FieldKind} from 'sentry/utils/fields';
import useApi from 'sentry/utils/useApi';
import usePageFilters from 'sentry/utils/usePageFilters';
-import type {WithIssueTagsProps} from 'sentry/utils/withIssueTags';
-import withIssueTags from 'sentry/utils/withIssueTags';
import {Dataset} from 'sentry/views/alerts/rules/metric/types';
import {mergeAndSortTagValues} from 'sentry/views/issueDetails/utils';
import {makeGetIssueTagValues} from 'sentry/views/issueList/utils/getIssueTagValues';
import {useFetchIssueTags} from 'sentry/views/issueList/utils/useFetchIssueTags';
-const getSupportedTags = (supportedTags: TagCollection): TagCollection => {
- return Object.fromEntries(
- Object.keys(supportedTags).map(key => [
- key,
- {
- ...supportedTags[key],
- kind:
- getFieldDefinition(key)?.kind ??
- (supportedTags[key].predefined ? FieldKind.FIELD : FieldKind.TAG),
- },
- ])
- );
-};
-
-const getFilterKeySections = (
- tags: TagCollection,
- organization: Organization
-): FilterKeySection[] => {
- if (!organization.features.includes('issue-stream-search-query-builder')) {
- return [];
- }
-
+const getFilterKeySections = (tags: TagCollection): FilterKeySection[] => {
const allTags: Tag[] = Object.values(tags).filter(
tag => !EXCLUDED_TAGS.includes(tag.key)
);
@@ -83,20 +57,24 @@ const getFilterKeySections = (
];
};
-interface Props extends React.ComponentProps, WithIssueTagsProps {
+interface Props extends Partial {
organization: Organization;
}
const EXCLUDED_TAGS = ['environment'];
-function IssueListSearchBar({organization, tags, onClose, ...props}: Props) {
+function IssueListSearchBar({
+ organization,
+ searchSource = 'issues',
+ initialQuery = '',
+ ...props
+}: Props) {
const api = useApi();
const {selection: pageFilters} = usePageFilters();
const {tags: issueTags} = useFetchIssueTags({
org: organization,
projectIds: pageFilters.projects.map(id => id.toString()),
keepPreviousData: true,
- enabled: organization.features.includes('issue-stream-search-query-builder'),
start: pageFilters.datetime.start
? getUtcDateString(pageFilters.datetime.start)
: undefined,
@@ -162,127 +140,23 @@ function IssueListSearchBar({organization, tags, onClose, ...props}: Props) {
[tagValueLoader]
);
- const recommendedGroup: SearchGroup = {
- title: t('Popular Filters'),
- type: 'header',
- icon: ,
- childrenWrapper: RecommendedWrapper,
- children: [
- {
- type: ItemType.RECOMMENDED,
- kind: FieldKind.FIELD,
- title: t('Issue Category'),
- value: 'issue.category:',
- },
- {
- type: ItemType.RECOMMENDED,
- kind: FieldKind.FIELD,
- title: t('Error Level'),
- value: 'level:',
- },
- {
- type: ItemType.RECOMMENDED,
- kind: FieldKind.FIELD,
- title: t('Assignee'),
- value: 'assigned_or_suggested:',
- },
- {
- type: ItemType.RECOMMENDED,
- kind: FieldKind.FIELD,
- title: t('Unhandled Events'),
- value: 'error.unhandled:true ',
- },
- {
- type: ItemType.RECOMMENDED,
- kind: FieldKind.FIELD,
- title: t('Latest Release'),
- value: 'release:latest ',
- },
- {
- type: ItemType.RECOMMENDED,
- kind: FieldKind.TAG,
- title: t('Custom Tags'),
- // Shows only tags when clicked
- applyFilter: item => item.kind === FieldKind.TAG,
- },
- ],
- };
const filterKeySections = useMemo(() => {
- return getFilterKeySections(issueTags, organization);
- }, [organization, issueTags]);
-
- const onChange = useCallback(
- (value: string) => {
- onClose?.(value, {validSearch: true});
- },
- [onClose]
- );
-
- if (organization.features.includes('issue-stream-search-query-builder')) {
- return (
-
- );
- }
+ return getFilterKeySections(issueTags);
+ }, [issueTags]);
return (
-
);
}
-export default withIssueTags(IssueListSearchBar);
-
-// Using grid-template-rows to order the items top to bottom, then left to right
-const RecommendedWrapper = styled('div')`
- display: grid;
- grid-template-rows: 1fr 1fr 1fr;
- grid-auto-flow: column;
- gap: ${space(1)};
- padding: ${space(1)};
- text-align: left;
- line-height: 1.2;
-
- & > li {
- ${p => p.theme.overflowEllipsis}
- border-radius: ${p => p.theme.borderRadius};
- border: 1px solid ${p => p.theme.border};
- padding: ${space(1)} ${space(1.5)};
- margin: 0;
- }
-
- @media (min-width: ${p => p.theme.breakpoints.small}) {
- grid-template-rows: 1fr 1fr;
- gap: ${space(1.5)};
- padding: ${space(1.5)};
- text-align: center;
-
- & > li {
- padding: ${space(1.5)} ${space(2)};
- }
- }
-`;
+export default IssueListSearchBar;
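For reference, a sketch of how callers use the simplified search bar after this patch; the props shown (organization, initialQuery, searchSource, onSearch) all appear in the hunks or call sites above, while the surrounding component is hypothetical:

import useOrganization from 'sentry/utils/useOrganization';
import IssueListSearchBar from 'sentry/views/issueList/searchBar';

function ExampleIssueSearch({onSearch}: {onSearch: (query: string) => void}) {
  const organization = useOrganization();

  return (
    <IssueListSearchBar
      organization={organization}
      initialQuery="is:unresolved"
      searchSource="example"
      onSearch={onSearch}
    />
  );
}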
diff --git a/static/app/views/issueList/utils/useFetchIssueTags.tsx b/static/app/views/issueList/utils/useFetchIssueTags.tsx
index e7139515af0e28..7d8b669d76ff28 100644
--- a/static/app/views/issueList/utils/useFetchIssueTags.tsx
+++ b/static/app/views/issueList/utils/useFetchIssueTags.tsx
@@ -6,7 +6,6 @@ import {escapeTagValue} from 'sentry/components/smartSearchBar/utils';
import {IconStar, IconUser} from 'sentry/icons';
import {t} from 'sentry/locale';
import MemberListStore from 'sentry/stores/memberListStore';
-import {getBuiltInTags} from 'sentry/stores/tagStore';
import TeamStore from 'sentry/stores/teamStore';
import {useLegacyStore} from 'sentry/stores/useLegacyStore';
import {
@@ -20,7 +19,14 @@ import {
import type {Organization} from 'sentry/types/organization';
import type {User} from 'sentry/types/user';
import {SEMVER_TAGS} from 'sentry/utils/discover/fields';
-import {FieldKey, FieldKind, IsFieldValues, ISSUE_FIELDS} from 'sentry/utils/fields';
+import {
+ FieldKey,
+ FieldKind,
+ IsFieldValues,
+ ISSUE_EVENT_PROPERTY_FIELDS,
+ ISSUE_FIELDS,
+ ISSUE_PROPERTY_FIELDS,
+} from 'sentry/utils/fields';
import {Dataset} from 'sentry/views/alerts/rules/metric/types';
type UseFetchIssueTagsParams = {
@@ -34,6 +40,17 @@ type UseFetchIssueTagsParams = {
useCache?: boolean;
};
+const PREDEFINED_FIELDS = {
+ ...ISSUE_PROPERTY_FIELDS.reduce((acc, tag) => {
+ acc[tag] = {key: tag, name: tag, predefined: true, kind: FieldKind.ISSUE_FIELD};
+ return acc;
+ }, {}),
+ ...ISSUE_EVENT_PROPERTY_FIELDS.reduce((acc, tag) => {
+ acc[tag] = {key: tag, name: tag, predefined: false, kind: FieldKind.EVENT_FIELD};
+ return acc;
+ }, {}),
+};
+
// "environment" is excluded because it should be handled by the environment page filter
const EXCLUDED_TAGS = ['environment'];
@@ -85,16 +102,7 @@ export const useFetchIssueTags = ({
.filter(team => !team.isMember)
.map(team => `#${team.slug}`);
- const suggestedAssignees: string[] = [
- 'me',
- 'my_teams',
- 'none',
- // New search builder only works with single value suggestions
- ...(org.features.includes('issue-stream-search-query-builder')
- ? []
- : ['[me, my_teams, none]']),
- ...userTeams,
- ];
+ const suggestedAssignees: string[] = ['me', 'my_teams', 'none', ...userTeams];
const assignedValues: SearchGroup[] | string[] = [
{
@@ -138,7 +146,7 @@ export const useFetchIssueTags = ({
delete allTagsCollection[excludedTag];
}
- const additionalTags = builtInIssuesFields(org, allTagsCollection, assignedValues, [
+ const additionalTags = builtInIssuesFields(allTagsCollection, assignedValues, [
'me',
...usernames,
]);
@@ -147,7 +155,7 @@ export const useFetchIssueTags = ({
...allTagsCollection,
...additionalTags,
};
- }, [eventsTagsQuery.data, issuePlatformTagsQuery.data, members, org, teams]);
+ }, [eventsTagsQuery.data, issuePlatformTagsQuery.data, members, teams]);
return {
tags: allTags,
@@ -157,7 +165,6 @@ export const useFetchIssueTags = ({
};
function builtInIssuesFields(
- org: Organization,
currentTags: TagCollection,
assigneeFieldValues: SearchGroup[] | string[] = [],
bookmarksValues: string[] = []
@@ -169,9 +176,7 @@ function builtInIssuesFields(
[tag.key]: {
predefined: false,
...tag,
- kind: org.features.includes('issue-stream-search-query-builder')
- ? FieldKind.EVENT_FIELD
- : FieldKind.FIELD,
+ kind: FieldKind.EVENT_FIELD,
},
};
},
@@ -181,11 +186,10 @@ function builtInIssuesFields(
...Object.values(currentTags).map(tag => tag.key),
...Object.values(SEMVER_TAGS).map(tag => tag.key),
].sort();
- const builtInTags = getBuiltInTags(org);
const tagCollection: TagCollection = {
[FieldKey.IS]: {
- ...builtInTags[FieldKey.IS],
+ ...PREDEFINED_FIELDS[FieldKey.IS],
key: FieldKey.IS,
name: 'Status',
values: Object.values(IsFieldValues),
@@ -193,34 +197,34 @@ function builtInIssuesFields(
predefined: true,
},
[FieldKey.HAS]: {
- ...builtInTags[FieldKey.HAS],
+ ...PREDEFINED_FIELDS[FieldKey.HAS],
key: FieldKey.HAS,
name: 'Has Tag',
values: hasFieldValues,
predefined: true,
},
[FieldKey.ASSIGNED]: {
- ...builtInTags[FieldKey.ASSIGNED],
+ ...PREDEFINED_FIELDS[FieldKey.ASSIGNED],
key: FieldKey.ASSIGNED,
name: 'Assigned To',
values: assigneeFieldValues,
predefined: true,
},
[FieldKey.ASSIGNED_OR_SUGGESTED]: {
- ...builtInTags[FieldKey.ASSIGNED_OR_SUGGESTED],
+ ...PREDEFINED_FIELDS[FieldKey.ASSIGNED_OR_SUGGESTED],
name: 'Assigned or Suggested',
isInput: true,
values: assigneeFieldValues,
predefined: true,
},
[FieldKey.BOOKMARKS]: {
- ...builtInTags[FieldKey.BOOKMARKS],
+ ...PREDEFINED_FIELDS[FieldKey.BOOKMARKS],
name: 'Bookmarked By',
values: bookmarksValues,
predefined: true,
},
[FieldKey.ISSUE_CATEGORY]: {
- ...builtInTags[FieldKey.ISSUE_CATEGORY],
+ ...PREDEFINED_FIELDS[FieldKey.ISSUE_CATEGORY],
name: 'Issue Category',
values: [
IssueCategory.ERROR,
@@ -232,7 +236,7 @@ function builtInIssuesFields(
predefined: true,
},
[FieldKey.ISSUE_TYPE]: {
- ...builtInTags[FieldKey.ISSUE_TYPE],
+ ...PREDEFINED_FIELDS[FieldKey.ISSUE_TYPE],
name: 'Issue Type',
values: [
IssueType.PERFORMANCE_N_PLUS_ONE_DB_QUERIES,
@@ -259,31 +263,31 @@ function builtInIssuesFields(
predefined: true,
},
[FieldKey.LAST_SEEN]: {
- ...builtInTags[FieldKey.LAST_SEEN],
+ ...PREDEFINED_FIELDS[FieldKey.LAST_SEEN],
name: 'Last Seen',
values: [],
predefined: false,
},
[FieldKey.FIRST_SEEN]: {
- ...builtInTags[FieldKey.FIRST_SEEN],
+ ...PREDEFINED_FIELDS[FieldKey.FIRST_SEEN],
name: 'First Seen',
values: [],
predefined: false,
},
[FieldKey.FIRST_RELEASE]: {
- ...builtInTags[FieldKey.FIRST_RELEASE],
+ ...PREDEFINED_FIELDS[FieldKey.FIRST_RELEASE],
name: 'First Release',
values: ['latest'],
predefined: true,
},
[FieldKey.EVENT_TIMESTAMP]: {
- ...builtInTags[FieldKey.EVENT_TIMESTAMP],
+ ...PREDEFINED_FIELDS[FieldKey.EVENT_TIMESTAMP],
name: 'Event Timestamp',
values: [],
predefined: true,
},
[FieldKey.TIMES_SEEN]: {
- ...builtInTags[FieldKey.TIMES_SEEN],
+ ...PREDEFINED_FIELDS[FieldKey.TIMES_SEEN],
name: 'Times Seen',
isInput: true,
// Below values are required or else SearchBar will attempt to get values
@@ -292,7 +296,7 @@ function builtInIssuesFields(
predefined: true,
},
[FieldKey.ISSUE_PRIORITY]: {
- ...builtInTags[FieldKey.ISSUE_PRIORITY],
+ ...PREDEFINED_FIELDS[FieldKey.ISSUE_PRIORITY],
name: 'Issue Priority',
values: [PriorityLevel.HIGH, PriorityLevel.MEDIUM, PriorityLevel.LOW],
predefined: true,
@@ -306,7 +310,11 @@ function builtInIssuesFields(
ISSUE_FIELDS.includes(key as FieldKey)
);
- return {...builtInTags, ...Object.fromEntries(filteredCollection), ...semverFields};
+ return {
+ ...PREDEFINED_FIELDS,
+ ...Object.fromEntries(filteredCollection),
+ ...semverFields,
+ };
}
const getUsername = ({isManaged, username, email}: User) => {
diff --git a/tests/acceptance/test_issue_saved_searches.py b/tests/acceptance/test_issue_saved_searches.py
index bb661fb5e06042..197ee8907eb4c5 100644
--- a/tests/acceptance/test_issue_saved_searches.py
+++ b/tests/acceptance/test_issue_saved_searches.py
@@ -60,17 +60,19 @@ def test_create_saved_search(self):
self.browser.find_element(by=By.NAME, value="name").send_keys("My Saved Search")
query_input = self.browser.find_element(
- by=By.CSS_SELECTOR, value='[role="dialog"] textarea'
+ by=By.CSS_SELECTOR, value='[role="dialog"] [data-test-id="query-builder-input"]'
)
- self.browser.click('[role="dialog"] button[aria-label="Clear search"]')
- query_input.send_keys("browser.name:Firefox", Keys.ENTER)
+ query_input.click()
+ query_input.send_keys("event.type:error", Keys.ENTER)
self.browser.click('[role="dialog"] button[aria-label="Save"]')
self.browser.wait_until_not('[data-test-id="loading-indicator"]')
# The saved search should have been created with the correct options
created_search = SavedSearch.objects.get(name="My Saved Search")
assert created_search
- assert created_search.query == "browser.name:Firefox"
+ assert (
+ created_search.query == "is:unresolved issue.priority:[high, medium] event.type:error"
+ )
assert created_search.sort == SortOptions.DATE
assert created_search.visibility == Visibility.OWNER
assert not created_search.is_global
From 6a9290dd40c456e73486eba96bf64b60bcdad39a Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Thu, 3 Oct 2024 12:42:54 -0700
Subject: [PATCH 088/139] fix(issues): Remove old trace view header zindex when
embedded (#78565)
---
static/app/components/events/interfaces/spans/header.tsx | 9 +++++++--
.../app/components/events/interfaces/spans/traceView.tsx | 1 +
2 files changed, 8 insertions(+), 2 deletions(-)
diff --git a/static/app/components/events/interfaces/spans/header.tsx b/static/app/components/events/interfaces/spans/header.tsx
index 0069bc5ede0e1b..dee761b4cc69b4 100644
--- a/static/app/components/events/interfaces/spans/header.tsx
+++ b/static/app/components/events/interfaces/spans/header.tsx
@@ -50,6 +50,7 @@ type PropType = {
dragProps: DragManagerChildrenProps;
event: EventTransaction | AggregateEventTransaction;
generateBounds: (bounds: SpanBoundsType) => SpanGeneratedBoundsType;
+ isEmbedded: boolean;
minimapInteractiveRef: React.RefObject;
operationNameFilters: ActiveOperationFilter;
organization: Organization;
@@ -475,6 +476,7 @@ class TraceViewHeader extends Component {
{dividerHandlerChildrenProps => {
@@ -801,12 +803,15 @@ const DurationGuideBox = styled('div')<{alignLeft: boolean}>`
}};
`;
-export const HeaderContainer = styled('div')<{hasProfileMeasurementsChart: boolean}>`
+export const HeaderContainer = styled('div')<{
+ hasProfileMeasurementsChart: boolean;
+ isEmbedded: boolean;
+}>`
width: 100%;
position: sticky;
left: 0;
top: ${p => (ConfigStore.get('demoMode') ? p.theme.demo.headerSize : 0)};
- z-index: ${p => p.theme.zIndex.traceView.minimapContainer};
+ z-index: ${p => (p.isEmbedded ? 'initial' : p.theme.zIndex.traceView.minimapContainer)};
background-color: ${p => p.theme.background};
border-bottom: 1px solid ${p => p.theme.border};
height: ${p =>
diff --git a/static/app/components/events/interfaces/spans/traceView.tsx b/static/app/components/events/interfaces/spans/traceView.tsx
index 4c0e5874f895ee..6d36c40f609bb9 100644
--- a/static/app/components/events/interfaces/spans/traceView.tsx
+++ b/static/app/components/events/interfaces/spans/traceView.tsx
@@ -67,6 +67,7 @@ function TraceView(props: Props) {
viewStart: 0,
viewEnd: 1,
})}
+ isEmbedded={!!props.isEmbedded}
/>
);
}}
From 14e2ac473935957ec9223ce46b42c71f607a2b55 Mon Sep 17 00:00:00 2001
From: Andrew Liu <159852527+aliu39@users.noreply.github.com>
Date: Thu, 3 Oct 2024 12:44:29 -0700
Subject: [PATCH 089/139] chore(feedback): remove user-feedback-ingest rollout
flag (#78097)
Removes this rollout flag since user feedback is GA'd. Applies the
options denylist directly (it was previously checked in the flag
[handler](https://github.com/getsentry/getsentry/blob/50d26987b5095171e0a8f4342b78c3393660d880/getsentry/features.py#L1504-L1515)).
The denylist is now checked in the shim function. I think this is a better
place to check it: previously we were missing a spot in post-process.
PR making this change safe for internal and external (self-hosted)
relays: https://github.com/getsentry/relay/pull/4076
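In short, the feature-flag check is replaced by a direct option lookup applied inside `shim_to_feedback`; a condensed sketch of the change (parameter names abridged from the diff below):

```python
from sentry import options

def is_in_feedback_denylist(organization) -> bool:
    # Org slugs listed in this option are excluded from feedback ingestion.
    return organization.slug in options.get("feedback.organizations.slug-denylist")

def shim_to_feedback(report, event, project, source):
    # Denylisted orgs are dropped here, so every caller of the shim is covered.
    if is_in_feedback_denylist(project.organization):
        return
    ...  # build and create the feedback event as before
```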
---
src/sentry/features/temporary.py | 2 -
.../feedback/usecases/create_feedback.py | 9 ++-
src/sentry/ingest/consumer/processors.py | 6 +-
src/sentry/ingest/userreport.py | 22 ++-----
src/sentry/tasks/update_user_reports.py | 12 ++--
src/sentry/web/frontend/error_page_embed.py | 9 +--
.../endpoints/test_project_user_reports.py | 38 ++++++------
.../feedback/usecases/test_create_feedback.py | 15 +++++
tests/sentry/ingest/test_userreport.py | 20 +------
.../sentry/tasks/test_update_user_reports.py | 2 +-
.../web/frontend/test_error_page_embed.py | 60 +++++++++----------
11 files changed, 93 insertions(+), 102 deletions(-)
diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py
index c3f0e6f7d211f3..06e16837f9504c 100644
--- a/src/sentry/features/temporary.py
+++ b/src/sentry/features/temporary.py
@@ -510,8 +510,6 @@ def register_temporary_features(manager: FeatureManager):
# Enables uptime related settings for projects and orgs
manager.add('organizations:uptime-settings', OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
manager.add("organizations:use-metrics-layer", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=False)
- # Enable User Feedback v2 ingest
- manager.add("organizations:user-feedback-ingest", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Use ReplayClipPreview inside the User Feedback Details panel
manager.add("organizations:user-feedback-replay-clip", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable User Feedback spam auto filtering feature ingest
diff --git a/src/sentry/feedback/usecases/create_feedback.py b/src/sentry/feedback/usecases/create_feedback.py
index 57c9aae5e4a033..e379ba3c0dfcd4 100644
--- a/src/sentry/feedback/usecases/create_feedback.py
+++ b/src/sentry/feedback/usecases/create_feedback.py
@@ -8,7 +8,7 @@
import jsonschema
-from sentry import features
+from sentry import features, options
from sentry.constants import DataCategory
from sentry.eventstore.models import Event, GroupEvent
from sentry.feedback.usecases.spam_detection import is_spam
@@ -353,6 +353,9 @@ def shim_to_feedback(
User feedbacks are an event type, so we try and grab as much from the
legacy user report and event to create the new feedback.
"""
+ if is_in_feedback_denylist(project.organization):
+ return
+
try:
feedback_event: dict[str, Any] = {
"contexts": {
@@ -399,3 +402,7 @@ def auto_ignore_spam_feedbacks(project, issue_fingerprint):
new_substatus=GroupSubStatus.FOREVER,
),
)
+
+
+def is_in_feedback_denylist(organization):
+ return organization.slug in options.get("feedback.organizations.slug-denylist")
diff --git a/src/sentry/ingest/consumer/processors.py b/src/sentry/ingest/consumer/processors.py
index 3f067f0d8dbfb2..1247a95f75ad53 100644
--- a/src/sentry/ingest/consumer/processors.py
+++ b/src/sentry/ingest/consumer/processors.py
@@ -13,7 +13,7 @@
from sentry.attachments import CachedAttachment, attachment_cache
from sentry.event_manager import save_attachment
from sentry.eventstore.processing import event_processing_store
-from sentry.feedback.usecases.create_feedback import FeedbackCreationSource
+from sentry.feedback.usecases.create_feedback import FeedbackCreationSource, is_in_feedback_denylist
from sentry.ingest.userreport import Conflict, save_userreport
from sentry.killswitches import killswitch_matches_context
from sentry.models.project import Project
@@ -193,7 +193,7 @@ def process_event(
except Exception:
pass
elif data.get("type") == "feedback":
- if features.has("organizations:user-feedback-ingest", project.organization, actor=None):
+ if not is_in_feedback_denylist(project.organization):
save_event_feedback.delay(
cache_key=None, # no need to cache as volume is low
data=data,
@@ -201,6 +201,8 @@ def process_event(
event_id=event_id,
project_id=project_id,
)
+ else:
+ metrics.incr("feedback.ingest.filtered", tags={"reason": "org.denylist"})
else:
# Preprocess this event, which spawns either process_event or
# save_event. Pass data explicitly to avoid fetching it again from the
diff --git a/src/sentry/ingest/userreport.py b/src/sentry/ingest/userreport.py
index 71b777fc04c853..904f66418b89d3 100644
--- a/src/sentry/ingest/userreport.py
+++ b/src/sentry/ingest/userreport.py
@@ -6,10 +6,11 @@
from django.db import IntegrityError, router
from django.utils import timezone
-from sentry import eventstore, features, options
+from sentry import eventstore, options
from sentry.eventstore.models import Event, GroupEvent
from sentry.feedback.usecases.create_feedback import (
UNREAL_FEEDBACK_UNATTENDED_MESSAGE,
+ is_in_feedback_denylist,
shim_to_feedback,
)
from sentry.models.userreport import UserReport
@@ -32,7 +33,8 @@ def save_userreport(
start_time=None,
):
with metrics.timer("sentry.ingest.userreport.save_userreport"):
- if is_org_in_denylist(project.organization):
+ if is_in_feedback_denylist(project.organization):
+ metrics.incr("user_report.create_user_report.filtered", tags={"reason": "org.denylist"})
return
if should_filter_user_report(report["comments"]):
return
@@ -97,24 +99,19 @@ def save_userreport(
user_feedback_received.send(project=project, sender=save_userreport)
- has_feedback_ingest = features.has(
- "organizations:user-feedback-ingest", project.organization, actor=None
- )
logger.info(
"ingest.user_report",
extra={
"project_id": project.id,
"event_id": report["event_id"],
"has_event": bool(event),
- "has_feedback_ingest": has_feedback_ingest,
},
)
metrics.incr(
"user_report.create_user_report.saved",
- tags={"has_event": bool(event), "has_feedback_ingest": has_feedback_ingest},
+ tags={"has_event": bool(event)},
)
-
- if has_feedback_ingest and event:
+ if event:
logger.info(
"ingest.user_report.shim_to_feedback",
extra={"project_id": project.id, "event_id": report["event_id"]},
@@ -150,10 +147,3 @@ def should_filter_user_report(comments: str):
return True
return False
-
-
-def is_org_in_denylist(organization):
- if organization.slug in options.get("feedback.organizations.slug-denylist"):
- metrics.incr("user_report.create_user_report.filtered", tags={"reason": "org.denylist"})
- return True
- return False
diff --git a/src/sentry/tasks/update_user_reports.py b/src/sentry/tasks/update_user_reports.py
index fbdcaca4de187e..0075f46dd8e02e 100644
--- a/src/sentry/tasks/update_user_reports.py
+++ b/src/sentry/tasks/update_user_reports.py
@@ -5,8 +5,12 @@
import sentry_sdk
from django.utils import timezone
-from sentry import eventstore, features, quotas
-from sentry.feedback.usecases.create_feedback import FeedbackCreationSource, shim_to_feedback
+from sentry import eventstore, quotas
+from sentry.feedback.usecases.create_feedback import (
+ FeedbackCreationSource,
+ is_in_feedback_denylist,
+ shim_to_feedback,
+)
from sentry.models.project import Project
from sentry.models.userreport import UserReport
from sentry.silo.base import SiloMode
@@ -86,9 +90,7 @@ def update_user_reports(**kwargs: Any) -> None:
for event in events:
report = report_by_event.get(event.event_id)
if report:
- if features.has(
- "organizations:user-feedback-ingest", project.organization, actor=None
- ):
+ if not is_in_feedback_denylist(project.organization):
logger.info(
"update_user_reports.shim_to_feedback",
extra={"report_id": report.id, "event_id": event.event_id},
diff --git a/src/sentry/web/frontend/error_page_embed.py b/src/sentry/web/frontend/error_page_embed.py
index fdfc803460ce42..68c608e88303be 100644
--- a/src/sentry/web/frontend/error_page_embed.py
+++ b/src/sentry/web/frontend/error_page_embed.py
@@ -11,7 +11,7 @@
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import View
-from sentry import eventstore, features
+from sentry import eventstore
from sentry.feedback.usecases.create_feedback import FeedbackCreationSource, shim_to_feedback
from sentry.models.options.project_option import ProjectOption
from sentry.models.project import Project
@@ -194,12 +194,7 @@ def dispatch(self, request: HttpRequest) -> HttpResponse:
)
project = Project.objects.get(id=report.project_id)
- if (
- features.has(
- "organizations:user-feedback-ingest", project.organization, actor=request.user
- )
- and event is not None
- ):
+ if event is not None:
shim_to_feedback(
{
"name": report.name,
diff --git a/tests/sentry/api/endpoints/test_project_user_reports.py b/tests/sentry/api/endpoints/test_project_user_reports.py
index 982866e6218f81..a057863df6e9e8 100644
--- a/tests/sentry/api/endpoints/test_project_user_reports.py
+++ b/tests/sentry/api/endpoints/test_project_user_reports.py
@@ -432,16 +432,15 @@ def test_simple_shim_to_feedback(self, mock_produce_occurrence_to_kafka):
url = _make_url(self.project)
- with self.feature("organizations:user-feedback-ingest"):
- response = self.client.post(
- url,
- data={
- "event_id": event_with_replay.event_id,
- "email": "foo@example.com",
- "name": "Foo Bar",
- "comments": "It broke!",
- },
- )
+ response = self.client.post(
+ url,
+ data={
+ "event_id": event_with_replay.event_id,
+ "email": "foo@example.com",
+ "name": "Foo Bar",
+ "comments": "It broke!",
+ },
+ )
assert response.status_code == 200, response.content
@@ -480,16 +479,15 @@ def test_simple_shim_to_feedback_no_event_should_not_call(
url = _make_url(self.project)
event_id = uuid4().hex
- with self.feature("organizations:user-feedback-ingest"):
- response = self.client.post(
- url,
- data={
- "event_id": event_id,
- "email": "foo@example.com",
- "name": "Foo Bar",
- "comments": "It broke!",
- },
- )
+ response = self.client.post(
+ url,
+ data={
+ "event_id": event_id,
+ "email": "foo@example.com",
+ "name": "Foo Bar",
+ "comments": "It broke!",
+ },
+ )
assert response.status_code == 200, response.content
diff --git a/tests/sentry/feedback/usecases/test_create_feedback.py b/tests/sentry/feedback/usecases/test_create_feedback.py
index 69496b24ff69a0..7dd67ab96eb867 100644
--- a/tests/sentry/feedback/usecases/test_create_feedback.py
+++ b/tests/sentry/feedback/usecases/test_create_feedback.py
@@ -14,6 +14,7 @@
FeedbackCreationSource,
create_feedback_issue,
fix_for_issue_platform,
+ is_in_feedback_denylist,
shim_to_feedback,
validate_issue_platform_event_schema,
)
@@ -814,3 +815,17 @@ def test_shim_to_feedback_missing_fields(default_project, monkeypatch):
report_dict, event, default_project, FeedbackCreationSource.USER_REPORT_ENVELOPE # type: ignore[arg-type]
)
assert mock_create_feedback_issue.call_count == 0
+
+
+@django_db_all
+def test_denylist(set_sentry_option, default_project):
+ with set_sentry_option(
+ "feedback.organizations.slug-denylist", [default_project.organization.slug]
+ ):
+ assert is_in_feedback_denylist(default_project.organization) is True
+
+
+@django_db_all
+def test_denylist_not_in_list(set_sentry_option, default_project):
+ with set_sentry_option("feedback.organizations.slug-denylist", ["not-in-list"]):
+ assert is_in_feedback_denylist(default_project.organization) is False
diff --git a/tests/sentry/ingest/test_userreport.py b/tests/sentry/ingest/test_userreport.py
index 4b9bef8e370bc8..faa483743cd8ca 100644
--- a/tests/sentry/ingest/test_userreport.py
+++ b/tests/sentry/ingest/test_userreport.py
@@ -1,5 +1,5 @@
from sentry.feedback.usecases.create_feedback import UNREAL_FEEDBACK_UNATTENDED_MESSAGE
-from sentry.ingest.userreport import is_org_in_denylist, save_userreport, should_filter_user_report
+from sentry.ingest.userreport import save_userreport, should_filter_user_report
from sentry.models.userreport import UserReport
from sentry.testutils.pytest.fixtures import django_db_all
@@ -22,24 +22,10 @@ def test_empty_message(set_sentry_option):
assert should_filter_user_report("") is True
-@django_db_all
-def test_org_denylist(set_sentry_option, default_project):
- with set_sentry_option(
- "feedback.organizations.slug-denylist", [default_project.organization.slug]
- ):
- assert is_org_in_denylist(default_project.organization) is True
-
-
-@django_db_all
-def test_org_denylist_not_in_list(set_sentry_option, default_project):
- with set_sentry_option("feedback.organizations.slug-denylist", ["not-in-list"]):
- assert is_org_in_denylist(default_project.organization) is False
-
-
@django_db_all
def test_save_user_report_returns_instance(set_sentry_option, default_project, monkeypatch):
# Mocking dependencies and setting up test data
- monkeypatch.setattr("sentry.ingest.userreport.is_org_in_denylist", lambda org: False)
+ monkeypatch.setattr("sentry.ingest.userreport.is_in_feedback_denylist", lambda org: False)
monkeypatch.setattr("sentry.ingest.userreport.should_filter_user_report", lambda message: False)
monkeypatch.setattr(
"sentry.ingest.userreport.UserReport.objects.create", lambda **kwargs: UserReport()
@@ -66,7 +52,7 @@ def test_save_user_report_returns_instance(set_sentry_option, default_project, m
@django_db_all
def test_save_user_report_denylist(set_sentry_option, default_project, monkeypatch):
- monkeypatch.setattr("sentry.ingest.userreport.is_org_in_denylist", lambda org: True)
+ monkeypatch.setattr("sentry.ingest.userreport.is_in_feedback_denylist", lambda org: True)
report = {
"event_id": "123456",
"name": "Test User",
diff --git a/tests/sentry/tasks/test_update_user_reports.py b/tests/sentry/tasks/test_update_user_reports.py
index 9e2b07b55f3fc9..1bed0fa40fa172 100644
--- a/tests/sentry/tasks/test_update_user_reports.py
+++ b/tests/sentry/tasks/test_update_user_reports.py
@@ -140,7 +140,7 @@ def test_simple_calls_feedback_shim_if_ff_enabled(self, mock_produce_occurrence_
email="foo@example.com",
name="Foo Bar",
)
- with self.feature("organizations:user-feedback-ingest"), self.tasks():
+ with self.tasks():
update_user_reports(max_events=2)
report1 = UserReport.objects.get(project_id=project.id, event_id=event1.event_id)
diff --git a/tests/sentry/web/frontend/test_error_page_embed.py b/tests/sentry/web/frontend/test_error_page_embed.py
index 444d289d088270..fe3e462e2c67d3 100644
--- a/tests/sentry/web/frontend/test_error_page_embed.py
+++ b/tests/sentry/web/frontend/test_error_page_embed.py
@@ -238,39 +238,37 @@ def test_environment_gets_user_report(self):
@mock.patch("sentry.feedback.usecases.create_feedback.produce_occurrence_to_kafka")
def test_calls_feedback_shim_if_ff_enabled(self, mock_produce_occurrence_to_kafka):
self.make_event(environment=self.environment.name, event_id=self.event_id)
- with self.feature({"organizations:user-feedback-ingest": True}):
- self.client.post(
- self.path,
- {
- "name": "Jane Bloggs",
- "email": "jane@example.com",
- "comments": "This is an example!",
- },
- HTTP_REFERER="http://example.com",
- HTTP_ACCEPT="application/json",
- )
- assert len(mock_produce_occurrence_to_kafka.mock_calls) == 1
- mock_event_data = mock_produce_occurrence_to_kafka.call_args_list[0][1]["event_data"]
- assert mock_event_data["contexts"]["feedback"]["contact_email"] == "jane@example.com"
- assert mock_event_data["contexts"]["feedback"]["message"] == "This is an example!"
- assert mock_event_data["contexts"]["feedback"]["name"] == "Jane Bloggs"
- assert mock_event_data["platform"] == "other"
- assert mock_event_data["contexts"]["feedback"]["associated_event_id"] == self.event_id
- assert mock_event_data["level"] == "error"
+ self.client.post(
+ self.path,
+ {
+ "name": "Jane Bloggs",
+ "email": "jane@example.com",
+ "comments": "This is an example!",
+ },
+ HTTP_REFERER="http://example.com",
+ HTTP_ACCEPT="application/json",
+ )
+ assert len(mock_produce_occurrence_to_kafka.mock_calls) == 1
+ mock_event_data = mock_produce_occurrence_to_kafka.call_args_list[0][1]["event_data"]
+ assert mock_event_data["contexts"]["feedback"]["contact_email"] == "jane@example.com"
+ assert mock_event_data["contexts"]["feedback"]["message"] == "This is an example!"
+ assert mock_event_data["contexts"]["feedback"]["name"] == "Jane Bloggs"
+ assert mock_event_data["platform"] == "other"
+ assert mock_event_data["contexts"]["feedback"]["associated_event_id"] == self.event_id
+ assert mock_event_data["level"] == "error"
@mock.patch("sentry.feedback.usecases.create_feedback.produce_occurrence_to_kafka")
def test_does_not_call_feedback_shim_no_event_if_ff_enabled(
self, mock_produce_occurrence_to_kafka
):
- with self.feature({"organizations:user-feedback-ingest": True}):
- self.client.post(
- self.path,
- {
- "name": "Jane Bloggs",
- "email": "jane@example.com",
- "comments": "This is an example!",
- },
- HTTP_REFERER="http://example.com",
- HTTP_ACCEPT="application/json",
- )
- assert len(mock_produce_occurrence_to_kafka.mock_calls) == 0
+ self.client.post(
+ self.path,
+ {
+ "name": "Jane Bloggs",
+ "email": "jane@example.com",
+ "comments": "This is an example!",
+ },
+ HTTP_REFERER="http://example.com",
+ HTTP_ACCEPT="application/json",
+ )
+ assert len(mock_produce_occurrence_to_kafka.mock_calls) == 0
From d558e2816d0b8540d7b3b566b1084f340c750ef6 Mon Sep 17 00:00:00 2001
From: Alberto Leal
Date: Thu, 3 Oct 2024 15:45:22 -0400
Subject: [PATCH 090/139] feat(billing): Add
quotas.backend.should_emit_profile_duration_outcome (#78560)
We want to be able to support continuous profiling (profiles v2) on
pre-AM3 plans.
To do so, we need to prevent the emission of
`DataCategory.PROFILE_DURATION` for transaction-based profiling
(profiles v1) on pre-AM3 plans. This prevents double billing on
profiles v1 for AM2 customers.
We will be able to do this by introducing the
`quotas.backend.should_emit_profile_duration_outcome` method.
In getsentry, we will override this method and check whether we need to
emit `DataCategory.PROFILE_DURATION` based on the organization's plan.
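For reference, the base implementation added in this PR always emits; a getsentry override might look roughly like the sketch below (the class name `GetsentryQuota` and the helper `organization_has_am3_plan` are illustrative, not part of this PR):

```python
class Quota:
    def should_emit_profile_duration_outcome(self, organization, profile) -> bool:
        # Base implementation added in this PR: always emit.
        return True


class GetsentryQuota(Quota):
    def should_emit_profile_duration_outcome(self, organization, profile) -> bool:
        # Hypothetical override: continuous profiles (v2) always emit a
        # duration outcome; transaction-based profiles (v1) only do so on
        # AM3 plans, so pre-AM3 customers are not double billed.
        if profile.get("version") == "2":
            return True
        return organization_has_am3_plan(organization)  # hypothetical helper
```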
---
src/sentry/profiles/task.py | 5 +-
src/sentry/quotas/base.py | 10 ++++
tests/sentry/profiles/test_task.py | 87 ++++++++++++++++++++++++++++++
3 files changed, 101 insertions(+), 1 deletion(-)
diff --git a/src/sentry/profiles/task.py b/src/sentry/profiles/task.py
index f888cf2bda68d8..0149b9159366d6 100644
--- a/src/sentry/profiles/task.py
+++ b/src/sentry/profiles/task.py
@@ -187,7 +187,10 @@ def process_profile_task(
if not project.flags.has_profiles:
first_profile_received.send_robust(project=project, sender=Project)
try:
- _track_duration_outcome(profile=profile, project=project)
+ if quotas.backend.should_emit_profile_duration_outcome(
+ organization=organization, profile=profile
+ ):
+ _track_duration_outcome(profile=profile, project=project)
except Exception as e:
sentry_sdk.capture_exception(e)
if profile.get("version") != "2":
diff --git a/src/sentry/quotas/base.py b/src/sentry/quotas/base.py
index 4b00ff14965821..376a69acfa3a6a 100644
--- a/src/sentry/quotas/base.py
+++ b/src/sentry/quotas/base.py
@@ -15,9 +15,11 @@
from sentry.utils.services import Service
if TYPE_CHECKING:
+ from sentry.models.organization import Organization
from sentry.models.project import Project
from sentry.models.projectkey import ProjectKey
from sentry.monitors.models import Monitor
+ from sentry.profiles.task import Profile
@unique
@@ -653,3 +655,11 @@ def update_monitor_slug(self, previous_slug: str, new_slug: str, project_id: int
"""
Updates a monitor seat assignment's slug.
"""
+
+ def should_emit_profile_duration_outcome(
+ self, organization: Organization, profile: Profile
+ ) -> bool:
+ """
+ Determines if the profile duration outcome should be emitted.
+ """
+ return True
diff --git a/tests/sentry/profiles/test_task.py b/tests/sentry/profiles/test_task.py
index 36873137708230..4491a027bf3301 100644
--- a/tests/sentry/profiles/test_task.py
+++ b/tests/sentry/profiles/test_task.py
@@ -5,6 +5,7 @@
from os.path import join
from tempfile import TemporaryFile
from typing import Any
+from unittest.mock import patch
import pytest
from django.core.files.uploadedfile import SimpleUploadedFile
@@ -27,6 +28,7 @@
_set_frames_platform,
_symbolicate_profile,
get_metrics_dsn,
+ process_profile_task,
)
from sentry.testutils.cases import TransactionTestCase
from sentry.testutils.factories import Factories, get_fixture_path
@@ -911,3 +913,88 @@ def test_get_metrics_dsn(default_project):
ProjectKey.objects.create(project_id=default_project.id, use_case=UseCase.PROFILING.value)
assert get_metrics_dsn(default_project.id) == key1.get_dsn(public=True)
+
+
+@patch("sentry.profiles.task._track_outcome")
+@patch("sentry.profiles.task._track_duration_outcome")
+@patch("sentry.profiles.task._symbolicate_profile")
+@patch("sentry.profiles.task._deobfuscate_profile")
+@patch("sentry.profiles.task._push_profile_to_vroom")
+@django_db_all
+@pytest.mark.parametrize(
+ "profile",
+ ["sample_v1_profile", "sample_v2_profile"],
+)
+def test_process_profile_task_should_emit_profile_duration_outcome(
+ _push_profile_to_vroom,
+ _deobfuscate_profile,
+ _symbolicate_profile,
+ _track_duration_outcome,
+ _track_outcome,
+ profile,
+ organization,
+ project,
+ request,
+):
+ _push_profile_to_vroom.return_value = True
+ _deobfuscate_profile.return_value = True
+ _symbolicate_profile.return_value = True
+
+ profile = request.getfixturevalue(profile)
+ profile["organization_id"] = organization.id
+ profile["project_id"] = project.id
+
+ process_profile_task(profile=profile)
+
+ assert _track_duration_outcome.call_count == 1
+
+ if profile.get("version") != "2":
+ assert _track_outcome.call_count == 1
+ else:
+ assert _track_outcome.call_count == 0
+
+
+@patch("sentry.quotas.backend.should_emit_profile_duration_outcome")
+@patch("sentry.profiles.task._track_outcome")
+@patch("sentry.profiles.task._track_duration_outcome")
+@patch("sentry.profiles.task._symbolicate_profile")
+@patch("sentry.profiles.task._deobfuscate_profile")
+@patch("sentry.profiles.task._push_profile_to_vroom")
+@django_db_all
+@pytest.mark.parametrize(
+ "profile",
+ ["sample_v1_profile", "sample_v2_profile"],
+)
+def test_process_profile_task_should_not_emit_profile_duration_outcome(
+ _push_profile_to_vroom,
+ _deobfuscate_profile,
+ _symbolicate_profile,
+ _track_duration_outcome,
+ _track_outcome,
+ should_emit_profile_duration_outcome,
+ profile,
+ organization,
+ project,
+ request,
+):
+ _push_profile_to_vroom.return_value = True
+ _deobfuscate_profile.return_value = True
+ _symbolicate_profile.return_value = True
+ should_emit_profile_duration_outcome.return_value = False
+
+ profile = request.getfixturevalue(profile)
+ profile["organization_id"] = organization.id
+ profile["project_id"] = project.id
+
+ process_profile_task(profile=profile)
+
+ assert _track_duration_outcome.call_count == 0
+ assert should_emit_profile_duration_outcome.call_count == 1
+ should_emit_profile_duration_outcome.assert_called_with(
+ organization=organization, profile=profile
+ )
+
+ if profile.get("version") != "2":
+ assert _track_outcome.call_count == 1
+ else:
+ assert _track_outcome.call_count == 0
From de06a25eee46c13c1d12a20e6803586f5dd206a7 Mon Sep 17 00:00:00 2001
From: Colton Allen
Date: Thu, 3 Oct 2024 14:58:06 -0500
Subject: [PATCH 091/139] fix(flags): Temporarily disable feature flag (#78567)
It's not working for whatever reason and I can't debug it. The harm of
not having this gated by a flag is zero, so I'm just removing it for now
so we can test.
---
src/sentry/flags/endpoints/logs.py | 12 ++++++------
tests/sentry/flags/endpoints/test_logs.py | 17 +++++++++--------
2 files changed, 15 insertions(+), 14 deletions(-)
diff --git a/src/sentry/flags/endpoints/logs.py b/src/sentry/flags/endpoints/logs.py
index e12633129de44e..8da767df45f7f9 100644
--- a/src/sentry/flags/endpoints/logs.py
+++ b/src/sentry/flags/endpoints/logs.py
@@ -5,7 +5,7 @@
from rest_framework.request import Request
from rest_framework.response import Response
-from sentry import features
+# from sentry import features
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
@@ -48,8 +48,8 @@ class OrganizationFlagLogIndexEndpoint(OrganizationEndpoint):
publish_status = {"GET": ApiPublishStatus.PRIVATE}
def get(self, request: Request, organization: Organization) -> Response:
- if not features.has("organizations:feature-flag-ui", organization, actor=request.user):
- raise ResourceDoesNotExist
+ # if not features.has("organizations:feature-flag-ui", organization, actor=request.user):
+ # raise ResourceDoesNotExist
start, end = get_date_range_from_params(request.GET)
if start is None or end is None:
@@ -77,14 +77,14 @@ class OrganizationFlagLogDetailsEndpoint(OrganizationEndpoint):
publish_status = {"GET": ApiPublishStatus.PRIVATE}
def get(self, request: Request, organization: Organization, flag_log_id: int) -> Response:
- if not features.has("organizations:feature-flag-ui", organization, actor=request.user):
- raise ResourceDoesNotExist
+ # if not features.has("organizations:feature-flag-ui", organization, actor=request.user):
+ # raise ResourceDoesNotExist
try:
model = FlagAuditLogModel.objects.filter(
id=flag_log_id,
organization_id=organization.id,
- ).first()
+ ).get()
except FlagAuditLogModel.DoesNotExist:
raise ResourceDoesNotExist
diff --git a/tests/sentry/flags/endpoints/test_logs.py b/tests/sentry/flags/endpoints/test_logs.py
index d2d521fb6d76ef..6efc54bde2f70d 100644
--- a/tests/sentry/flags/endpoints/test_logs.py
+++ b/tests/sentry/flags/endpoints/test_logs.py
@@ -52,9 +52,9 @@ def test_get_unauthorized_organization(self):
response = self.client.get(url)
assert response.status_code == 403
- def test_get_feature_disabled(self):
- response = self.client.get(self.url)
- assert response.status_code == 404
+ # def test_get_feature_disabled(self):
+ # response = self.client.get(self.url)
+ # assert response.status_code == 404
def test_get_stats_period(self):
model = FlagAuditLogModel(
@@ -142,9 +142,10 @@ def test_get_unauthorized_organization(self):
assert response.status_code == 403
def test_get_no_flag(self):
- response = self.client.get(reverse(self.endpoint, args=(self.organization.id, 123)))
- assert response.status_code == 404
+ with self.feature(self.features):
+ response = self.client.get(reverse(self.endpoint, args=(self.organization.id, 123)))
+ assert response.status_code == 404
- def test_get_feature_disabled(self):
- response = self.client.get(self.url)
- assert response.status_code == 404
+ # def test_get_feature_disabled(self):
+ # response = self.client.get(self.url)
+ # assert response.status_code == 404
From 6d5f99039ff28fdb33d785ee27a0ab372d41848e Mon Sep 17 00:00:00 2001
From: Katie Byers
Date: Thu, 3 Oct 2024 12:58:36 -0700
Subject: [PATCH 092/139] chore(grouping): Backfill `latest_grouping_config` in
grouphash metadata (#78367)
This is a follow-up to https://github.com/getsentry/sentry/pull/78366, which added a nullable `latest_grouping_config` field to the `GroupHashMetadata` model. We have been using the `newstyle:2023-01-11` config the entire time we've been writing records to the table, so we can be confident that, had the column existed from the beginning, it would have had that value in every row, making it easy to do the backfill.
---
migrations_lockfile.txt | 2 +-
...fill_grouphash_metadata_grouping_config.py | 45 +++++++++++++++++++
src/sentry/models/grouphashmetadata.py | 1 -
3 files changed, 46 insertions(+), 2 deletions(-)
create mode 100644 src/sentry/migrations/0772_backfill_grouphash_metadata_grouping_config.py
diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt
index 4b95fa70a4d7ca..8a8a48b97003e8 100644
--- a/migrations_lockfile.txt
+++ b/migrations_lockfile.txt
@@ -10,7 +10,7 @@ hybridcloud: 0016_add_control_cacheversion
nodestore: 0002_nodestore_no_dictfield
remote_subscriptions: 0003_drop_remote_subscription
replays: 0004_index_together
-sentry: 0771_add_grouping_config_to_grouphash_metadata
+sentry: 0772_backfill_grouphash_metadata_grouping_config
social_auth: 0002_default_auto_field
uptime: 0016_translate_uptime_object_headers_to_lists
workflow_engine: 0009_detector_type
diff --git a/src/sentry/migrations/0772_backfill_grouphash_metadata_grouping_config.py b/src/sentry/migrations/0772_backfill_grouphash_metadata_grouping_config.py
new file mode 100644
index 00000000000000..6e2474e507c8ff
--- /dev/null
+++ b/src/sentry/migrations/0772_backfill_grouphash_metadata_grouping_config.py
@@ -0,0 +1,45 @@
+# Generated by Django 5.1.1 on 2024-10-01 00:47
+
+from django.apps.registry import Apps
+from django.db import migrations
+from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+DEFAULT_GROUPING_CONFIG = "newstyle:2023-01-11"
+
+
+def fill_in_missing_grouping_config(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None:
+ GroupHashMetadata = apps.get_model("sentry", "GroupHashMetadata")
+
+ for gh_metadata in GroupHashMetadata.objects.filter(latest_grouping_config=None):
+ gh_metadata.latest_grouping_config = DEFAULT_GROUPING_CONFIG
+ gh_metadata.save(update_fields=["latest_grouping_config"])
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production.
+ # This should only be used for operations where it's safe to run the migration after your
+ # code has deployed. So this should not be used for most operations that alter the schema
+ # of a table.
+ # Here are some things that make sense to mark as post deployment:
+ # - Large data migrations. Typically we want these to be run manually so that they can be
+ # monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # run this outside deployments so that we don't block them. Note that while adding an index
+ # is a schema change, it's completely safe to run the operation after the code has deployed.
+ # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment
+
+ is_post_deployment = False
+
+ dependencies = [
+ ("sentry", "0771_add_grouping_config_to_grouphash_metadata"),
+ ]
+
+ operations = [
+ migrations.RunPython(
+ fill_in_missing_grouping_config,
+ migrations.RunPython.noop,
+ hints={"tables": ["sentry_groupedhashmetadata"]},
+ ),
+ ]
diff --git a/src/sentry/models/grouphashmetadata.py b/src/sentry/models/grouphashmetadata.py
index 64c6e16a940b1a..10bbb9bd2c09d1 100644
--- a/src/sentry/models/grouphashmetadata.py
+++ b/src/sentry/models/grouphashmetadata.py
@@ -21,7 +21,6 @@ class GroupHashMetadata(Model):
# HASHING
# Most recent config to produce this hash
- # TODO: Backfill the current config for grouphashes with metadata and then make this non-nullable
latest_grouping_config = models.CharField(null=True)
# SEER
From 73f9f4fc01834e8be6b13fcecb8d5d5609952ce1 Mon Sep 17 00:00:00 2001
From: Seiji Chew <67301797+schew2381@users.noreply.github.com>
Date: Thu, 3 Oct 2024 13:27:52 -0700
Subject: [PATCH 093/139] chore(ui): Remove searchbar flag for alerts team
(#78440)
Removing the `search-query-builder-alerts` flag from FE
---------
Co-authored-by: Malachi Willey
---
.../rules/metric/ruleConditionsForm.tsx | 155 ++++--------------
.../alerts/rules/metric/ruleForm.spec.tsx | 22 +--
2 files changed, 40 insertions(+), 137 deletions(-)
diff --git a/static/app/views/alerts/rules/metric/ruleConditionsForm.tsx b/static/app/views/alerts/rules/metric/ruleConditionsForm.tsx
index 885f104a033bec..81f85823e72159 100644
--- a/static/app/views/alerts/rules/metric/ruleConditionsForm.tsx
+++ b/static/app/views/alerts/rules/metric/ruleConditionsForm.tsx
@@ -12,7 +12,7 @@ import {
OnDemandMetricAlert,
OnDemandWarningIcon,
} from 'sentry/components/alerts/onDemandMetricAlert';
-import SearchBar, {getHasTag} from 'sentry/components/events/searchBar';
+import {getHasTag} from 'sentry/components/events/searchBar';
import {
STATIC_FIELD_TAGS,
STATIC_FIELD_TAGS_WITHOUT_ERROR_FIELDS,
@@ -32,7 +32,6 @@ import Panel from 'sentry/components/panels/panel';
import PanelBody from 'sentry/components/panels/panelBody';
import {SearchQueryBuilder} from 'sentry/components/searchQueryBuilder';
import {InvalidReason} from 'sentry/components/searchSyntax/parser';
-import {SearchInvalidTag} from 'sentry/components/smartSearchBar/searchInvalidTag';
import {t, tct} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {ActivationConditionType, MonitorType} from 'sentry/types/alerts';
@@ -715,7 +714,7 @@ class RuleConditionsForm extends PureComponent {
}}
flexibleControlStateSize
>
- {({onChange, onBlur, onKeyDown, initialData, value}) => {
+ {({onChange, onBlur, initialData, value}) => {
return (hasCustomMetrics(organization) &&
alertType === 'custom_metrics') ||
alertType === 'insights_metrics' ? (
@@ -734,111 +733,39 @@ class RuleConditionsForm extends PureComponent {
/>
) : (
- {organization.features.includes('search-query-builder-alerts') ? (
- {
- onFilterSearch(query, true);
- onChange(query, {});
- }}
- onBlur={(query, {parsedQuery}) => {
- onFilterSearch(query, parsedQuery);
- onBlur(query);
- }}
- // We only need strict validation for Transaction queries, everything else is fine
- disallowUnsupportedFilters={
- organization.features.includes('alert-allow-indexed') ||
- (hasOnDemandMetricAlertFeature(organization) &&
- isOnDemandQueryString(value))
- ? false
- : dataset === Dataset.GENERIC_METRICS
- }
- />
- ) : (
- {
- if (dataset !== Dataset.GENERIC_METRICS) {
- return null;
- }
- return (
- {item.desc},
- }
- )}
- docLink="https://docs.sentry.io/product/alerts/create-alerts/metric-alert-config/#tags--properties"
- />
- );
- }}
- searchSource="alert_builder"
- defaultQuery={initialData?.query ?? ''}
- {...getSupportedAndOmittedTags(dataset, organization)}
- includeSessionTagsValues={dataset === Dataset.SESSIONS}
- disabled={disabled || isErrorMigration}
- useFormWrapper={false}
- organization={organization}
- placeholder={this.searchPlaceholder}
- onChange={onChange}
- query={initialData.query}
- // We only need strict validation for Transaction queries, everything else is fine
- highlightUnsupportedTags={
- organization.features.includes('alert-allow-indexed') ||
- (hasOnDemandMetricAlertFeature(organization) &&
- isOnDemandQueryString(value))
- ? false
- : dataset === Dataset.GENERIC_METRICS
- }
- onKeyDown={e => {
- /**
- * Do not allow enter key to submit the alerts form since it is unlikely
- * users will be ready to create the rule as this sits above required fields.
- */
- if (e.key === 'Enter') {
- e.preventDefault();
- e.stopPropagation();
- }
- onKeyDown?.(e);
- }}
- onClose={(query, {validSearch}) => {
- onFilterSearch(query, validSearch);
- onBlur(query);
- }}
- onSearch={query => {
- onFilterSearch(query, true);
- onChange(query, {});
- }}
- hasRecentSearches={dataset !== Dataset.SESSIONS}
- />
- )}
+ {
+ onFilterSearch(query, true);
+ onChange(query, {});
+ }}
+ onBlur={(query, {parsedQuery}) => {
+ onFilterSearch(query, parsedQuery);
+ onBlur(query);
+ }}
+ // We only need strict validation for Transaction queries, everything else is fine
+ disallowUnsupportedFilters={
+ organization.features.includes('alert-allow-indexed') ||
+ (hasOnDemandMetricAlertFeature(organization) &&
+ isOnDemandQueryString(value))
+ ? false
+ : dataset === Dataset.GENERIC_METRICS
+ }
+ />
{isExtrapolatedChartData && isOnDemandQueryString(value) && (
- p.disabled &&
- `
- background: ${p.theme.backgroundSecondary};
- color: ${p.theme.disabled};
- cursor: not-allowed;
- `}
-`;
-
const StyledListItem = styled(ListItem)`
margin-bottom: ${space(0.5)};
font-size: ${p => p.theme.fontSizeExtraLarge};
diff --git a/static/app/views/alerts/rules/metric/ruleForm.spec.tsx b/static/app/views/alerts/rules/metric/ruleForm.spec.tsx
index 7d58b803844268..da41b817663ddd 100644
--- a/static/app/views/alerts/rules/metric/ruleForm.spec.tsx
+++ b/static/app/views/alerts/rules/metric/ruleForm.spec.tsx
@@ -3,7 +3,7 @@ import {IncidentTriggerFixture} from 'sentry-fixture/incidentTrigger';
import {MetricRuleFixture} from 'sentry-fixture/metricRule';
import {initializeOrg} from 'sentry-test/initializeOrg';
-import {act, render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
+import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
import selectEvent from 'sentry-test/selectEvent';
import {addErrorMessage} from 'sentry/actionCreators/indicator';
@@ -668,14 +668,6 @@ describe('Incident Rules Form', () => {
describe('Slack async lookup', () => {
const uuid = 'xxxx-xxxx-xxxx';
- beforeEach(() => {
- jest.useFakeTimers();
- });
-
- afterEach(() => {
- jest.useRealTimers();
- });
-
it('success status updates the rule', async () => {
const alertRule = MetricRuleFixture({name: 'Slack Alert Rule'});
MockApiClient.addMockResponse({
@@ -699,17 +691,16 @@ describe('Incident Rules Form', () => {
onSubmitSuccess,
});
- act(jest.runAllTimers);
+ await screen.findByTestId('loading-indicator');
await userEvent.type(
await screen.findByPlaceholderText('Enter Alert Name'),
'Slack Alert Rule',
{delay: null}
);
- await userEvent.click(screen.getByLabelText('Save Rule'), {delay: null});
+ await userEvent.click(await screen.findByLabelText('Save Rule'), {delay: null});
- expect(screen.getByTestId('loading-indicator')).toBeInTheDocument();
+ expect(await screen.findByTestId('loading-indicator')).toBeInTheDocument();
- act(jest.runAllTimers);
await waitFor(
() => {
expect(onSubmitSuccess).toHaveBeenCalledWith(
@@ -746,7 +737,6 @@ describe('Incident Rules Form', () => {
onSubmitSuccess,
});
- act(jest.runAllTimers);
expect(await screen.findByTestId('loading-indicator')).toBeInTheDocument();
expect(onSubmitSuccess).not.toHaveBeenCalled();
});
@@ -773,15 +763,13 @@ describe('Incident Rules Form', () => {
rule: alertRule,
onSubmitSuccess,
});
- act(jest.runAllTimers);
await userEvent.type(
await screen.findByPlaceholderText('Enter Alert Name'),
'Slack Alert Rule',
{delay: null}
);
- await userEvent.click(screen.getByLabelText('Save Rule'), {delay: null});
+ await userEvent.click(await screen.findByLabelText('Save Rule'), {delay: null});
- act(jest.runAllTimers);
await waitFor(
() => {
expect(addErrorMessage).toHaveBeenCalledWith('An error occurred');
From 03cc3cdee634b4f531553282f4fa68ed15510b69 Mon Sep 17 00:00:00 2001
From: Jodi Jang <116035587+jangjodi@users.noreply.github.com>
Date: Thu, 3 Oct 2024 13:29:17 -0700
Subject: [PATCH 094/139] ref(similarity): Stop sending message in seer
similarity call (#78034)
Stop sending the event message to the Seer similarity call in both
ingest and the similar issues tab.
---
src/sentry/grouping/ingest/seer.py | 1 -
.../issues/endpoints/group_similar_issues_embeddings.py | 2 --
src/sentry/seer/similarity/similar_issues.py | 5 +----
src/sentry/seer/similarity/types.py | 1 -
tests/sentry/grouping/ingest/test_seer.py | 1 -
.../endpoints/test_group_similar_issues_embeddings.py | 6 ------
tests/sentry/seer/similarity/test_similar_issues.py | 2 --
7 files changed, 1 insertion(+), 17 deletions(-)
diff --git a/src/sentry/grouping/ingest/seer.py b/src/sentry/grouping/ingest/seer.py
index 2536d1f6b560f1..605f81f3a16c4c 100644
--- a/src/sentry/grouping/ingest/seer.py
+++ b/src/sentry/grouping/ingest/seer.py
@@ -196,7 +196,6 @@ def get_seer_similar_issues(
"hash": event_hash,
"project_id": event.project.id,
"stacktrace": stacktrace_string,
- "message": filter_null_from_string(event.title),
"exception_type": filter_null_from_string(exception_type) if exception_type else None,
"k": num_neighbors,
"referrer": "ingest",
diff --git a/src/sentry/issues/endpoints/group_similar_issues_embeddings.py b/src/sentry/issues/endpoints/group_similar_issues_embeddings.py
index 0d298e1377b9b6..2a6334937d9f0a 100644
--- a/src/sentry/issues/endpoints/group_similar_issues_embeddings.py
+++ b/src/sentry/issues/endpoints/group_similar_issues_embeddings.py
@@ -82,7 +82,6 @@ def get(self, request: Request, group) -> Response:
"hash": latest_event.get_primary_hash(),
"project_id": group.project.id,
"stacktrace": stacktrace_string,
- "message": latest_event.title,
"exception_type": get_path(latest_event.data, "exception", "values", -1, "type"),
"read_only": True,
"referrer": "similar_issues",
@@ -99,7 +98,6 @@ def get(self, request: Request, group) -> Response:
similar_issues_params["use_reranking"] = request.GET["useReranking"] == "true"
extra: dict[str, Any] = dict(similar_issues_params.copy())
- extra["group_message"] = extra.pop("message")
logger.info("Similar issues embeddings parameters", extra=extra)
results = get_similarity_data_from_seer(similar_issues_params)
diff --git a/src/sentry/seer/similarity/similar_issues.py b/src/sentry/seer/similarity/similar_issues.py
index 38cede5c04ea04..b46c7c10e024d3 100644
--- a/src/sentry/seer/similarity/similar_issues.py
+++ b/src/sentry/seer/similarity/similar_issues.py
@@ -47,11 +47,8 @@ def get_similarity_data_from_seer(
logger_extra = apply_key_filter(
similar_issues_request,
- keep_keys=["event_id", "project_id", "message", "hash", "referrer", "use_reranking"],
+ keep_keys=["event_id", "project_id", "hash", "referrer", "use_reranking"],
)
- # We have to rename the key `message` because it conflicts with the `LogRecord` attribute of the
- # same name
- logger_extra["message_value"] = logger_extra.pop("message", None)
logger.info(
"get_seer_similar_issues.request",
extra=logger_extra,
diff --git a/src/sentry/seer/similarity/types.py b/src/sentry/seer/similarity/types.py
index 0477cf9b642b19..41d67bf18f3dd7 100644
--- a/src/sentry/seer/similarity/types.py
+++ b/src/sentry/seer/similarity/types.py
@@ -24,7 +24,6 @@ class SimilarHashMissingGroupError(Exception):
class SimilarIssuesEmbeddingsRequest(TypedDict):
project_id: int
stacktrace: str
- message: str
exception_type: str | None
hash: str
k: NotRequired[int] # how many neighbors to find
diff --git a/tests/sentry/grouping/ingest/test_seer.py b/tests/sentry/grouping/ingest/test_seer.py
index 06daebb77deae9..63e2b3674e0876 100644
--- a/tests/sentry/grouping/ingest/test_seer.py
+++ b/tests/sentry/grouping/ingest/test_seer.py
@@ -216,7 +216,6 @@ def test_sends_expected_data_to_seer(self, mock_get_similarity_data: MagicMock):
"hash": new_event.get_primary_hash(),
"project_id": self.project.id,
"stacktrace": f'{type}: {value}\n File "dogpark.py", function play_fetch\n {context_line}',
- "message": "FailedToFetchError('Charlie didn't bring the ball back')",
"exception_type": "FailedToFetchError",
"k": 1,
"referrer": "ingest",
diff --git a/tests/sentry/issues/endpoints/test_group_similar_issues_embeddings.py b/tests/sentry/issues/endpoints/test_group_similar_issues_embeddings.py
index 553894baac01bb..8336980913787d 100644
--- a/tests/sentry/issues/endpoints/test_group_similar_issues_embeddings.py
+++ b/tests/sentry/issues/endpoints/test_group_similar_issues_embeddings.py
@@ -224,7 +224,6 @@ def test_simple(self, mock_logger, mock_seer_request, mock_metrics_incr):
"hash": NonNone(self.event.get_primary_hash()),
"project_id": self.project.id,
"stacktrace": EXPECTED_STACKTRACE_STRING,
- "message": self.group.get_latest_event().title,
"exception_type": "ZeroDivisionError",
"read_only": True,
"referrer": "similar_issues",
@@ -239,7 +238,6 @@ def test_simple(self, mock_logger, mock_seer_request, mock_metrics_incr):
headers={"content-type": "application/json;charset=utf-8"},
)
- expected_seer_request_params["group_message"] = expected_seer_request_params.pop("message")
mock_logger.info.assert_called_with(
"Similar issues embeddings parameters", extra=expected_seer_request_params
)
@@ -342,7 +340,6 @@ def test_incomplete_return_data(self, mock_seer_request, mock_logger, mock_metri
"hash": NonNone(self.event.get_primary_hash()),
"project_id": self.project.id,
"stacktrace": EXPECTED_STACKTRACE_STRING,
- "message": self.group.get_latest_event().title,
"exception_type": "ZeroDivisionError",
"read_only": True,
"referrer": "similar_issues",
@@ -614,7 +611,6 @@ def test_no_optional_params(self, mock_seer_request):
"hash": NonNone(self.event.get_primary_hash()),
"project_id": self.project.id,
"stacktrace": EXPECTED_STACKTRACE_STRING,
- "message": self.group.get_latest_event().title,
"exception_type": "ZeroDivisionError",
"read_only": True,
"referrer": "similar_issues",
@@ -643,7 +639,6 @@ def test_no_optional_params(self, mock_seer_request):
"hash": NonNone(self.event.get_primary_hash()),
"project_id": self.project.id,
"stacktrace": EXPECTED_STACKTRACE_STRING,
- "message": self.group.get_latest_event().title,
"exception_type": "ZeroDivisionError",
"read_only": True,
"referrer": "similar_issues",
@@ -673,7 +668,6 @@ def test_no_optional_params(self, mock_seer_request):
"hash": NonNone(self.event.get_primary_hash()),
"project_id": self.project.id,
"stacktrace": EXPECTED_STACKTRACE_STRING,
- "message": self.group.get_latest_event().title,
"exception_type": "ZeroDivisionError",
"read_only": True,
"referrer": "similar_issues",
diff --git a/tests/sentry/seer/similarity/test_similar_issues.py b/tests/sentry/seer/similarity/test_similar_issues.py
index 21be8c08ad3f12..71a7b43ca95ced 100644
--- a/tests/sentry/seer/similarity/test_similar_issues.py
+++ b/tests/sentry/seer/similarity/test_similar_issues.py
@@ -32,7 +32,6 @@ def setUp(self):
"hash": "11212012123120120415201309082013",
"project_id": self.project.id,
"stacktrace": "",
- "message": "FailedToFetchError('Charlie didn't bring the ball back')",
"exception_type": "FailedToFetchError",
}
@@ -222,7 +221,6 @@ def test_request_error(
"event_id": "12312012041520130908201311212012",
"hash": "11212012123120120415201309082013",
"project_id": self.project.id,
- "message_value": "FailedToFetchError('Charlie didn't bring the ball back')",
},
)
mock_metrics_incr.assert_any_call(
From 52fbd94d58328ce71a9e37100bbd3f2b481a9dbd Mon Sep 17 00:00:00 2001
From: Filippo Pacifici
Date: Thu, 3 Oct 2024 13:30:55 -0700
Subject: [PATCH 095/139] Clean up the support for split queues (#78274)
#76410 introduced a way to deliver messages to a set of split Celery
queues rather than relying on a single queue for post-processing.
This was used to address an incident where we were saturating RabbitMQ
resources for a single queue (which is single-threaded). Splitting
messages across multiple queues solves that problem.
This PR introduces configurable support for split queues for scenarios
where we pass the queue name to `apply_async`.
There is another PR to deal with tasks that define the queue in the task
definition:
https://github.com/getsentry/sentry/pull/76494
It introduces a router class, `SplitQueueRouter`, that maps a queue to a
list of split queues picked in a round-robin way.
This router is used by the client code that schedules a task with
`apply_async`.
The configuration is held in `CELERY_SPLIT_QUEUE_ROUTES`, and all the
split queues have to be declared like any other queue (a minimal sketch
follows the rollout procedure below).
Rollout procedure to remove the hack from #76410:
- Merge this PR; now all the infra is in place. What was added by
"feat(postprocess): INC-855 allow to split post_process_* queues" (#76410)
still works because the two options added to manage the router have
default values
(https://github.com/getsentry/sentry/pull/76494/files#diff-c4bf5bc2eae9efe5e948b5deda98c34fafd95674ae376aaf790c21a039e59f91R2699-R2706),
so the router falls back to the config added by Michal.
- Update the config in the production regions to add the three queues to
`CELERY_SPLIT_QUEUE_ROUTES`.
- Switch the `celery_split_queue_legacy_mode` option to stop using the
legacy config.
- Remove the legacy config and use the router only.
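A minimal sketch of the new pieces (the queue name and sizes are illustrative, and `task_kwargs` is a placeholder; the caller side is condensed from the eventstream change below):

```python
# settings: split post_process_transactions across 5 declared queues,
# of which 3 are currently routed to (illustrative sizes).
CELERY_SPLIT_QUEUE_ROUTES = {
    "post_process_transactions": {"total": 5, "in_use": 3},
}

# caller side: resolve the split queue at scheduling time and pass it
# to apply_async.
router = SplitQueueRouter()
queue = router.route_for_queue("post_process_transactions")
post_process_group.apply_async(kwargs=task_kwargs, queue=queue)
```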
---
src/sentry/conf/server.py | 14 ++-
src/sentry/conf/types/celery.py | 19 ++++
src/sentry/eventstream/base.py | 10 +-
src/sentry/eventstream/kafka/backend.py | 1 +
src/sentry/options/defaults.py | 12 +++
src/sentry/queue/routers.py | 68 ++++++++++++++
src/sentry/utils/celery.py | 53 +++++++++++
tests/sentry/eventstream/test_eventstream.py | 43 ++++-----
tests/sentry/queue/test_routers.py | 96 ++++++++++++++++++++
tests/sentry/utils/test_celery.py | 56 ++++++++++++
10 files changed, 342 insertions(+), 30 deletions(-)
create mode 100644 src/sentry/conf/types/celery.py
create mode 100644 src/sentry/queue/routers.py
create mode 100644 tests/sentry/queue/test_routers.py
create mode 100644 tests/sentry/utils/test_celery.py
diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py
index 252ee023fd943b..b9e2e0fe4724f3 100644
--- a/src/sentry/conf/server.py
+++ b/src/sentry/conf/server.py
@@ -20,6 +20,7 @@
from sentry.conf.api_pagination_allowlist_do_not_modify import (
SENTRY_API_PAGINATION_ALLOWLIST_DO_NOT_MODIFY,
)
+from sentry.conf.types.celery import SplitQueueSize
from sentry.conf.types.kafka_definition import ConsumerDefinition
from sentry.conf.types.logging_config import LoggingConfig
from sentry.conf.types.role_dict import RoleDict
@@ -822,6 +823,15 @@ def SOCIAL_AUTH_DEFAULT_USERNAME() -> str:
"sentry.integrations.tasks",
)
+# tmp(michal): Default configuration for post_process* queues split
+SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER: dict[str, Callable[[], str]] = {}
+
+# Mapping from queue name to split queues to be used by SplitQueueRouter.
+# This is meant to be used in those case where we have to specify the
+# queue name when issuing a task. Example: post process.
+CELERY_SPLIT_QUEUE_ROUTES: Mapping[str, SplitQueueSize] = {}
+
+
default_exchange = Exchange("default", type="direct")
control_exchange = default_exchange
@@ -3517,7 +3527,3 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
SENTRY_WEB_PORT = int(bind[1])
CELERYBEAT_SCHEDULE_FILENAME = f"celerybeat-schedule-{SILO_MODE}"
-
-
-# tmp(michal): Default configuration for post_process* queueus split
-SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER: dict[str, Callable[[], str]] = {}
diff --git a/src/sentry/conf/types/celery.py b/src/sentry/conf/types/celery.py
new file mode 100644
index 00000000000000..17026728ec8252
--- /dev/null
+++ b/src/sentry/conf/types/celery.py
@@ -0,0 +1,19 @@
+from __future__ import annotations
+
+from typing import TypedDict
+
+
+class SplitQueueSize(TypedDict):
+ # The total number of queues to create to split a single queue.
+ # This number triggers the creation of the queues themselves
+ # when the application starts.
+ total: int
+ # The number of queues to actually use. It has to be smaller than or
+ # equal to `total`.
+ # This is the number of queues the router uses when the split
+ # is enabled on this queue.
+ # This number exists so that the number of queues can be safely
+ # increased or decreased: the queues have to be created first, then
+ # workers have to start consuming from them, and only then can we
+ # start producing to them.
+ in_use: int
diff --git a/src/sentry/eventstream/base.py b/src/sentry/eventstream/base.py
index dc7a7dc411fb03..ac505a9a7a215b 100644
--- a/src/sentry/eventstream/base.py
+++ b/src/sentry/eventstream/base.py
@@ -6,9 +6,8 @@
from enum import Enum
from typing import TYPE_CHECKING, Any, Optional, TypedDict, cast
-from django.conf import settings
-
from sentry.issues.issue_occurrence import IssueOccurrence
+from sentry.queue.routers import SplitQueueRouter
from sentry.tasks.post_process import post_process_group
from sentry.utils.cache import cache_key_for_event
from sentry.utils.services import Service
@@ -65,6 +64,9 @@ class EventStream(Service):
"_get_event_type",
)
+ def __init__(self, **options: Any) -> None:
+ self.__celery_router = SplitQueueRouter()
+
def _dispatch_post_process_group_task(
self,
event_id: str,
@@ -108,9 +110,7 @@ def _get_queue_for_post_process(self, event: Event | GroupEvent) -> str:
else:
default_queue = "post_process_errors"
- return settings.SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER.get(
- default_queue, lambda: default_queue
- )()
+ return self.__celery_router.route_for_queue(default_queue)
def _get_occurrence_data(self, event: Event | GroupEvent) -> MutableMapping[str, Any]:
occurrence = cast(Optional[IssueOccurrence], getattr(event, "occurrence", None))
diff --git a/src/sentry/eventstream/kafka/backend.py b/src/sentry/eventstream/kafka/backend.py
index 8dc599d10a4575..d89524cc2b36e2 100644
--- a/src/sentry/eventstream/kafka/backend.py
+++ b/src/sentry/eventstream/kafka/backend.py
@@ -25,6 +25,7 @@
class KafkaEventStream(SnubaProtocolEventStream):
def __init__(self, **options: Any) -> None:
+ super().__init__(**options)
self.topic = Topic.EVENTS
self.transactions_topic = Topic.TRANSACTIONS
self.issue_platform_topic = Topic.EVENTSTREAM_GENERIC
diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py
index 83572f3dc0471f..5acba417775549 100644
--- a/src/sentry/options/defaults.py
+++ b/src/sentry/options/defaults.py
@@ -2748,6 +2748,18 @@
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
+register(
+ "celery_split_queue_legacy_mode",
+ default=["post_process_transactions"],
+ flags=FLAG_AUTOMATOR_MODIFIABLE,
+)
+
+register(
+ "celery_split_queue_rollout",
+ default={"post_process_transactions": 1.0},
+ flags=FLAG_AUTOMATOR_MODIFIABLE,
+)
+
# Secret Scanning. Allows to temporarily disable signature verification.
register(
"secret-scanning.github.enable-signature-verification",
diff --git a/src/sentry/queue/routers.py b/src/sentry/queue/routers.py
new file mode 100644
index 00000000000000..f0b13dc5e504b8
--- /dev/null
+++ b/src/sentry/queue/routers.py
@@ -0,0 +1,68 @@
+import logging
+import random
+from collections.abc import Sequence
+from itertools import cycle
+
+from django.conf import settings
+
+from sentry import options
+from sentry.celery import app
+from sentry.utils.celery import build_queue_names
+
+logger = logging.getLogger(__name__)
+
+
+def _get_known_queues() -> set[str]:
+ return {c_queue.name for c_queue in app.conf.CELERY_QUEUES}
+
+
+def _validate_destinations(destinations: Sequence[str]) -> None:
+ for dest in destinations:
+ assert dest in _get_known_queues(), f"Queue {dest} in split queue config is not declared."
+
+
+class SplitQueueRouter:
+ """
+ Returns the split queue to use for a Celery queue.
+    Split queues allow us to spread the load of a queue across multiple ones.
+    This takes a queue name as input and returns the split queue to use. It is
+    meant to be used by the code that schedules the task.
+ Each split queue can be individually rolled out via options.
+ WARNING: Do not forget to configure your workers to listen to the
+ queues appropriately before you start routing messages.
+ """
+
+ def __init__(self) -> None:
+ known_queues = _get_known_queues()
+ self.__queue_routers = {}
+ for source, dest_config in settings.CELERY_SPLIT_QUEUE_ROUTES.items():
+ assert source in known_queues, f"Queue {source} in split queue config is not declared."
+ assert dest_config["in_use"] <= dest_config["total"]
+
+ if dest_config["in_use"] >= 2:
+ destinations = build_queue_names(source, dest_config["in_use"])
+ _validate_destinations(destinations)
+ self.__queue_routers[source] = cycle(destinations)
+ else:
+                logger.error(
+                    "Invalid configuration for queue %s. in_use is not greater than 1: %d. Falling back to the source queue",
+                    source,
+                    dest_config["in_use"],
+                )
+
+ def route_for_queue(self, queue: str) -> str:
+ rollout_rate = options.get("celery_split_queue_rollout").get(queue, 0.0)
+ if random.random() >= rollout_rate:
+ return queue
+
+ if queue in set(options.get("celery_split_queue_legacy_mode")):
+            # Use the legacy route.
+            # This router requires the routing logic to be defined in the
+            # settings file.
+ return settings.SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER.get(queue, lambda: queue)()
+ else:
+ router = self.__queue_routers.get(queue)
+ if router is not None:
+ return next(router)
+ else:
+ return queue
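A rough usage sketch for the producer side, assuming the split queues are declared in CELERY_QUEUES and the "celery_split_queue_rollout" option is set for the queue:

    from sentry.queue.routers import SplitQueueRouter

    router = SplitQueueRouter()
    # Each call advances the round-robin cycle, e.g. post_process_transactions_1,
    # then _2, then _3, then back to _1. Queues that are unknown or not rolled
    # out are returned unchanged.
    queue_name = router.route_for_queue("post_process_transactions")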
diff --git a/src/sentry/utils/celery.py b/src/sentry/utils/celery.py
index 05e27fec298bf7..f79fdce55573f0 100644
--- a/src/sentry/utils/celery.py
+++ b/src/sentry/utils/celery.py
@@ -1,9 +1,62 @@
+from collections.abc import Mapping, MutableSequence, Sequence
from random import randint
from typing import Any
from celery.schedules import crontab
+from kombu import Queue
+
+from sentry.conf.types.celery import SplitQueueSize
def crontab_with_minute_jitter(*args: Any, **kwargs: Any) -> crontab:
kwargs["minute"] = randint(0, 59)
return crontab(*args, **kwargs)
+
+
+def build_queue_names(base_name: str, quantity: int) -> Sequence[str]:
+ ret = []
+ for index in range(quantity):
+ name = f"{base_name}_{index + 1}"
+ ret.append(name)
+ return ret
+
+
+def make_split_queues(config: Mapping[str, SplitQueueSize]) -> Sequence[Queue]:
+ """
+ Generates the split queue definitions from the mapping between
+ base queue and split queue config.
+ """
+ ret: MutableSequence[Queue] = []
+ for base_name, conf in config.items():
+ names = [
+ Queue(name=name, routing_key=name)
+ for name in build_queue_names(base_name, conf["total"])
+ ]
+ ret.extend(names)
+
+ return ret
+
+
+def safe_append(queues: MutableSequence[Queue], queue: Queue) -> None:
+ """
+    We define queues as lists in the configuration and we allow overriding
+    the config per environment.
+    Unfortunately, if you add a queue with the same name twice to the Celery
+    config, Celery just creates the queue twice. This can be undesired behavior
+    depending on the Celery backend. So this method adds queues to a list
+    without duplicates.
+ """
+ existing_queue_names = {q.name for q in queues}
+ if queue.name not in existing_queue_names:
+ queues.append(queue)
+
+
+def safe_extend(queues: MutableSequence[Queue], to_add: Sequence[Queue]) -> None:
+ """
+    Like `safe_append`, but works like `extend`, adding multiple queues
+    to the config.
+ """
+ existing_queue_names = {q.name for q in queues}
+ for q in to_add:
+ if q.name not in existing_queue_names:
+ queues.append(q)
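A short sketch of how these helpers are expected to compose when building the Celery queue list; CELERY_QUEUES and CELERY_SPLIT_QUEUE_ROUTES stand in for the real settings values:

    from sentry.utils.celery import make_split_queues, safe_extend

    # Append the generated split queues to an existing queue list without
    # creating duplicates if the same config is applied twice.
    safe_extend(CELERY_QUEUES, make_split_queues(CELERY_SPLIT_QUEUE_ROUTES))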
diff --git a/tests/sentry/eventstream/test_eventstream.py b/tests/sentry/eventstream/test_eventstream.py
index e5f4a19f9c278b..0dca159b59603a 100644
--- a/tests/sentry/eventstream/test_eventstream.py
+++ b/tests/sentry/eventstream/test_eventstream.py
@@ -5,7 +5,6 @@
from unittest.mock import Mock, patch
import pytest
-from django.conf import settings
from django.test import override_settings
from django.utils import timezone
from snuba_sdk import Column, Condition, Entity, Op, Query, Request
@@ -341,18 +340,7 @@ def test_transaction_queue(self, mock_eventstream_insert):
@override_settings()
@patch("sentry.eventstream.backend.insert", autospec=True)
- def test_queue_split_router(self, mock_eventstream_insert):
- queues = [
- "post_process_transactions-1",
- "post_process_transactions-2",
- "post_process_transactions-3",
- ]
- queues_gen = itertools.cycle(queues)
-
- settings.SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER = {
- "post_process_transactions": lambda: next(queues_gen)
- }
-
+ def test_queue_legacy_split_router(self, mock_eventstream_insert):
event = self.__build_transaction_event()
event.group_id = None
event.groups = [self.group]
@@ -371,14 +359,27 @@ def test_queue_split_router(self, mock_eventstream_insert):
"group_states": [{"id": event.groups[0].id, **group_state}],
}
- headers, body = self.__produce_payload(*insert_args, **insert_kwargs)
- assert body["queue"] == "post_process_transactions-1"
- headers, body = self.__produce_payload(*insert_args, **insert_kwargs)
- assert body["queue"] == "post_process_transactions-2"
- headers, body = self.__produce_payload(*insert_args, **insert_kwargs)
- assert body["queue"] == "post_process_transactions-3"
- headers, body = self.__produce_payload(*insert_args, **insert_kwargs)
- assert body["queue"] == "post_process_transactions-1"
+ queues_gen = itertools.cycle(
+ [
+ "post_process_transactions_1",
+ "post_process_transactions_2",
+ "post_process_transactions_3",
+ ]
+ )
+
+ with override_settings(
+ SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER={
+ "post_process_transactions": lambda: next(queues_gen)
+ }
+ ):
+ _, body = self.__produce_payload(*insert_args, **insert_kwargs)
+ assert body["queue"] == "post_process_transactions_1"
+ _, body = self.__produce_payload(*insert_args, **insert_kwargs)
+ assert body["queue"] == "post_process_transactions_2"
+ _, body = self.__produce_payload(*insert_args, **insert_kwargs)
+ assert body["queue"] == "post_process_transactions_3"
+ _, body = self.__produce_payload(*insert_args, **insert_kwargs)
+ assert body["queue"] == "post_process_transactions_1"
# test default assignment
insert_kwargs = {
diff --git a/tests/sentry/queue/test_routers.py b/tests/sentry/queue/test_routers.py
new file mode 100644
index 00000000000000..dc7668e6748b45
--- /dev/null
+++ b/tests/sentry/queue/test_routers.py
@@ -0,0 +1,96 @@
+import itertools
+from collections.abc import Mapping
+
+from django.conf import settings
+from django.test import override_settings
+
+from sentry.conf.types.celery import SplitQueueSize
+from sentry.queue.routers import SplitQueueRouter
+from sentry.testutils.helpers.options import override_options
+from sentry.testutils.pytest.fixtures import django_db_all
+from sentry.utils.celery import make_split_queues
+
+
+@django_db_all
+def test_split_router_legacy() -> None:
+ queues = [
+ "post_process_transactions_1",
+ "post_process_transactions_2",
+ "post_process_transactions_3",
+ ]
+ queues_gen = itertools.cycle(queues)
+ with override_settings(
+ SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER={
+ "post_process_transactions": lambda: next(queues_gen),
+ },
+ CELERY_SPLIT_QUEUE_ROUTES={},
+ ):
+ router = SplitQueueRouter()
+ assert router.route_for_queue("save_event") == "save_event"
+ assert router.route_for_queue("post_process_transactions") == "post_process_transactions_1"
+ assert router.route_for_queue("post_process_transactions") == "post_process_transactions_2"
+ assert router.route_for_queue("post_process_transactions") == "post_process_transactions_3"
+
+ with override_options({"celery_split_queue_legacy_mode": []}):
+        # Legacy mode is disabled. As the split queue config is not there,
+        # the split does not happen.
+ router = SplitQueueRouter()
+ assert (
+ router.route_for_queue("post_process_transactions") == "post_process_transactions"
+ )
+
+
+CELERY_SPLIT_QUEUE_ROUTES: Mapping[str, SplitQueueSize] = {
+ "post_process_transactions": {"total": 5, "in_use": 3},
+ "post_process_errors": {"total": 5, "in_use": 1},
+}
+
+
+@django_db_all
+@override_settings(
+ SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER={},
+ CELERY_SPLIT_QUEUE_ROUTES=CELERY_SPLIT_QUEUE_ROUTES,
+ CELERY_QUEUES=[
+ *settings.CELERY_QUEUES,
+ *make_split_queues(CELERY_SPLIT_QUEUE_ROUTES),
+ ],
+)
+@override_options(
+ {
+ "celery_split_queue_rollout": {
+ "post_process_transactions": 0.0,
+ },
+ }
+)
+def test_router_not_rolled_out() -> None:
+ router = SplitQueueRouter()
+ assert router.route_for_queue("post_process_transactions") == "post_process_transactions"
+
+
+@django_db_all
+@override_settings(
+ SENTRY_POST_PROCESS_QUEUE_SPLIT_ROUTER={},
+ CELERY_SPLIT_QUEUE_ROUTES=CELERY_SPLIT_QUEUE_ROUTES,
+ CELERY_QUEUES=[
+ *settings.CELERY_QUEUES,
+ *make_split_queues(CELERY_SPLIT_QUEUE_ROUTES),
+ ],
+)
+@override_options(
+ {
+ "celery_split_queue_legacy_mode": [],
+ "celery_split_queue_rollout": {
+ "post_process_transactions": 1.0,
+ "post_process_errors": 1.0,
+ },
+ }
+)
+def test_router_rolled_out() -> None:
+ router = SplitQueueRouter()
+ assert router.route_for_queue("post_process_transactions") == "post_process_transactions_1"
+ assert router.route_for_queue("post_process_transactions") == "post_process_transactions_2"
+ assert router.route_for_queue("post_process_transactions") == "post_process_transactions_3"
+ assert router.route_for_queue("post_process_transactions") == "post_process_transactions_1"
+    # Here the split is disabled because the config contains in_use = 1
+ assert router.route_for_queue("post_process_errors") == "post_process_errors"
+ assert router.route_for_queue("post_process_issue_platform") == "post_process_issue_platform"
diff --git a/tests/sentry/utils/test_celery.py b/tests/sentry/utils/test_celery.py
new file mode 100644
index 00000000000000..b8c577ed3a5c8d
--- /dev/null
+++ b/tests/sentry/utils/test_celery.py
@@ -0,0 +1,56 @@
+from kombu import Queue
+
+from sentry.utils.celery import build_queue_names, make_split_queues, safe_append, safe_extend
+
+
+def test_split_queue() -> None:
+ assert make_split_queues(
+ {
+ "my_queue": {"total": 3, "in_use": 1},
+ "my_other_queue": {"total": 1, "in_use": 1},
+ }
+ ) == [
+ Queue(name="my_queue_1", routing_key="my_queue_1"),
+ Queue(name="my_queue_2", routing_key="my_queue_2"),
+ Queue(name="my_queue_3", routing_key="my_queue_3"),
+ Queue(name="my_other_queue_1", routing_key="my_other_queue_1"),
+ ]
+
+
+def test_build_names() -> None:
+ assert build_queue_names("my_queue", 3) == ["my_queue_1", "my_queue_2", "my_queue_3"]
+
+
+def test_safe_append() -> None:
+ queues = [
+ Queue(name="my_queue_1", routing_key="my_queue_1"),
+ Queue(name="my_queue_2", routing_key="my_queue_2"),
+ ]
+
+ safe_append(queues, Queue(name="my_queue_1", routing_key="my_queue_1"))
+ safe_append(queues, Queue(name="my_queue_3", routing_key="my_queue_3"))
+
+ assert queues == [
+ Queue(name="my_queue_1", routing_key="my_queue_1"),
+ Queue(name="my_queue_2", routing_key="my_queue_2"),
+ Queue(name="my_queue_3", routing_key="my_queue_3"),
+ ]
+
+
+def test_safe_extend() -> None:
+ queues = [
+ Queue(name="my_queue_1", routing_key="my_queue_1"),
+ ]
+
+ safe_extend(
+ queues,
+ [
+ Queue(name="my_queue_1", routing_key="my_queue_1"),
+ Queue(name="my_queue_2", routing_key="my_queue_2"),
+ ],
+ )
+
+ assert queues == [
+ Queue(name="my_queue_1", routing_key="my_queue_1"),
+ Queue(name="my_queue_2", routing_key="my_queue_2"),
+ ]
From a5f498fbbfa64dde7085f10ff36001a61f81032b Mon Sep 17 00:00:00 2001
From: Andrew Liu <159852527+aliu39@users.noreply.github.com>
Date: Thu, 3 Oct 2024 13:32:15 -0700
Subject: [PATCH 096/139] feat(snuba/replay): bump snuba-sdk ver. and disable
sampling in replay tag key queries (#78534)
This lets us query OrganizationTagsEndpoint with dataset=replays.
- Use the newest snuba-sdk release, with the change made in
https://github.com/getsentry/snuba-sdk/pull/197.
- Disable the sampling optimization ("turbo"), as the replays table
doesn't support it.
- Add a test which I verified works with the snuba devserver, on
branch https://github.com/getsentry/snuba/pull/6378
Ref https://github.com/getsentry/sentry/issues/78531
-> depends on https://github.com/getsentry/snuba/pull/6378.
---
requirements-base.txt | 2 +-
requirements-dev-frozen.txt | 2 +-
requirements-frozen.txt | 2 +-
src/sentry/tagstore/snuba/backend.py | 5 ++
.../api/endpoints/test_organization_tags.py | 64 ++++++++++++++++++-
5 files changed, 71 insertions(+), 4 deletions(-)
diff --git a/requirements-base.txt b/requirements-base.txt
index 1e1c4ad18850df..41c82118476ca1 100644
--- a/requirements-base.txt
+++ b/requirements-base.txt
@@ -72,7 +72,7 @@ sentry-redis-tools>=0.1.7
sentry-relay>=0.9.2
sentry-sdk>=2.12.0
slack-sdk>=3.27.2
-snuba-sdk>=3.0.38
+snuba-sdk>=3.0.40
simplejson>=3.17.6
sqlparse>=0.4.4
statsd>=3.3
diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt
index 37791bbcc4d664..d1cf1d4bd3bb60 100644
--- a/requirements-dev-frozen.txt
+++ b/requirements-dev-frozen.txt
@@ -193,7 +193,7 @@ simplejson==3.17.6
six==1.16.0
slack-sdk==3.27.2
sniffio==1.2.0
-snuba-sdk==3.0.39
+snuba-sdk==3.0.40
sortedcontainers==2.4.0
soupsieve==2.3.2.post1
sqlparse==0.5.0
diff --git a/requirements-frozen.txt b/requirements-frozen.txt
index 888fda53233efe..e02625e9f78aa6 100644
--- a/requirements-frozen.txt
+++ b/requirements-frozen.txt
@@ -134,7 +134,7 @@ simplejson==3.17.6
six==1.16.0
slack-sdk==3.27.2
sniffio==1.3.0
-snuba-sdk==3.0.39
+snuba-sdk==3.0.40
soupsieve==2.3.2.post1
sqlparse==0.5.0
statsd==3.3.0
diff --git a/src/sentry/tagstore/snuba/backend.py b/src/sentry/tagstore/snuba/backend.py
index e7839bd3622f8d..bf9922b42b5b94 100644
--- a/src/sentry/tagstore/snuba/backend.py
+++ b/src/sentry/tagstore/snuba/backend.py
@@ -441,6 +441,11 @@ def get_tag_keys_for_projects(
# So only disable sampling if the timerange is short enough.
if len(projects) <= max_unsampled_projects and end - start <= timedelta(days=14):
optimize_kwargs["sample"] = 1
+
+ # Replays doesn't support sampling.
+ if dataset == Dataset.Replays:
+ optimize_kwargs = {}
+
return self.__get_tag_keys_for_projects(
projects,
None,
diff --git a/tests/snuba/api/endpoints/test_organization_tags.py b/tests/snuba/api/endpoints/test_organization_tags.py
index 5431dcf681b74f..f3bce93ff3f786 100644
--- a/tests/snuba/api/endpoints/test_organization_tags.py
+++ b/tests/snuba/api/endpoints/test_organization_tags.py
@@ -4,7 +4,8 @@
from django.urls import reverse
from rest_framework.exceptions import ErrorDetail
-from sentry.testutils.cases import APITestCase, SnubaTestCase
+from sentry.replays.testutils import mock_replay
+from sentry.testutils.cases import APITestCase, ReplaysSnubaTestCase, SnubaTestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.utils.samples import load_data
from tests.sentry.issues.test_utils import OccurrenceTestMixin
@@ -385,3 +386,64 @@ def test_different_times_retrieves_cache(self):
cached_data = response.data
assert original_data == cached_data
+
+
+class ReplayOrganizationTagsTest(APITestCase, ReplaysSnubaTestCase):
+ def test_dataset_replays(self):
+ self.login_as(user=self.user)
+ replay1_id = uuid.uuid4().hex
+ replay2_id = uuid.uuid4().hex
+ replay3_id = uuid.uuid4().hex
+ self.r1_seq0_timestamp = before_now(seconds=22)
+ self.r1_seq1_timestamp = before_now(seconds=15)
+ self.r2_seq0_timestamp = before_now(seconds=10)
+ self.r3_seq0_timestamp = before_now(seconds=10)
+ self.store_replays(
+ mock_replay(
+ self.r1_seq0_timestamp,
+ self.project.id,
+ replay1_id,
+ tags={"fruit": "orange"},
+ segment_id=0,
+ ),
+ )
+ self.store_replays(
+ mock_replay(
+ self.r1_seq1_timestamp,
+ self.project.id,
+ replay1_id,
+ tags={"fruit": "orange"},
+ segment_id=1,
+ ),
+ )
+
+ self.store_replays(
+ mock_replay(
+ self.r2_seq0_timestamp,
+ self.project.id,
+ replay2_id,
+ tags={"fruit": "orange"},
+ )
+ )
+ self.store_replays(
+ mock_replay(
+ self.r3_seq0_timestamp,
+ self.project.id,
+ replay3_id,
+ tags={"fruit": "apple", "drink": "water"},
+ )
+ )
+
+ url = reverse(
+ "sentry-api-0-organization-tags",
+ kwargs={"organization_id_or_slug": self.organization.slug},
+ )
+ response = self.client.get(url, {"statsPeriod": "14d", "dataset": "replays"}, format="json")
+
+ assert response.status_code == 200, response.content
+ data = response.data
+ data.sort(key=lambda val: val["name"])
+ assert data == [
+ {"key": "drink", "name": "Drink", "totalValues": 1},
+ {"key": "fruit", "name": "Fruit", "totalValues": 4},
+ ]
From f45b504f5630a1a484144729b19846378e262995 Mon Sep 17 00:00:00 2001
From: Ryan Skonnord
Date: Thu, 3 Oct 2024 13:44:05 -0700
Subject: [PATCH 097/139] feat(integrations): Introduce common abstraction for
messaging SLOs (#78482)
Introduce EventLifecycle and EventLifecycleMetric to capture
success/failure metrics, with the purpose of enforcing a consistent key
scheme across different integrations.
Introduce MessagingInteractionEvent as the first EventLifecycleMetric
subclass, to unify messaging integrations. Represent distinct messaging
features or behaviors with the MessagingInteractionType, whose values
should eventually map one-to-one onto desired messaging SLOs.
In the various messaging integrations, capture an initial set of those
events represented by MessagingInteractionType values.
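A minimal sketch of the intended wrapping pattern, modeled on the msteams change in this patch; the handler and the do_resolve helper are hypothetical:

    from sentry.integrations.messaging.metrics import (
        MessagingInteractionEvent,
        MessagingInteractionType,
    )
    from sentry.integrations.msteams.spec import MsTeamsMessagingSpec

    def handle_resolve(request):  # hypothetical handler
        event = MessagingInteractionEvent(
            MessagingInteractionType.RESOLVE, MsTeamsMessagingSpec()
        )
        with event.capture() as lifecycle:
            response = do_resolve(request)  # hypothetical call
            if response.status_code >= 400:
                lifecycle.record_failure()  # soft failure: bad remote status
            return response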
---
.../integrations/discord/webhooks/command.py | 6 +
.../discord/webhooks/message_component.py | 34 +++-
src/sentry/integrations/messaging/commands.py | 88 ++++++--
src/sentry/integrations/messaging/metrics.py | 64 ++++++
src/sentry/integrations/msteams/webhook.py | 50 +++--
.../integrations/slack/webhooks/action.py | 32 ++-
.../integrations/slack/webhooks/base.py | 6 +
src/sentry/integrations/utils/metrics.py | 191 ++++++++++++++++++
.../discord/webhooks/test_command.py | 9 +-
.../webhooks/test_message_component.py | 10 +-
.../msteams/test_action_state_change.py | 9 +-
.../integrations/msteams/test_webhook.py | 9 +-
.../slack/webhooks/actions/test_status.py | 9 +-
.../slack/webhooks/commands/test_link_team.py | 11 +-
.../slack/webhooks/commands/test_link_user.py | 11 +-
.../slack/webhooks/events/test_message_im.py | 9 +-
16 files changed, 487 insertions(+), 61 deletions(-)
create mode 100644 src/sentry/integrations/messaging/metrics.py
create mode 100644 src/sentry/integrations/utils/metrics.py
diff --git a/src/sentry/integrations/discord/webhooks/command.py b/src/sentry/integrations/discord/webhooks/command.py
index b5a5dcc16ae381..471b7bdcbead1d 100644
--- a/src/sentry/integrations/discord/webhooks/command.py
+++ b/src/sentry/integrations/discord/webhooks/command.py
@@ -4,6 +4,7 @@
from rest_framework.response import Response
from sentry.integrations.discord.requests.base import DiscordRequest
+from sentry.integrations.discord.spec import DiscordMessagingSpec
from sentry.integrations.discord.utils import logger
from sentry.integrations.discord.views.link_identity import build_linking_url
from sentry.integrations.discord.views.unlink_identity import build_unlinking_url
@@ -15,6 +16,7 @@
MessagingIntegrationCommand,
MessagingIntegrationCommandDispatcher,
)
+from sentry.integrations.messaging.spec import MessagingIntegrationSpec
LINK_USER_MESSAGE = "[Click here]({url}) to link your Discord account to your Sentry account."
ALREADY_LINKED_MESSAGE = "You are already linked to the Sentry account with email: `{email}`."
@@ -59,6 +61,10 @@ def handle(self) -> Response:
class DiscordCommandDispatcher(MessagingIntegrationCommandDispatcher[str]):
request: DiscordRequest
+ @property
+ def integration_spec(self) -> MessagingIntegrationSpec:
+ return DiscordMessagingSpec()
+
@property
def command_handlers(
self,
diff --git a/src/sentry/integrations/discord/webhooks/message_component.py b/src/sentry/integrations/discord/webhooks/message_component.py
index de920038248619..39d58be7d61ae2 100644
--- a/src/sentry/integrations/discord/webhooks/message_component.py
+++ b/src/sentry/integrations/discord/webhooks/message_component.py
@@ -18,7 +18,12 @@
)
from sentry.integrations.discord.message_builder.base.flags import DiscordMessageFlags
from sentry.integrations.discord.requests.base import DiscordRequest
+from sentry.integrations.discord.spec import DiscordMessagingSpec
from sentry.integrations.discord.webhooks.handler import DiscordInteractionHandler
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
from sentry.models.activity import ActivityIntegration
from sentry.models.group import Group
from sentry.models.grouphistory import STATUS_TO_STRING_LOOKUP, GroupHistoryStatus
@@ -85,36 +90,51 @@ def handle(self) -> Response:
)
return self.send_message(NOT_IN_ORG)
+ def record_event(interaction_type: MessagingInteractionType) -> MessagingInteractionEvent:
+ return MessagingInteractionEvent(
+ interaction_type,
+ DiscordMessagingSpec(),
+ user=self.user,
+ organization=(self.group.organization if self.group else None),
+ )
+
if self.custom_id.startswith(CustomIds.ASSIGN_DIALOG):
logger.info("discord.interaction.component.assign_dialog", extra={**logging_data})
- return self.assign_dialog()
+ with record_event(MessagingInteractionType.ASSIGN_DIALOG).capture():
+ return self.assign_dialog()
elif self.custom_id.startswith(CustomIds.ASSIGN):
logger.info(
"discord.interaction.component.assign",
extra={**logging_data, "assign_to": self.request.get_selected_options()[0]},
)
- return self.assign()
+ with record_event(MessagingInteractionType.ASSIGN).capture():
+ return self.assign()
elif self.custom_id.startswith(CustomIds.RESOLVE_DIALOG):
logger.info("discord.interaction.component.resolve_dialog", extra={**logging_data})
- return self.resolve_dialog()
+ with record_event(MessagingInteractionType.RESOLVE_DIALOG).capture():
+ return self.resolve_dialog()
elif self.custom_id.startswith(CustomIds.RESOLVE):
logger.info("discord.interaction.component.resolve", extra={**logging_data})
- return self.resolve()
+ with record_event(MessagingInteractionType.RESOLVE).capture():
+ return self.resolve()
elif self.custom_id.startswith(CustomIds.UNRESOLVE):
logger.info("discord.interaction.component.unresolve", extra={**logging_data})
- return self.unresolve()
+ with record_event(MessagingInteractionType.UNRESOLVE).capture():
+ return self.unresolve()
elif self.custom_id.startswith(CustomIds.MARK_ONGOING):
logger.info("discord.interaction.component.mark_ongoing", extra={**logging_data})
- return self.unresolve(from_mark_ongoing=True)
+ with record_event(MessagingInteractionType.MARK_ONGOING).capture():
+ return self.unresolve(from_mark_ongoing=True)
elif self.custom_id.startswith(CustomIds.ARCHIVE):
logger.info("discord.interaction.component.archive", extra={**logging_data})
- return self.archive()
+ with record_event(MessagingInteractionType.ARCHIVE).capture():
+ return self.archive()
logger.warning("discord.interaction.component.unknown_custom_id", extra={**logging_data})
return self.send_message(INVALID_GROUP_ID)
diff --git a/src/sentry/integrations/messaging/commands.py b/src/sentry/integrations/messaging/commands.py
index 767ceadd59a0c9..32968a56e8ef24 100644
--- a/src/sentry/integrations/messaging/commands.py
+++ b/src/sentry/integrations/messaging/commands.py
@@ -4,6 +4,12 @@
from dataclasses import dataclass
from typing import Generic, TypeVar
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
+from sentry.integrations.messaging.spec import MessagingIntegrationSpec
+
@dataclass(frozen=True, eq=True)
class CommandInput:
@@ -45,12 +51,21 @@ def __repr__(self):
class MessagingIntegrationCommand:
- def __init__(self, name: str, command_text: str, aliases: Iterable[str] = ()) -> None:
+ def __init__(
+ self,
+ interaction_type: MessagingInteractionType,
+ command_text: str,
+ aliases: Iterable[str] = (),
+ ) -> None:
super().__init__()
- self.name = name
+ self.interaction_type = interaction_type
self.command_slug = CommandSlug(command_text)
self.aliases = frozenset(CommandSlug(alias) for alias in aliases)
+ @property
+ def name(self) -> str:
+ return self.interaction_type.value
+
@staticmethod
def _to_tokens(text: str) -> tuple[str, ...]:
return tuple(token.casefold() for token in text.strip().split())
@@ -61,11 +76,27 @@ def get_all_command_slugs(self) -> Iterable[CommandSlug]:
MESSAGING_INTEGRATION_COMMANDS = (
- HELP := MessagingIntegrationCommand("HELP", "help", aliases=("", "support", "docs")),
- LINK_IDENTITY := MessagingIntegrationCommand("LINK_IDENTITY", "link"),
- UNLINK_IDENTITY := MessagingIntegrationCommand("UNLINK_IDENTITY", "unlink"),
- LINK_TEAM := MessagingIntegrationCommand("LINK_TEAM", "link team"),
- UNLINK_TEAM := MessagingIntegrationCommand("UNLINK_TEAM", "unlink team"),
+ HELP := MessagingIntegrationCommand(
+ MessagingInteractionType.HELP,
+ "help",
+ aliases=("", "support", "docs"),
+ ),
+ LINK_IDENTITY := MessagingIntegrationCommand(
+ MessagingInteractionType.LINK_IDENTITY,
+ "link",
+ ),
+ UNLINK_IDENTITY := MessagingIntegrationCommand(
+ MessagingInteractionType.UNLINK_IDENTITY,
+ "unlink",
+ ),
+ LINK_TEAM := MessagingIntegrationCommand(
+ MessagingInteractionType.LINK_TEAM,
+ "link team",
+ ),
+ UNLINK_TEAM := MessagingIntegrationCommand(
+ MessagingInteractionType.UNLINK_TEAM,
+ "unlink team",
+ ),
)
R = TypeVar("R") # response
@@ -74,6 +105,11 @@ def get_all_command_slugs(self) -> Iterable[CommandSlug]:
class MessagingIntegrationCommandDispatcher(Generic[R], ABC):
"""The set of commands handled by one messaging integration."""
+ @property
+ @abstractmethod
+ def integration_spec(self) -> MessagingIntegrationSpec:
+ raise NotImplementedError
+
@property
@abstractmethod
def command_handlers(
@@ -81,23 +117,35 @@ def command_handlers(
) -> Iterable[tuple[MessagingIntegrationCommand, Callable[[CommandInput], R]]]:
raise NotImplementedError
+ def get_event(self, command: MessagingIntegrationCommand) -> MessagingInteractionEvent:
+ return MessagingInteractionEvent(
+ interaction_type=command.interaction_type, spec=self.integration_spec
+ )
+
def dispatch(self, cmd_input: CommandInput) -> R:
+ @dataclass(frozen=True)
+ class CandidateHandler:
+ command: MessagingIntegrationCommand
+ slug: CommandSlug
+ callback: Callable[[CommandInput], R]
+
+ def parsing_order(self) -> int:
+ # Sort by descending length of arg tokens. If one slug is a prefix of
+ # another (e.g., "link" and "link team"), we must check for the longer
+ # one first.
+ return -len(self.slug.tokens)
+
candidate_handlers = [
- (slug, callback)
+ CandidateHandler(command, slug, callback)
for (command, callback) in self.command_handlers
for slug in command.get_all_command_slugs()
]
+ candidate_handlers.sort(key=CandidateHandler.parsing_order)
+
+ for handler in candidate_handlers:
+ if handler.slug.does_match(cmd_input):
+ arg_input = cmd_input.adjust(handler.slug)
+ with self.get_event(handler.command).capture(assume_success=False):
+ return handler.callback(arg_input)
- def parsing_order(handler: tuple[CommandSlug, Callable[[CommandInput], R]]) -> int:
- # Sort by descending length of arg tokens. If one slug is a prefix of
- # another (e.g., "link" and "link team"), we must check for the longer
- # one first.
- slug, _ = handler
- return -len(slug.tokens)
-
- candidate_handlers.sort(key=parsing_order)
- for (slug, callback) in candidate_handlers:
- if slug.does_match(cmd_input):
- arg_input = cmd_input.adjust(slug)
- return callback(arg_input)
raise CommandNotMatchedError(f"{cmd_input=!r}", cmd_input)
diff --git a/src/sentry/integrations/messaging/metrics.py b/src/sentry/integrations/messaging/metrics.py
new file mode 100644
index 00000000000000..13f0c72acbafcb
--- /dev/null
+++ b/src/sentry/integrations/messaging/metrics.py
@@ -0,0 +1,64 @@
+from collections.abc import Mapping
+from dataclasses import dataclass
+from enum import Enum
+from typing import Any
+
+from sentry.integrations.messaging.spec import MessagingIntegrationSpec
+from sentry.integrations.utils.metrics import EventLifecycleMetric, EventLifecycleOutcome
+from sentry.models.organization import Organization
+from sentry.organizations.services.organization import RpcOrganization
+from sentry.users.models import User
+from sentry.users.services.user import RpcUser
+
+
+class MessagingInteractionType(Enum):
+ """A way in which a user can interact with Sentry through a messaging app."""
+
+ # General interactions
+ HELP = "HELP"
+ LINK_IDENTITY = "LINK_IDENTITY"
+ UNLINK_IDENTITY = "UNLINK_IDENTITY"
+ LINK_TEAM = "LINK_TEAM"
+ UNLINK_TEAM = "UNLINK_TEAM"
+
+ # Interactions on Issues
+ STATUS = "STATUS"
+ ARCHIVE_DIALOG = "ARCHIVE_DIALOG"
+ ARCHIVE = "ARCHIVE"
+ ASSIGN_DIALOG = "ASSIGN_DIALOG"
+ ASSIGN = "ASSIGN"
+    UNASSIGN = "UNASSIGN"
+ RESOLVE_DIALOG = "RESOLVE_DIALOG"
+ RESOLVE = "RESOLVE"
+ UNRESOLVE = "UNRESOLVE"
+ IGNORE = "IGNORE"
+ MARK_ONGOING = "MARK_ONGOING"
+
+ def __str__(self) -> str:
+ return self.value.lower()
+
+
+@dataclass
+class MessagingInteractionEvent(EventLifecycleMetric):
+ """An instance to be recorded of a user interacting through a messaging app."""
+
+ interaction_type: MessagingInteractionType
+ spec: MessagingIntegrationSpec
+
+ # Optional attributes to populate extras
+ user: User | RpcUser | None = None
+ organization: Organization | RpcOrganization | None = None
+
+ def get_key(self, outcome: EventLifecycleOutcome) -> str:
+ return self.get_standard_key(
+ domain="messaging",
+ integration_name=self.spec.provider_slug,
+ interaction_type=str(self.interaction_type),
+ outcome=outcome,
+ )
+
+ def get_extras(self) -> Mapping[str, Any]:
+ return {
+ "user_id": (self.user.id if self.user else None),
+ "organization_id": (self.organization.id if self.organization else None),
+ }
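For illustration, and assuming SlackMessagingSpec.provider_slug is "slack", a successful Slack help command would be counted under a key like the following (derived from get_standard_key later in this patch):

    key = MessagingInteractionEvent(
        MessagingInteractionType.HELP, SlackMessagingSpec()
    ).get_key(EventLifecycleOutcome.SUCCESS)
    # With the root tokens ("sentry", "integrations", "slo") this evaluates to
    # "sentry.integrations.slo.messaging.slack.help.success"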
diff --git a/src/sentry/integrations/msteams/webhook.py b/src/sentry/integrations/msteams/webhook.py
index 2fac342351a211..e7d7e37f9e9077 100644
--- a/src/sentry/integrations/msteams/webhook.py
+++ b/src/sentry/integrations/msteams/webhook.py
@@ -28,8 +28,13 @@
MessagingIntegrationCommand,
MessagingIntegrationCommandDispatcher,
)
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
+from sentry.integrations.messaging.spec import MessagingIntegrationSpec
from sentry.integrations.msteams import parsing
-from sentry.integrations.msteams.spec import PROVIDER
+from sentry.integrations.msteams.spec import PROVIDER, MsTeamsMessagingSpec
from sentry.integrations.services.integration import integration_service
from sentry.models.activity import ActivityIntegration
from sentry.models.apikey import ApiKey
@@ -455,22 +460,21 @@ def _make_action_data(self, data: Mapping[str, Any], user_id: int) -> dict[str,
action_data = {"assignedTo": ""}
return action_data
+ _ACTION_TYPES = {
+ ACTION_TYPE.RESOLVE: ("resolve", MessagingInteractionType.RESOLVE),
+ ACTION_TYPE.IGNORE: ("ignore", MessagingInteractionType.IGNORE),
+ ACTION_TYPE.ASSIGN: ("assign", MessagingInteractionType.ASSIGN),
+ ACTION_TYPE.UNRESOLVE: ("unresolve", MessagingInteractionType.UNRESOLVE),
+ ACTION_TYPE.UNASSIGN: ("unassign", MessagingInteractionType.UNASSIGN),
+ }
+
def _issue_state_change(self, group: Group, identity: RpcIdentity, data) -> Response:
event_write_key = ApiKey(
organization_id=group.project.organization_id, scope_list=["event:write"]
)
- # undoing the enum structure of ACTION_TYPE to
- # get a more sensible analytics_event
- action_types = {
- ACTION_TYPE.RESOLVE: "resolve",
- ACTION_TYPE.IGNORE: "ignore",
- ACTION_TYPE.ASSIGN: "assign",
- ACTION_TYPE.UNRESOLVE: "unresolve",
- ACTION_TYPE.UNASSIGN: "unassign",
- }
action_data = self._make_action_data(data, identity.user_id)
- status = action_types[data["payload"]["actionType"]]
+ status, interaction_type = self._ACTION_TYPES[data["payload"]["actionType"]]
analytics_event = f"integrations.msteams.{status}"
analytics.record(
analytics_event,
@@ -478,13 +482,19 @@ def _issue_state_change(self, group: Group, identity: RpcIdentity, data) -> Resp
organization_id=group.project.organization.id,
)
- return client.put(
- path=f"/projects/{group.project.organization.slug}/{group.project.slug}/issues/",
- params={"id": group.id},
- data=action_data,
- user=user_service.get_user(user_id=identity.user_id),
- auth=event_write_key,
- )
+ with MessagingInteractionEvent(
+ interaction_type, MsTeamsMessagingSpec()
+ ).capture() as lifecycle:
+ response = client.put(
+ path=f"/projects/{group.project.organization.slug}/{group.project.slug}/issues/",
+ params={"id": group.id},
+ data=action_data,
+ user=user_service.get_user(user_id=identity.user_id),
+ auth=event_write_key,
+ )
+ if response.status_code >= 400:
+ lifecycle.record_failure()
+ return response
def _handle_action_submitted(self, request: Request) -> Response:
# pull out parameters
@@ -626,6 +636,10 @@ def _handle_personal_message(self, request: Request) -> Response:
class MsTeamsCommandDispatcher(MessagingIntegrationCommandDispatcher[AdaptiveCard]):
data: dict[str, Any]
+ @property
+ def integration_spec(self) -> MessagingIntegrationSpec:
+ return MsTeamsMessagingSpec()
+
@property
def conversation_id(self) -> str:
return self.data["conversation"]["id"]
diff --git a/src/sentry/integrations/slack/webhooks/action.py b/src/sentry/integrations/slack/webhooks/action.py
index afe5e2a896b37e..f97a632a8b69b0 100644
--- a/src/sentry/integrations/slack/webhooks/action.py
+++ b/src/sentry/integrations/slack/webhooks/action.py
@@ -25,6 +25,10 @@
from sentry.api.helpers.group_index import update_groups
from sentry.auth.access import from_member
from sentry.exceptions import UnableToAcceptMemberInvitationException
+from sentry.integrations.messaging.metrics import (
+ MessagingInteractionEvent,
+ MessagingInteractionType,
+)
from sentry.integrations.services.integration import integration_service
from sentry.integrations.slack.message_builder.issues import SlackIssuesMessageBuilder
from sentry.integrations.slack.metrics import (
@@ -34,6 +38,7 @@
from sentry.integrations.slack.requests.action import SlackActionRequest
from sentry.integrations.slack.requests.base import SlackRequestError
from sentry.integrations.slack.sdk_client import SlackSdkClient
+from sentry.integrations.slack.spec import SlackMessagingSpec
from sentry.integrations.slack.utils.errors import MODAL_NOT_FOUND, unpack_slack_api_error
from sentry.integrations.types import ExternalProviderEnum
from sentry.integrations.utils.scope import bind_org_context_from_integration
@@ -44,6 +49,7 @@
from sentry.notifications.services import notifications_service
from sentry.notifications.utils.actions import BlockKitMessageAction, MessageAction
from sentry.shared_integrations.exceptions import ApiError
+from sentry.users.models import User
from sentry.users.services.user import RpcUser
from sentry.utils import metrics
@@ -431,23 +437,33 @@ def _handle_group_actions(
# response_url later to update it.
defer_attachment_update = False
+ def record_event(interaction_type: MessagingInteractionType) -> MessagingInteractionEvent:
+ user = request.user
+ return MessagingInteractionEvent(
+ interaction_type,
+ SlackMessagingSpec(),
+ user=(user if isinstance(user, User) else None),
+ organization=(group.project.organization if group else None),
+ )
+
# Handle interaction actions
for action in action_list:
try:
- if action.name in (
- "status",
- "unresolved:ongoing",
- ):
- self.on_status(request, identity_user, group, action)
+ if action.name in ("status", "unresolved:ongoing"):
+ with record_event(MessagingInteractionType.STATUS).capture():
+ self.on_status(request, identity_user, group, action)
elif (
action.name == "assign"
): # TODO: remove this as it is replaced by the options-load endpoint
- self.on_assign(request, identity_user, group, action)
+ with record_event(MessagingInteractionType.ASSIGN).capture():
+ self.on_assign(request, identity_user, group, action)
elif action.name == "resolve_dialog":
- _ResolveDialog().open_dialog(slack_request, group)
+ with record_event(MessagingInteractionType.RESOLVE_DIALOG).capture():
+ _ResolveDialog().open_dialog(slack_request, group)
defer_attachment_update = True
elif action.name == "archive_dialog":
- _ArchiveDialog().open_dialog(slack_request, group)
+ with record_event(MessagingInteractionType.ARCHIVE_DIALOG).capture():
+ _ArchiveDialog().open_dialog(slack_request, group)
defer_attachment_update = True
except client.ApiError as error:
return self.api_error(slack_request, group, identity_user, error, action.name)
diff --git a/src/sentry/integrations/slack/webhooks/base.py b/src/sentry/integrations/slack/webhooks/base.py
index f5a4c16a56cc0e..b0663cccebb8d6 100644
--- a/src/sentry/integrations/slack/webhooks/base.py
+++ b/src/sentry/integrations/slack/webhooks/base.py
@@ -16,12 +16,14 @@
MessagingIntegrationCommand,
MessagingIntegrationCommandDispatcher,
)
+from sentry.integrations.messaging.spec import MessagingIntegrationSpec
from sentry.integrations.slack.message_builder.help import SlackHelpMessageBuilder
from sentry.integrations.slack.metrics import (
SLACK_WEBHOOK_DM_ENDPOINT_FAILURE_DATADOG_METRIC,
SLACK_WEBHOOK_DM_ENDPOINT_SUCCESS_DATADOG_METRIC,
)
from sentry.integrations.slack.requests.base import SlackDMRequest, SlackRequestError
+from sentry.integrations.slack.spec import SlackMessagingSpec
from sentry.utils import metrics
LINK_USER_MESSAGE = (
@@ -127,6 +129,10 @@ class SlackCommandDispatcher(MessagingIntegrationCommandDispatcher[Response]):
endpoint: SlackDMEndpoint
request: SlackDMRequest
+ @property
+ def integration_spec(self) -> MessagingIntegrationSpec:
+ return SlackMessagingSpec()
+
@property
def command_handlers(
self,
diff --git a/src/sentry/integrations/utils/metrics.py b/src/sentry/integrations/utils/metrics.py
new file mode 100644
index 00000000000000..2dcdf719fd4c46
--- /dev/null
+++ b/src/sentry/integrations/utils/metrics.py
@@ -0,0 +1,191 @@
+import itertools
+import logging
+from abc import ABC, abstractmethod
+from collections.abc import Mapping
+from enum import Enum
+from types import TracebackType
+from typing import Any, Self
+
+from django.conf import settings
+
+from sentry.utils import metrics
+
+logger = logging.getLogger(__name__)
+
+
+class EventLifecycleOutcome(Enum):
+ STARTED = "STARTED"
+ HALTED = "HALTED"
+ SUCCESS = "SUCCESS"
+ FAILURE = "FAILURE"
+
+ def __str__(self) -> str:
+ return self.value.lower()
+
+
+class EventLifecycleMetric(ABC):
+ """Information about an event to be measured.
+
+ This class is intended to be used across different integrations that share the
+ same business concern. Generally a subclass would represent one business concern
+ (such as MessagingInteractionEvent, which extends this class and is used in the
+ `slack`, `msteams`, and `discord` integration packages).
+ """
+
+ @abstractmethod
+ def get_key(self, outcome: EventLifecycleOutcome) -> str:
+ """Construct the metrics key that will represent this event.
+
+ It is recommended to implement this method by delegating to a
+ `get_standard_key` call.
+ """
+
+ raise NotImplementedError
+
+ @staticmethod
+ def get_standard_key(
+ domain: str,
+ integration_name: str,
+ interaction_type: str,
+ outcome: EventLifecycleOutcome,
+ *extra_tokens: str,
+ ) -> str:
+ """Construct a key with a standard cross-integration structure.
+
+ Implementations of `get_key` generally should delegate to this method in
+ order to ensure consistency across integrations.
+
+ :param domain: a constant string representing the category of business
+ concern or vertical domain that the integration belongs
+ to (e.g., "messaging" or "source_code_management")
+ :param integration_name: the name of the integration (generally should match a
+ package name from `sentry.integrations`)
+ :param interaction_type: a key representing the category of interaction being
+ captured (generally should come from an Enum class)
+ :param outcome: the object representing the event outcome
+ :param extra_tokens: additional tokens to add extra context, if needed
+ :return: a key to represent the event in metrics or logging
+ """
+
+        # For now, universally include a "slo" token to distinguish from any
+ # previously existing metrics keys.
+ # TODO: Merge with or replace existing keys?
+ root_tokens = ("sentry", "integrations", "slo")
+
+ specific_tokens = (domain, integration_name, interaction_type, str(outcome))
+ return ".".join(itertools.chain(root_tokens, specific_tokens, extra_tokens))
+
+ def get_extras(self) -> Mapping[str, Any]:
+ """Get extra data to log."""
+ return {}
+
+ def capture(self, assume_success: bool = True) -> "EventLifecycle":
+ """Open a context to measure the event."""
+ return EventLifecycle(self, assume_success)
+
+
+class EventLifecycleStateError(Exception):
+ pass
+
+
+class EventLifecycle:
+ """Context object that measures an event that may succeed or fail.
+
+ The `assume_success` attribute can be set to False for events where exiting the
+ context may or may not represent a failure condition. In this state,
+ if the program exits the context without `record_success` or `record_failure`
+ being called first, it will log the outcome "halted" in place of "success" or
+ "failure". "Halted" could mean that we received an ambiguous exception from a
+ remote service that may have been caused either by a bug or user error, or merely
+ that inserting `record_failure` calls is still a dev to-do item.
+ """
+
+ def __init__(self, payload: EventLifecycleMetric, assume_success: bool = True) -> None:
+ self.payload = payload
+ self.assume_success = assume_success
+ self._state: EventLifecycleOutcome | None = None
+
+ def record_event(
+ self, outcome: EventLifecycleOutcome, exc: BaseException | None = None
+ ) -> None:
+ """Record a starting or halting event.
+
+ This method is public so that unit tests may mock it, but it should be called
+ only by the other "record" methods.
+ """
+
+ key = self.payload.get_key(outcome)
+
+ sample_rate = (
+ 1.0 if outcome == EventLifecycleOutcome.FAILURE else settings.SENTRY_METRICS_SAMPLE_RATE
+ )
+ metrics.incr(key, sample_rate=sample_rate)
+
+ if outcome == EventLifecycleOutcome.FAILURE:
+ logger.error(key, extra=self.payload.get_extras(), exc_info=exc)
+
+ def _terminate(
+ self, new_state: EventLifecycleOutcome, exc: BaseException | None = None
+ ) -> None:
+ if self._state is None:
+ raise EventLifecycleStateError("The lifecycle has not yet been entered")
+ if self._state != EventLifecycleOutcome.STARTED:
+ raise EventLifecycleStateError("The lifecycle has already been exited")
+ self._state = new_state
+ self.record_event(new_state, exc)
+
+ def record_success(self) -> None:
+ """Record that the event halted successfully.
+
+ Exiting the context without raising an exception will call this method
+ automatically, unless the context was initialized with `assume_success` set
+ to False.
+ """
+
+ self._terminate(EventLifecycleOutcome.SUCCESS)
+
+ def record_failure(self, exc: BaseException | None = None) -> None:
+ """Record that the event halted in failure.
+
+ There is no need to call this method directly if an exception is raised from
+ inside the context. It will be called automatically when exiting the context
+ on an exception.
+
+ This method should be called if we return a soft failure from the event. For
+ example, if we receive an error status from a remote service and gracefully
+ display an error response to the user, it would be necessary to manually call
+ `record_failure` on the context object.
+ """
+
+ self._terminate(EventLifecycleOutcome.FAILURE, exc)
+
+ def __enter__(self) -> Self:
+ if self._state is not None:
+ raise EventLifecycleStateError("The lifecycle has already been entered")
+ self._state = EventLifecycleOutcome.STARTED
+ self.record_event(EventLifecycleOutcome.STARTED)
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType,
+ ) -> None:
+ if self._state != EventLifecycleOutcome.STARTED:
+            # The context called record_success or record_failure before closing,
+ # so we can just exit quietly.
+ return
+
+ if exc_value is not None:
+ # We were forced to exit the context by a raised exception.
+ self.record_failure(exc_value)
+ else:
+ # We exited the context without record_success or record_failure being
+ # called. Assume success if we were told to do so. Else, log a halt
+ # indicating that there is no clear success or failure signal.
+ self._terminate(
+ EventLifecycleOutcome.SUCCESS
+ if self.assume_success
+ else EventLifecycleOutcome.HALTED
+ )
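A compact sketch of the possible endings of a lifecycle context, where metric stands for any EventLifecycleMetric (such as the MessagingInteractionEvent above) and do_work/is_error are hypothetical:

    # Default: a clean exit records SUCCESS, a raised exception records FAILURE.
    with metric.capture():
        do_work()

    # assume_success=False: exiting without record_success()/record_failure()
    # records HALTED, so soft failures should be recorded explicitly.
    with metric.capture(assume_success=False) as lifecycle:
        response = do_work()
        if is_error(response):
            lifecycle.record_failure()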
diff --git a/tests/sentry/integrations/discord/webhooks/test_command.py b/tests/sentry/integrations/discord/webhooks/test_command.py
index 19bd0d6b8e51ad..185bc4cb99bfd3 100644
--- a/tests/sentry/integrations/discord/webhooks/test_command.py
+++ b/tests/sentry/integrations/discord/webhooks/test_command.py
@@ -4,6 +4,7 @@
from sentry.integrations.discord.requests.base import DiscordRequestTypes
from sentry.integrations.discord.webhooks.command import HELP_MESSAGE, NOT_LINKED_MESSAGE
from sentry.integrations.discord.webhooks.types import DiscordResponseTypes
+from sentry.integrations.utils.metrics import EventLifecycleOutcome
from sentry.testutils.cases import APITestCase
WEBHOOK_URL = "/extensions/discord/interactions/"
@@ -246,7 +247,8 @@ def test_unlink(self):
assert data["data"]["flags"] == EPHEMERAL_FLAG
assert response.status_code == 200
- def test_help(self):
+ @mock.patch("sentry.integrations.utils.metrics.EventLifecycle.record_event")
+ def test_help(self, mock_record):
with mock.patch(
"sentry.integrations.discord.requests.base.verify_signature", return_value=True
):
@@ -267,3 +269,8 @@ def test_help(self):
assert HELP_MESSAGE in data["data"]["content"]
assert data["data"]["flags"] == EPHEMERAL_FLAG
assert response.status_code == 200
+
+ assert len(mock_record.mock_calls) == 2
+ start, halt = mock_record.mock_calls
+ assert start.args[0] == EventLifecycleOutcome.STARTED
+ assert halt.args[0] == EventLifecycleOutcome.HALTED
diff --git a/tests/sentry/integrations/discord/webhooks/test_message_component.py b/tests/sentry/integrations/discord/webhooks/test_message_component.py
index 85900376eac954..f0925d821cac87 100644
--- a/tests/sentry/integrations/discord/webhooks/test_message_component.py
+++ b/tests/sentry/integrations/discord/webhooks/test_message_component.py
@@ -2,6 +2,7 @@
from typing import Any
from unittest import mock
+from unittest.mock import patch
from sentry.integrations.discord.message_builder.base.component import (
DiscordComponentCustomIds as CustomIds,
@@ -23,6 +24,7 @@
RESOLVED_IN_NEXT_RELEASE,
UNRESOLVED,
)
+from sentry.integrations.utils.metrics import EventLifecycleOutcome
from sentry.models.release import Release
from sentry.silo.base import SiloMode
from sentry.testutils.cases import APITestCase
@@ -162,7 +164,8 @@ def test_assign_dialog_invalid_group_id(self):
assert response.status_code == 200
assert self.get_message_content(response) == INVALID_GROUP_ID
- def test_assign(self):
+ @patch("sentry.integrations.utils.metrics.EventLifecycle.record_event")
+ def test_assign(self, mock_record):
response = self.send_interaction(
{
"component_type": DiscordMessageComponentTypes.SELECT,
@@ -173,6 +176,11 @@ def test_assign(self):
assert response.status_code == 200
assert self.get_message_content(response) == ASSIGNEE_UPDATED
+ assert len(mock_record.mock_calls) == 2
+ start, halt = mock_record.mock_calls
+ assert start.args[0] == EventLifecycleOutcome.STARTED
+ assert halt.args[0] == EventLifecycleOutcome.SUCCESS
+
def test_resolve_dialog(self):
response = self.send_interaction(
{
diff --git a/tests/sentry/integrations/msteams/test_action_state_change.py b/tests/sentry/integrations/msteams/test_action_state_change.py
index f41994c0e639a0..122ccde4602444 100644
--- a/tests/sentry/integrations/msteams/test_action_state_change.py
+++ b/tests/sentry/integrations/msteams/test_action_state_change.py
@@ -11,6 +11,7 @@
from sentry.integrations.msteams.constants import SALT
from sentry.integrations.msteams.link_identity import build_linking_url
from sentry.integrations.msteams.utils import ACTION_TYPE
+from sentry.integrations.utils.metrics import EventLifecycleOutcome
from sentry.models.activity import Activity, ActivityIntegration
from sentry.models.authidentity import AuthIdentity
from sentry.models.authprovider import AuthProvider
@@ -227,8 +228,9 @@ def test_assign_to_team(self, verify):
}
@responses.activate
+ @patch("sentry.integrations.utils.metrics.EventLifecycle.record_event")
@patch("sentry.integrations.msteams.webhook.verify_signature", return_value=True)
- def test_assign_to_me(self, verify):
+ def test_assign_to_me(self, verify, mock_record):
resp = self.post_webhook(action_type=ACTION_TYPE.ASSIGN, assign_input="ME")
assert resp.status_code == 200, resp.content
@@ -244,6 +246,11 @@ def test_assign_to_me(self, verify):
"integration": ActivityIntegration.MSTEAMS.value,
}
+ assert len(mock_record.mock_calls) == 2
+ start, halt = mock_record.mock_calls
+ assert start.args[0] == EventLifecycleOutcome.STARTED
+ assert halt.args[0] == EventLifecycleOutcome.SUCCESS
+
@responses.activate
@patch("sentry.integrations.msteams.webhook.verify_signature", return_value=True)
def test_assign_to_me_personal_message(self, verify):
diff --git a/tests/sentry/integrations/msteams/test_webhook.py b/tests/sentry/integrations/msteams/test_webhook.py
index e253f860952b70..d699e9cd8d177f 100644
--- a/tests/sentry/integrations/msteams/test_webhook.py
+++ b/tests/sentry/integrations/msteams/test_webhook.py
@@ -10,6 +10,7 @@
from sentry.integrations.models.integration import Integration
from sentry.integrations.msteams.utils import ACTION_TYPE
+from sentry.integrations.utils.metrics import EventLifecycleOutcome
from sentry.silo.base import SiloMode
from sentry.testutils.cases import APITestCase
from sentry.testutils.silo import assume_test_silo_mode
@@ -396,9 +397,10 @@ def test_unlink_user(self, mock_time, mock_decode):
assert "Bearer my_token" in responses.calls[3].request.headers["Authorization"]
@responses.activate
+ @mock.patch("sentry.integrations.utils.metrics.EventLifecycle.record_event")
@mock.patch("sentry.utils.jwt.decode")
@mock.patch("time.time")
- def test_help_command(self, mock_time, mock_decode):
+ def test_help_command(self, mock_time, mock_decode, mock_record):
other_command = deepcopy(EXAMPLE_UNLINK_COMMAND)
other_command["text"] = "Help"
access_json = {"expires_in": 86399, "access_token": "my_token"}
@@ -428,6 +430,11 @@ def test_help_command(self, mock_time, mock_decode):
].request.body.decode("utf-8")
assert "Bearer my_token" in responses.calls[3].request.headers["Authorization"]
+ assert len(mock_record.mock_calls) == 2
+ start, halt = mock_record.mock_calls
+ assert start.args[0] == EventLifecycleOutcome.STARTED
+ assert halt.args[0] == EventLifecycleOutcome.HALTED
+
@responses.activate
@mock.patch("sentry.utils.jwt.decode")
@mock.patch("time.time")
diff --git a/tests/sentry/integrations/slack/webhooks/actions/test_status.py b/tests/sentry/integrations/slack/webhooks/actions/test_status.py
index 96e2107d2a1d90..79c8b9fb801e55 100644
--- a/tests/sentry/integrations/slack/webhooks/actions/test_status.py
+++ b/tests/sentry/integrations/slack/webhooks/actions/test_status.py
@@ -14,6 +14,7 @@
LINK_IDENTITY_MESSAGE,
UNLINK_IDENTITY_MESSAGE,
)
+from sentry.integrations.utils.metrics import EventLifecycleOutcome
from sentry.issues.grouptype import PerformanceNPlusOneGroupType
from sentry.models.activity import Activity, ActivityIntegration
from sentry.models.authidentity import AuthIdentity
@@ -234,8 +235,9 @@ def test_ask_linking(self):
assert resp.data["response_type"] == "ephemeral"
assert resp.data["text"] == LINK_IDENTITY_MESSAGE.format(associate_url=associate_url)
+ @patch("sentry.integrations.utils.metrics.EventLifecycle.record_event")
@patch("sentry.integrations.slack.message_builder.issues.get_tags", return_value=[])
- def test_archive_issue_until_escalating(self, mock_tags):
+ def test_archive_issue_until_escalating(self, mock_tags, mock_record):
original_message = self.get_original_message(self.group.id)
self.archive_issue(original_message, "ignored:archived_until_escalating")
@@ -253,6 +255,11 @@ def test_archive_issue_until_escalating(self, mock_tags):
assert "via" not in blocks[4]["elements"][0]["text"]
assert ":white_circle:" in blocks[0]["text"]["text"]
+ assert len(mock_record.mock_calls) == 2
+ start, halt = mock_record.mock_calls
+ assert start.args[0] == EventLifecycleOutcome.STARTED
+ assert halt.args[0] == EventLifecycleOutcome.SUCCESS
+
@patch("sentry.integrations.slack.message_builder.issues.get_tags", return_value=[])
def test_archive_issue_until_escalating_through_unfurl(self, mock_tags):
original_message = self.get_original_message(self.group.id)
diff --git a/tests/sentry/integrations/slack/webhooks/commands/test_link_team.py b/tests/sentry/integrations/slack/webhooks/commands/test_link_team.py
index 61acf7a28eaf17..ede5b4302c07c1 100644
--- a/tests/sentry/integrations/slack/webhooks/commands/test_link_team.py
+++ b/tests/sentry/integrations/slack/webhooks/commands/test_link_team.py
@@ -1,3 +1,5 @@
+from unittest.mock import patch
+
import orjson
import responses
from rest_framework import status
@@ -9,6 +11,7 @@
LINK_USER_FIRST_MESSAGE,
TEAM_NOT_LINKED_MESSAGE,
)
+from sentry.integrations.utils.metrics import EventLifecycleOutcome
from sentry.silo.base import SiloMode
from sentry.testutils.helpers import get_response_text, link_user
from sentry.testutils.silo import assume_test_silo_mode
@@ -39,8 +42,9 @@ def setUp(self):
class SlackCommandsLinkTeamTest(SlackCommandsLinkTeamTestBase):
+ @patch("sentry.integrations.utils.metrics.EventLifecycle.record_event")
@responses.activate
- def test_link_another_team_to_channel(self):
+ def test_link_another_team_to_channel(self, mock_record):
"""
Test that we block a user who tries to link a second team to a
channel that already has a team linked to it.
@@ -59,6 +63,11 @@ def test_link_another_team_to_channel(self):
data = orjson.loads(response.content)
assert CHANNEL_ALREADY_LINKED_MESSAGE in get_response_text(data)
+ assert len(mock_record.mock_calls) == 2
+ start, halt = mock_record.mock_calls
+ assert start.args[0] == EventLifecycleOutcome.STARTED
+ assert halt.args[0] == EventLifecycleOutcome.HALTED
+
@responses.activate
def test_link_team_from_dm(self):
"""
diff --git a/tests/sentry/integrations/slack/webhooks/commands/test_link_user.py b/tests/sentry/integrations/slack/webhooks/commands/test_link_user.py
index e1da2d55e0d6ce..55041dfd57afd0 100644
--- a/tests/sentry/integrations/slack/webhooks/commands/test_link_user.py
+++ b/tests/sentry/integrations/slack/webhooks/commands/test_link_user.py
@@ -1,3 +1,5 @@
+from unittest.mock import patch
+
from sentry.integrations.models.organization_integration import OrganizationIntegration
from sentry.integrations.slack.views.link_identity import SUCCESS_LINKED_MESSAGE, build_linking_url
from sentry.integrations.slack.views.unlink_identity import (
@@ -5,6 +7,7 @@
build_unlinking_url,
)
from sentry.integrations.slack.webhooks.base import NOT_LINKED_MESSAGE
+from sentry.integrations.utils.metrics import EventLifecycleOutcome
from sentry.testutils.helpers import get_response_text
from sentry.testutils.silo import control_silo_test
from sentry.users.models.identity import Identity
@@ -102,11 +105,17 @@ def test_404(self):
class SlackCommandsUnlinkUserTest(SlackCommandsTest):
"""Slash commands results are generated on Region Silo"""
- def test_unlink_command(self):
+ @patch("sentry.integrations.utils.metrics.EventLifecycle.record_event")
+ def test_unlink_command(self, mock_record):
self.link_user()
data = self.send_slack_message("unlink")
assert "to unlink your identity" in get_response_text(data)
+ assert len(mock_record.mock_calls) == 2
+ start, halt = mock_record.mock_calls
+ assert start.args[0] == EventLifecycleOutcome.STARTED
+ assert halt.args[0] == EventLifecycleOutcome.HALTED
+
def test_unlink_command_already_unlinked(self):
data = self.send_slack_message("unlink")
assert NOT_LINKED_MESSAGE in get_response_text(data)
diff --git a/tests/sentry/integrations/slack/webhooks/events/test_message_im.py b/tests/sentry/integrations/slack/webhooks/events/test_message_im.py
index 5e1afecb9b1241..1b2b28d92b29a0 100644
--- a/tests/sentry/integrations/slack/webhooks/events/test_message_im.py
+++ b/tests/sentry/integrations/slack/webhooks/events/test_message_im.py
@@ -4,6 +4,7 @@
import pytest
from slack_sdk.web import SlackResponse
+from sentry.integrations.utils.metrics import EventLifecycleOutcome
from sentry.silo.base import SiloMode
from sentry.testutils.cases import IntegratedApiTestCase
from sentry.testutils.helpers import get_response_text
@@ -92,7 +93,8 @@ def test_user_message_im_notification_platform(self):
== "Here are the commands you can use. Commands not working? Re-install the app!"
)
- def test_user_message_link(self):
+ @patch("sentry.integrations.utils.metrics.EventLifecycle.record_event")
+ def test_user_message_link(self, mock_record):
"""
Test that when a user types in "link" to the DM we reply with the correct response.
"""
@@ -105,6 +107,11 @@ def test_user_message_link(self):
data = self.mock_post.call_args[1]
assert "Link your Slack identity" in get_response_text(data)
+ assert len(mock_record.mock_calls) == 2
+ start, halt = mock_record.mock_calls
+ assert start.args[0] == EventLifecycleOutcome.STARTED
+ assert halt.args[0] == EventLifecycleOutcome.HALTED
+
def test_user_message_already_linked_sdk(self):
"""
Test that when a user who has already linked their identity types in
From 5bc828b169f46afe146248e8ca1bae5f7ab006d8 Mon Sep 17 00:00:00 2001
From: Andrew Liu <159852527+aliu39@users.noreply.github.com>
Date: Thu, 3 Oct 2024 13:52:10 -0700
Subject: [PATCH 098/139] ref(replay): query custom tags from replays dataset
(#78573)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Depends on https://github.com/getsentry/sentry/pull/78534
Closes https://github.com/getsentry/sentry/issues/78531 🎉
This query is used for the "Tags" section in the replay search bar's
suggestions/typeahead. With this, we'll only display tags placed on
replays for that org (not on issues, traces, etc.).
---
static/app/views/alerts/rules/metric/types.tsx | 1 +
static/app/views/alerts/wizard/options.tsx | 2 +-
static/app/views/alerts/wizard/utils.tsx | 2 +-
static/app/views/replays/list/replaySearchBar.tsx | 13 +++++--------
4 files changed, 8 insertions(+), 10 deletions(-)
diff --git a/static/app/views/alerts/rules/metric/types.tsx b/static/app/views/alerts/rules/metric/types.tsx
index 3bb2c0fc95f260..357f119c9714de 100644
--- a/static/app/views/alerts/rules/metric/types.tsx
+++ b/static/app/views/alerts/rules/metric/types.tsx
@@ -41,6 +41,7 @@ export enum Dataset {
/** Also used for crash free alerts */
METRICS = 'metrics',
ISSUE_PLATFORM = 'search_issues',
+ REPLAYS = 'replays',
}
export enum EventTypes {
diff --git a/static/app/views/alerts/wizard/options.tsx b/static/app/views/alerts/wizard/options.tsx
index e25cd7bcefa5da..04d93bb1dc0ad6 100644
--- a/static/app/views/alerts/wizard/options.tsx
+++ b/static/app/views/alerts/wizard/options.tsx
@@ -65,7 +65,7 @@ export enum MEPAlertsDataset {
export type MetricAlertType = Exclude;
export const DatasetMEPAlertQueryTypes: Record<
- Exclude, // IssuePlatform (search_issues) is not used in alerts, so we can exclude it here
+ Exclude, // IssuePlatform (search_issues) is not used in alerts, so we can exclude it here
MEPAlertsQueryType
> = {
[Dataset.ERRORS]: MEPAlertsQueryType.ERROR,
diff --git a/static/app/views/alerts/wizard/utils.tsx b/static/app/views/alerts/wizard/utils.tsx
index c52002d16544f0..be16a01def8d56 100644
--- a/static/app/views/alerts/wizard/utils.tsx
+++ b/static/app/views/alerts/wizard/utils.tsx
@@ -6,7 +6,7 @@ import type {MetricAlertType, WizardRuleTemplate} from './options';
// A set of unique identifiers to be able to tie aggregate and dataset back to a wizard alert type
const alertTypeIdentifiers: Record<
- Exclude, // IssuePlatform (search_issues) is not used in alerts, so we can exclude it here
+ Exclude, // IssuePlatform (search_issues) is not used in alerts, so we can exclude it here
Partial>
> = {
[Dataset.ERRORS]: {
diff --git a/static/app/views/replays/list/replaySearchBar.tsx b/static/app/views/replays/list/replaySearchBar.tsx
index d2ef35da9e2fbc..a76486e6a20a01 100644
--- a/static/app/views/replays/list/replaySearchBar.tsx
+++ b/static/app/views/replays/list/replaySearchBar.tsx
@@ -129,7 +129,7 @@ function ReplaySearchBar(props: Props) {
{
orgSlug: organization.slug,
projectIds: projectIds.map(String),
- dataset: Dataset.ISSUE_PLATFORM,
+ dataset: Dataset.REPLAYS,
useCache: true,
enabled: true,
keepPreviousData: false,
@@ -139,7 +139,7 @@ function ReplaySearchBar(props: Props) {
},
{}
);
- const issuePlatformTags: TagCollection = useMemo(() => {
+ const customTags: TagCollection = useMemo(() => {
return (tagQuery.data ?? []).reduce((acc, tag) => {
acc[tag.key] = {...tag, kind: FieldKind.TAG};
return acc;
@@ -147,13 +147,10 @@ function ReplaySearchBar(props: Props) {
}, [tagQuery]);
// tagQuery.isLoading and tagQuery.isError are not used
- const filterKeys = useMemo(
- () => getReplayFilterKeys(issuePlatformTags),
- [issuePlatformTags]
- );
+ const filterKeys = useMemo(() => getReplayFilterKeys(customTags), [customTags]);
const filterKeySections = useMemo(() => {
- return getFilterKeySections(issuePlatformTags, organization);
- }, [issuePlatformTags, organization]);
+ return getFilterKeySections(customTags, organization);
+ }, [customTags, organization]);
const getTagValues = useCallback(
(tag: Tag, searchQuery: string): Promise => {
From a1acfad04851c894d29daa5e2ddd78d25a506be6 Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Thu, 3 Oct 2024 13:52:47 -0700
Subject: [PATCH 099/139] fix(issues): Differentiate between linked/unlinked
integrations (#78572)
---
.../externalIssuesList/streamlinedExternalIssueList.tsx | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.tsx b/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.tsx
index 8d98886a1a8ab6..92a955cd0115a9 100644
--- a/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.tsx
+++ b/static/app/components/group/externalIssuesList/streamlinedExternalIssueList.tsx
@@ -13,6 +13,7 @@ import type {Event} from 'sentry/types/event';
import type {Group} from 'sentry/types/group';
import type {Project} from 'sentry/types/project';
import useOrganization from 'sentry/utils/useOrganization';
+import {Divider} from 'sentry/views/issueDetails/divider';
import useStreamLinedExternalIssueData from './hooks/useGroupExternalIssues';
@@ -82,6 +83,7 @@ export function StreamlinedExternalIssueList({
))}
+ {integrations.length > 0 && linkedIssues.length > 0 ? : null}
{integrations.map(integration => {
const sharedButtonProps: ButtonProps = {
size: 'zero',
@@ -138,6 +140,7 @@ const IssueActionWrapper = styled('div')`
display: flex;
flex-wrap: wrap;
gap: ${space(1)};
+ line-height: 1.2;
`;
const StyledSectionTitle = styled(SidebarSection.Title)`
@@ -148,8 +151,7 @@ const LinkedIssue = styled(LinkButton)`
display: flex;
align-items: center;
padding: ${space(0.5)} ${space(0.75)};
- line-height: 1.05;
- border: 1px dashed ${p => p.theme.border};
+ border: 1px solid ${p => p.theme.border};
border-radius: ${p => p.theme.borderRadius};
font-weight: normal;
`;
@@ -158,7 +160,6 @@ const IssueActionButton = styled(Button)`
display: flex;
align-items: center;
padding: ${space(0.5)} ${space(0.75)};
- line-height: 1.05;
border: 1px dashed ${p => p.theme.border};
border-radius: ${p => p.theme.borderRadius};
font-weight: normal;
From c13a3a81928fc0852bd16990f0a1652746ccef3f Mon Sep 17 00:00:00 2001
From: Michelle Fu <83109586+mifu67@users.noreply.github.com>
Date: Thu, 3 Oct 2024 14:08:30 -0700
Subject: [PATCH 100/139] nit(anomaly detection): frontend polish (#78493)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Implement changes for polish as discussed.
---
.../views/alerts/rules/metric/constants.tsx | 1 +
.../alerts/rules/metric/details/sidebar.tsx | 2 +-
.../alerts/rules/metric/ruleForm.spec.tsx | 5 --
.../views/alerts/rules/metric/ruleForm.tsx | 57 ++++++++++++-------
.../alerts/rules/metric/thresholdTypeForm.tsx | 6 +-
5 files changed, 44 insertions(+), 27 deletions(-)
diff --git a/static/app/views/alerts/rules/metric/constants.tsx b/static/app/views/alerts/rules/metric/constants.tsx
index 93a91e3824f69f..b20254c822cc29 100644
--- a/static/app/views/alerts/rules/metric/constants.tsx
+++ b/static/app/views/alerts/rules/metric/constants.tsx
@@ -24,6 +24,7 @@ import type {AlertType, WizardRuleTemplate} from 'sentry/views/alerts/wizard/opt
export const DEFAULT_COUNT_TIME_WINDOW = 1; // 1min
export const DEFAULT_CHANGE_TIME_WINDOW = 60; // 1h
+export const DEFAULT_DYNAMIC_TIME_WINDOW = 60; // 1h
export const DEFAULT_CHANGE_COMP_DELTA = 10080; // 1w
export const DEFAULT_AGGREGATE = 'count()';
diff --git a/static/app/views/alerts/rules/metric/details/sidebar.tsx b/static/app/views/alerts/rules/metric/details/sidebar.tsx
index 1bb5e141e03f0b..40529ae9027103 100644
--- a/static/app/views/alerts/rules/metric/details/sidebar.tsx
+++ b/static/app/views/alerts/rules/metric/details/sidebar.tsx
@@ -290,7 +290,7 @@ export function MetricDetailsSidebar({
/>
{rule.detectionType === AlertRuleComparisonType.DYNAMIC && (
{
dataset: 'events',
},
});
- expect(
- await screen.findByLabelText(
- 'Anomaly: whenever values are outside of expected bounds'
- )
- ).toBeChecked();
expect(
await screen.findByRole('textbox', {name: 'Level of responsiveness'})
).toBeInTheDocument();
diff --git a/static/app/views/alerts/rules/metric/ruleForm.tsx b/static/app/views/alerts/rules/metric/ruleForm.tsx
index 02538732cc6ed4..e05e34c681d0f5 100644
--- a/static/app/views/alerts/rules/metric/ruleForm.tsx
+++ b/static/app/views/alerts/rules/metric/ruleForm.tsx
@@ -77,6 +77,7 @@ import {
DEFAULT_CHANGE_COMP_DELTA,
DEFAULT_CHANGE_TIME_WINDOW,
DEFAULT_COUNT_TIME_WINDOW,
+ DEFAULT_DYNAMIC_TIME_WINDOW,
} from './constants';
import RuleConditionsForm from './ruleConditionsForm';
import {
@@ -918,26 +919,42 @@ class RuleFormContainer extends DeprecatedAsyncComponent {
};
handleComparisonTypeChange = (value: AlertRuleComparisonType) => {
- const comparisonDelta =
- value === AlertRuleComparisonType.CHANGE
- ? this.state.comparisonDelta ?? DEFAULT_CHANGE_COMP_DELTA
- : undefined;
- const timeWindow = this.state.comparisonDelta
- ? DEFAULT_COUNT_TIME_WINDOW
- : DEFAULT_CHANGE_TIME_WINDOW;
- const sensitivity =
- value === AlertRuleComparisonType.DYNAMIC
- ? this.state.sensitivity || AlertRuleSensitivity.MEDIUM
- : undefined;
- const seasonality =
- value === AlertRuleComparisonType.DYNAMIC ? AlertRuleSeasonality.AUTO : undefined;
- this.setState({
- comparisonType: value,
- comparisonDelta,
- timeWindow,
- sensitivity,
- seasonality,
- });
+ let updateState = {};
+ switch (value) {
+ case AlertRuleComparisonType.DYNAMIC:
+ updateState = {
+ comparisonType: value,
+ comparisonDelta: undefined,
+ thresholdType: AlertRuleThresholdType.ABOVE_AND_BELOW,
+ timeWindow: DEFAULT_DYNAMIC_TIME_WINDOW,
+ sensitivity: AlertRuleSensitivity.MEDIUM,
+ seasonality: AlertRuleSeasonality.AUTO,
+ };
+ break;
+ case AlertRuleComparisonType.CHANGE:
+ updateState = {
+ comparisonType: value,
+ comparisonDelta: DEFAULT_CHANGE_COMP_DELTA,
+ thresholdType: AlertRuleThresholdType.ABOVE,
+ timeWindow: DEFAULT_CHANGE_TIME_WINDOW,
+ sensitivity: undefined,
+ seasonality: undefined,
+ };
+ break;
+ case AlertRuleComparisonType.COUNT:
+ updateState = {
+ comparisonType: value,
+ comparisonDelta: undefined,
+ thresholdType: AlertRuleThresholdType.ABOVE,
+ timeWindow: DEFAULT_COUNT_TIME_WINDOW,
+ sensitivity: undefined,
+ seasonality: undefined,
+ };
+ break;
+ default:
+ break;
+ }
+ this.setState(updateState);
};
handleDeleteRule = async () => {
diff --git a/static/app/views/alerts/rules/metric/thresholdTypeForm.tsx b/static/app/views/alerts/rules/metric/thresholdTypeForm.tsx
index 889f963449ad31..b5404a2502fb4a 100644
--- a/static/app/views/alerts/rules/metric/thresholdTypeForm.tsx
+++ b/static/app/views/alerts/rules/metric/thresholdTypeForm.tsx
@@ -1,6 +1,7 @@
import styled from '@emotion/styled';
import Feature from 'sentry/components/acl/feature';
+import FeatureBadge from 'sentry/components/badge/featureBadge';
import RadioGroup, {type RadioOption} from 'sentry/components/forms/controls/radioGroup';
import SelectControl from 'sentry/components/forms/controls/selectControl';
import {t} from 'sentry/locale';
@@ -98,7 +99,10 @@ function ThresholdTypeForm({
if (hasAnomalyDetection && validAnomalyDetectionAlertTypes.has(alertType)) {
thresholdTypeChoices.push([
AlertRuleComparisonType.DYNAMIC,
- 'Anomaly: whenever values are outside of expected bounds',
+
+ {t('Anomaly: whenever values are outside of expected bounds')}
+
+ ,
] as RadioOption);
}
From 7b7b0d76c082900688ce2a29dddde0d76e0029a0 Mon Sep 17 00:00:00 2001
From: mia hsu <55610339+ameliahsu@users.noreply.github.com>
Date: Thu, 3 Oct 2024 14:10:56 -0700
Subject: [PATCH 101/139] feat(invite-members): new invite modal design
(#77528)
changes to note:
- added ability to copy/paste lists of emails
- removed the "Add another" button
- new submission flow (successfully sent emails are removed and the
inputs are no longer disabled after submission, so users can continue to
input emails without needing to select "Send more invites")
- slight change to the success/error messages
- refactored invite modal code
The next change will be to stop overflow of the email address input (I want
to do that in a separate PR since the `SelectControl` component is widely
used in other places)
**before:**
https://github.com/user-attachments/assets/daf4a7bb-1e5d-4d0b-bfb7-3fbf3469cf3a
**after:**
https://github.com/user-attachments/assets/0d037342-9d11-45a8-abd8-4146599269ca
---
.../modals/inviteMembersModal/index.tsx | 60 ++++-
.../inviteHeaderMessages.tsx | 33 +++
.../inviteMembersContext.tsx | 56 +++++
.../inviteMembersFooter.spec.tsx | 88 +++++++
.../inviteMembersFooter.tsx | 80 ++++++
.../inviteRowControlNew.spec.tsx | 138 +++++++++++
.../inviteRowControlNew.tsx | 227 ++++++++++++++++++
.../inviteStatusMessage.tsx | 100 +++++++-
.../inviteMembersModal/renderEmailValue.tsx | 6 +-
.../inviteMembersModal/useInviteModal.tsx | 31 ++-
10 files changed, 809 insertions(+), 10 deletions(-)
create mode 100644 static/app/components/modals/inviteMembersModal/inviteHeaderMessages.tsx
create mode 100644 static/app/components/modals/inviteMembersModal/inviteMembersContext.tsx
create mode 100644 static/app/components/modals/inviteMembersModal/inviteMembersFooter.spec.tsx
create mode 100644 static/app/components/modals/inviteMembersModal/inviteMembersFooter.tsx
create mode 100644 static/app/components/modals/inviteMembersModal/inviteRowControlNew.spec.tsx
create mode 100644 static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx
diff --git a/static/app/components/modals/inviteMembersModal/index.tsx b/static/app/components/modals/inviteMembersModal/index.tsx
index a4f4753780e98a..88c3128b848b4e 100644
--- a/static/app/components/modals/inviteMembersModal/index.tsx
+++ b/static/app/components/modals/inviteMembersModal/index.tsx
@@ -1,14 +1,24 @@
import {css} from '@emotion/react';
+import styled from '@emotion/styled';
import type {ModalRenderProps} from 'sentry/actionCreators/modal';
import ErrorBoundary from 'sentry/components/errorBoundary';
import LoadingError from 'sentry/components/loadingError';
import LoadingIndicator from 'sentry/components/loadingIndicator';
+import {
+ ErrorAlert,
+ InviteMessage,
+} from 'sentry/components/modals/inviteMembersModal/inviteHeaderMessages';
+import {InviteMembersContext} from 'sentry/components/modals/inviteMembersModal/inviteMembersContext';
+import InviteMembersFooter from 'sentry/components/modals/inviteMembersModal/inviteMembersFooter';
import InviteMembersModalView from 'sentry/components/modals/inviteMembersModal/inviteMembersModalview';
+import InviteRowControl from 'sentry/components/modals/inviteMembersModal/inviteRowControlNew';
import type {InviteRow} from 'sentry/components/modals/inviteMembersModal/types';
import useInviteModal from 'sentry/components/modals/inviteMembersModal/useInviteModal';
import {InviteModalHook} from 'sentry/components/modals/memberInviteModalCustomization';
+import {ORG_ROLES} from 'sentry/constants';
import {t} from 'sentry/locale';
+import {space} from 'sentry/styles/space';
import {trackAnalytics} from 'sentry/utils/analytics';
import {isActiveSuperuser} from 'sentry/utils/isActiveSuperuser';
import useOrganization from 'sentry/utils/useOrganization';
@@ -19,6 +29,8 @@ interface InviteMembersModalProps extends ModalRenderProps {
}
function InviteMembersModal({
+ Header,
+ Body,
closeModal,
initialData,
source,
@@ -37,6 +49,7 @@ function InviteMembersModal({
setEmails,
setRole,
setTeams,
+ setInviteStatus,
willInvite,
complete,
inviteStatus,
@@ -70,7 +83,41 @@ function InviteMembersModal({
onSendInvites={sendInvites}
>
{({sendInvites: _sendInvites, canSend, headerInfo}) => {
- return (
+ return organization.features.includes('invite-members-new-modal') ? (
+
+
+
+ {t('Invite New Members')}
+
+
+
+ {headerInfo}
+
+
+
+
+ ) : (
p.theme.fontWeightNormal};
+ font-size: ${p => p.theme.headerFontSize};
+ margin-top: 0;
+ margin-bottom: ${space(0.75)};
+`;
+
+const StyledInviteRow = styled(InviteRowControl)`
+ margin-bottom: ${space(1.5)};
+`;
+
export default InviteMembersModal;
diff --git a/static/app/components/modals/inviteMembersModal/inviteHeaderMessages.tsx b/static/app/components/modals/inviteMembersModal/inviteHeaderMessages.tsx
new file mode 100644
index 00000000000000..a08daeb972a89a
--- /dev/null
+++ b/static/app/components/modals/inviteMembersModal/inviteHeaderMessages.tsx
@@ -0,0 +1,33 @@
+import styled from '@emotion/styled';
+
+import Alert from 'sentry/components/alert';
+import {useInviteMembersContext} from 'sentry/components/modals/inviteMembersModal/inviteMembersContext';
+import {t} from 'sentry/locale';
+import {space} from 'sentry/styles/space';
+
+export function ErrorAlert() {
+ const {error} = useInviteMembersContext();
+ return error ? (
+
+ {error}
+
+ ) : null;
+}
+
+export function InviteMessage() {
+ const {willInvite} = useInviteMembersContext();
+ return willInvite ? (
+ {t('Invite unlimited new members to join your organization.')}
+ ) : (
+
+ {t(
+ 'You can’t invite users directly, but we’ll forward your request to an org owner or manager for approval.'
+ )}
+
+ );
+}
+
+const Subtext = styled('p')`
+ color: ${p => p.theme.subText};
+ margin-bottom: ${space(3)};
+`;
diff --git a/static/app/components/modals/inviteMembersModal/inviteMembersContext.tsx b/static/app/components/modals/inviteMembersModal/inviteMembersContext.tsx
new file mode 100644
index 00000000000000..208b0002fd943f
--- /dev/null
+++ b/static/app/components/modals/inviteMembersModal/inviteMembersContext.tsx
@@ -0,0 +1,56 @@
+import {createContext, useContext} from 'react';
+
+import type {
+ InviteRow,
+ InviteStatus,
+ NormalizedInvite,
+} from 'sentry/components/modals/inviteMembersModal/types';
+
+export type InviteMembersContextValue = {
+ complete: boolean;
+ inviteStatus: InviteStatus;
+ invites: NormalizedInvite[];
+ pendingInvites: InviteRow;
+ reset: () => void;
+ sendInvites: () => void;
+ sendingInvites: boolean;
+ setEmails: (emails: string[], index: number) => void;
+ setInviteStatus: (inviteStatus: InviteStatus) => void;
+ setRole: (role: string, index: number) => void;
+ setTeams: (teams: string[], index: number) => void;
+ willInvite: boolean;
+ error?: string;
+};
+
+export const defaultInviteProps = {
+ complete: false,
+ inviteStatus: {},
+ invites: [],
+ pendingInvites: {
+ emails: new Set(),
+ role: '',
+ teams: new Set(),
+ },
+ reset: () => {},
+ sendInvites: () => {},
+ sendingInvites: false,
+ setEmails: () => {},
+ setRole: () => {},
+ setTeams: () => {},
+ setInviteStatus: () => {},
+ willInvite: false,
+};
+
+export const InviteMembersContext = createContext(null);
+
+export function useInviteMembersContext(): InviteMembersContextValue {
+ const context = useContext(InviteMembersContext);
+
+ if (!context) {
+ throw new Error(
+ 'useInviteMembersContext must be used within a InviteMembersContext.Provider'
+ );
+ }
+
+ return context;
+}
diff --git a/static/app/components/modals/inviteMembersModal/inviteMembersFooter.spec.tsx b/static/app/components/modals/inviteMembersModal/inviteMembersFooter.spec.tsx
new file mode 100644
index 00000000000000..8cfc649a7c9f11
--- /dev/null
+++ b/static/app/components/modals/inviteMembersModal/inviteMembersFooter.spec.tsx
@@ -0,0 +1,88 @@
+import {OrganizationFixture} from 'sentry-fixture/organization';
+
+import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
+
+import {
+ defaultInviteProps,
+ InviteMembersContext,
+} from 'sentry/components/modals/inviteMembersModal/inviteMembersContext';
+import InviteMembersFooter from 'sentry/components/modals/inviteMembersModal/inviteMembersFooter';
+
+describe('InviteMembersFooter', function () {
+ const renderComponent = props => {
+ render(
+
+
+ ,
+ {organization: OrganizationFixture({features: ['invite-members-new-modal']})}
+ );
+ };
+
+ it('disables send button when there are no emails', function () {
+ renderComponent({});
+
+ const sendButton = screen.getByLabelText(/send invite/i);
+ expect(sendButton).toBeDisabled();
+ });
+
+ it('enables send button when there are emails', async function () {
+ const mockSetInviteStatus = jest.fn();
+ const mockSendInvites = jest.fn();
+ renderComponent({
+ invites: [
+ {
+ email: 'moo-deng@email.com',
+ role: 'member',
+ teams: new Set(['moo-deng']),
+ },
+ ],
+ setInviteStatus: mockSetInviteStatus,
+ sendInvites: mockSendInvites,
+ });
+
+ const sendButton = screen.getByLabelText(/send invite/i);
+ expect(sendButton).toBeEnabled();
+ await userEvent.click(sendButton);
+ expect(mockSetInviteStatus).toHaveBeenCalled();
+ expect(mockSendInvites).toHaveBeenCalled();
+ });
+
+ it('displays correct status message for sent invites', function () {
+ renderComponent({
+ complete: true,
+ inviteStatus: {
+ 'moo-deng': {sent: true},
+ 'moo-waan': {sent: true},
+ },
+ willInvite: true,
+ });
+ expect(screen.getByTestId('sent-invites')).toHaveTextContent(/2/i);
+ expect(screen.queryByTestId('failed-invites')).not.toBeInTheDocument();
+ });
+
+ it('displays correct status message for failed invites', function () {
+ renderComponent({
+ complete: true,
+ inviteStatus: {
+ 'moo-deng': {sent: false, error: 'Error'},
+ 'moo-waan': {sent: false, error: 'Error'},
+ },
+ willInvite: true,
+ });
+ expect(screen.getByText(/2/i)).toBeInTheDocument();
+ });
+
+ it('displays correct status message for sent and failed invites', function () {
+ renderComponent({
+ complete: true,
+ inviteStatus: {
+ 'moo-deng': {sent: true},
+ 'moo-waan': {sent: true},
+ 'moo-toon': {sent: false, error: 'Error'},
+ },
+ willInvite: true,
+ });
+ expect(screen.getByTestId('sent-invites')).toHaveTextContent(/2/i);
+ expect(screen.getByTestId('failed-invites')).toHaveTextContent(/1/i);
+ });
+});
diff --git a/static/app/components/modals/inviteMembersModal/inviteMembersFooter.tsx b/static/app/components/modals/inviteMembersModal/inviteMembersFooter.tsx
new file mode 100644
index 00000000000000..a130cc89a194e4
--- /dev/null
+++ b/static/app/components/modals/inviteMembersModal/inviteMembersFooter.tsx
@@ -0,0 +1,80 @@
+import {Fragment} from 'react';
+import styled from '@emotion/styled';
+
+import ButtonBar from 'sentry/components/buttonBar';
+import InviteButton from 'sentry/components/modals/inviteMembersModal/inviteButton';
+import {useInviteMembersContext} from 'sentry/components/modals/inviteMembersModal/inviteMembersContext';
+import InviteStatusMessage from 'sentry/components/modals/inviteMembersModal/inviteStatusMessage';
+import {space} from 'sentry/styles/space';
+import useOrganization from 'sentry/utils/useOrganization';
+
+interface Props {
+ canSend: boolean;
+}
+
+export default function InviteMembersFooter({canSend}: Props) {
+ const organization = useOrganization();
+ const {
+ complete,
+ inviteStatus,
+ setInviteStatus,
+ invites,
+ pendingInvites,
+ sendInvites,
+ sendingInvites,
+ willInvite,
+ } = useInviteMembersContext();
+ const isValidInvites = invites.length > 0;
+
+ const removeSentInvites = () => {
+ const emails = Object.keys(inviteStatus);
+ let newInviteStatus = {};
+ emails.forEach(email => {
+ if (pendingInvites.emails.has(email)) {
+ newInviteStatus = {...newInviteStatus, [email]: inviteStatus[email]};
+ }
+ });
+ setInviteStatus(newInviteStatus);
+ };
+
+ return (
+
+
+ {/* TODO(mia): remove these props and use InviteMemberContext once old modal is removed */}
+
+
+
+
+ {
+ organization.features.includes('invite-members-new-modal') &&
+ removeSentInvites();
+ sendInvites();
+ }}
+ />
+
+
+
+ );
+}
+
+const FooterContent = styled('div')`
+ display: flex;
+ gap: ${space(1)};
+ align-items: center;
+ justify-content: space-between;
+ flex: 1;
+`;
diff --git a/static/app/components/modals/inviteMembersModal/inviteRowControlNew.spec.tsx b/static/app/components/modals/inviteMembersModal/inviteRowControlNew.spec.tsx
new file mode 100644
index 00000000000000..7d5b3eb03792bd
--- /dev/null
+++ b/static/app/components/modals/inviteMembersModal/inviteRowControlNew.spec.tsx
@@ -0,0 +1,138 @@
+import {TeamFixture} from 'sentry-fixture/team';
+
+import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
+
+import {
+ defaultInviteProps,
+ InviteMembersContext,
+} from 'sentry/components/modals/inviteMembersModal/inviteMembersContext';
+import InviteRowControlNew from 'sentry/components/modals/inviteMembersModal/inviteRowControlNew';
+import TeamStore from 'sentry/stores/teamStore';
+
+describe('InviteRowControlNew', function () {
+ const teamData = [
+ {
+ id: '1',
+ slug: 'moo-deng',
+ name: "Moo Deng's Team",
+ },
+ {
+ id: '2',
+ slug: 'moo-waan',
+ name: "Moo Waan's Team",
+ },
+ ];
+ const teams = teamData.map(data => TeamFixture(data));
+
+ const getComponent = props => (
+
+
+
+ );
+
+ beforeEach(function () {
+ TeamStore.loadInitialData(teams);
+ });
+
+ it('renders', function () {
+ render(getComponent(defaultInviteProps));
+
+ expect(screen.getByText('Email addresses')).toBeInTheDocument();
+ expect(screen.getByText('Role')).toBeInTheDocument();
+ expect(screen.getByText('Add to team')).toBeInTheDocument();
+ });
+
+ describe.each([
+ {email: 'test-space@example.com', delimiter: ' '},
+ {email: 'test-comma@example.com', delimiter: ','},
+ {email: 'test-newline@example.com', delimiter: '{enter}'},
+ ])('updates email addresses when new emails are inputted', ({email, delimiter}) => {
+ it(`invokes the mock correctly with one using delimiter "${delimiter}"`, async () => {
+ const mockSetEmails = jest.fn();
+ render(getComponent({...defaultInviteProps, setEmails: mockSetEmails}));
+ const emailInput = screen.getByLabelText('Email Addresses');
+ await userEvent.type(emailInput, `${email}${delimiter}`);
+ expect(mockSetEmails).toHaveBeenCalled();
+ });
+
+ it(`invokes the mock correctly with many using delimiter "${delimiter}"`, async () => {
+ const mockSetEmails = jest.fn();
+ render(getComponent({...defaultInviteProps, setEmails: mockSetEmails}));
+ const emailInput = screen.getByLabelText('Email Addresses');
+ await userEvent.type(emailInput, `${email}${delimiter}`);
+ await userEvent.type(emailInput, `${email}${delimiter}`);
+ await userEvent.type(emailInput, `${email}${delimiter}`);
+ expect(mockSetEmails).toHaveBeenCalledTimes(3);
+ });
+ });
+
+ it('updates email addresses when new emails are inputted and input is unfocussed', async function () {
+ const mockSetEmails = jest.fn();
+ render(getComponent({...defaultInviteProps, setEmails: mockSetEmails}));
+ const emailInput = screen.getByLabelText('Email Addresses');
+ await userEvent.type(emailInput, 'test-unfocus@example.com');
+ await userEvent.tab();
+ expect(mockSetEmails).toHaveBeenCalled();
+ });
+
+ it('updates role value when new role is selected', async function () {
+ const mockSetRole = jest.fn();
+ render(getComponent({...defaultInviteProps, setRole: mockSetRole}));
+ const roleInput = screen.getByLabelText('Role');
+ await userEvent.click(roleInput);
+ await userEvent.click(screen.getByText('Billing'));
+ expect(mockSetRole).toHaveBeenCalled();
+ });
+
+ it('disables team selection when team roles are not allowed', function () {
+ render(
+ getComponent({
+ ...defaultInviteProps,
+ pendingInvites: {
+ ...defaultInviteProps.pendingInvites,
+ role: 'billing',
+ },
+ })
+ );
+ const teamInput = screen.getByLabelText('Add to Team');
+ expect(teamInput).toBeDisabled();
+ });
+
+ it('enables team selection when team roles are allowed', async function () {
+ const mockSetTeams = jest.fn();
+ render(
+ getComponent({
+ ...defaultInviteProps,
+ pendingInvites: {
+ ...defaultInviteProps.pendingInvites,
+ role: 'member',
+ },
+ setTeams: mockSetTeams,
+ })
+ );
+ const teamInput = screen.getByLabelText('Add to Team');
+ expect(teamInput).toBeEnabled();
+ await userEvent.click(teamInput);
+ await userEvent.click(screen.getByText('#moo-deng'));
+ await userEvent.click(screen.getByText('#moo-waan'));
+ expect(mockSetTeams).toHaveBeenCalledTimes(2);
+ });
+});
diff --git a/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx b/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx
new file mode 100644
index 00000000000000..b49a1e7ef32967
--- /dev/null
+++ b/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx
@@ -0,0 +1,227 @@
+import {useCallback, useState} from 'react';
+import type {MultiValueProps} from 'react-select';
+import type {Theme} from '@emotion/react';
+import {useTheme} from '@emotion/react';
+import styled from '@emotion/styled';
+
+import type {StylesConfig} from 'sentry/components/forms/controls/selectControl';
+import SelectControl from 'sentry/components/forms/controls/selectControl';
+import {useInviteMembersContext} from 'sentry/components/modals/inviteMembersModal/inviteMembersContext';
+import RoleSelectControl from 'sentry/components/roleSelectControl';
+import TeamSelector from 'sentry/components/teamSelector';
+import {t} from 'sentry/locale';
+import {space} from 'sentry/styles/space';
+import type {SelectValue} from 'sentry/types/core';
+import type {OrgRole} from 'sentry/types/organization';
+
+import renderEmailValue from './renderEmailValue';
+import type {InviteStatus} from './types';
+
+type SelectOption = SelectValue;
+
+type Props = {
+ roleDisabledUnallowed: boolean;
+ roleOptions: OrgRole[];
+};
+
+function ValueComponent(
+ props: MultiValueProps,
+ inviteStatus: InviteStatus
+) {
+ return renderEmailValue(inviteStatus[props.data.value], props);
+}
+
+function mapToOptions(values: string[]): SelectOption[] {
+ return values.map(value => ({value, label: value}));
+}
+
+function InviteRowControl({roleDisabledUnallowed, roleOptions}: Props) {
+ const {inviteStatus, pendingInvites, setEmails, setRole, setTeams, reset} =
+ useInviteMembersContext();
+ const emails = [...(pendingInvites.emails ?? [])];
+ const role = pendingInvites.role ?? '';
+ const teams = [...(pendingInvites.teams ?? [])];
+
+ const onChangeEmails = (opts: SelectOption[]) => {
+ setEmails(opts?.map(v => v.value) ?? [], 0);
+ };
+ const onChangeRole = (value: SelectOption) => setRole(value?.value, 0);
+ const onChangeTeams = (opts: SelectOption[]) =>
+ setTeams(opts ? opts.map(v => v.value) : [], 0);
+
+ const [inputValue, setInputValue] = useState('');
+
+ const theme = useTheme();
+
+ const isTeamRolesAllowedForRole = useCallback<(roleId: string) => boolean>(
+ roleId => {
+ const roleOptionsMap = roleOptions.reduce(
+ (rolesMap, roleOption) => ({...rolesMap, [roleOption.id]: roleOption}),
+ {}
+ );
+ return roleOptionsMap[roleId]?.isTeamRolesAllowed ?? true;
+ },
+ [roleOptions]
+ );
+ const isTeamRolesAllowed = isTeamRolesAllowedForRole(role);
+
+ const handleKeyDown = (e: React.KeyboardEvent) => {
+ switch (e.key) {
+ case 'Enter':
+ case ',':
+ case ' ':
+ e.preventDefault();
+ handleInput(inputValue);
+ setInputValue('');
+ break;
+ default:
+ // do nothing.
+ }
+ };
+
+ const handleInput = input => {
+ const newEmails = input.trim() ? input.trim().split(/[\s,]+/) : [];
+ if (newEmails.length > 0) {
+ onChangeEmails([
+ ...mapToOptions(emails),
+ ...newEmails.map(email => ({label: email, value: email})),
+ ]);
+ }
+ };
+
+ return (
+
+
+ Email addresses
+
+ ValueComponent(props, inviteStatus),
+ DropdownIndicator: () => null,
+ }}
+ options={mapToOptions(emails)}
+ onBlur={(e: React.ChangeEvent) => {
+ handleInput(e.target.value);
+ }}
+ styles={getStyles(theme, inviteStatus)}
+ onInputChange={setInputValue}
+ onKeyDown={handleKeyDown}
+ onChange={onChangeEmails}
+ multiple
+ creatable
+ clearable
+ onClear={reset}
+ menuIsOpen={false}
+ />
+
+
+
+
+ Role
+ {
+ onChangeRole(roleOption);
+ if (!isTeamRolesAllowedForRole(roleOption.value)) {
+ onChangeTeams([]);
+ }
+ }}
+ />
+
+
+ Add to team
+
+
+
+
+ );
+}
+
+/**
+ * The email select control has custom selected item states as items
+ * show their delivery status after the form is submitted.
+ */
+function getStyles(theme: Theme, inviteStatus: InviteStatus): StylesConfig {
+ return {
+ multiValue: (provided, {data}: MultiValueProps) => {
+ const status = inviteStatus[data.value];
+ return {
+ ...provided,
+ ...(status?.error
+ ? {
+ color: theme.red400,
+ border: `1px solid ${theme.red300}`,
+ backgroundColor: theme.red100,
+ }
+ : {}),
+ };
+ },
+ multiValueLabel: (provided, {data}: MultiValueProps) => {
+ const status = inviteStatus[data.value];
+ return {
+ ...provided,
+ pointerEvents: 'all',
+ ...(status?.error ? {color: theme.red400} : {}),
+ };
+ },
+ multiValueRemove: (provided, {data}: MultiValueProps) => {
+ const status = inviteStatus[data.value];
+ return {
+ ...provided,
+ ...(status?.error
+ ? {
+ borderLeft: `1px solid ${theme.red300}`,
+ ':hover': {backgroundColor: theme.red100, color: theme.red400},
+ }
+ : {}),
+ };
+ },
+ };
+}
+
+const Heading = styled('div')`
+ margin-bottom: ${space(1)};
+ font-weight: ${p => p.theme.fontWeightBold};
+ text-transform: uppercase;
+ font-size: ${p => p.theme.fontSizeSmall};
+`;
+
+const RowWrapper = styled('div')`
+ display: flex;
+ flex-direction: column;
+ gap: ${space(1.5)};
+`;
+
+const EmailWrapper = styled('div')`
+ &:focus-within {
+ display: grid;
+ }
+`;
+
+const RoleTeamWrapper = styled('div')`
+ display: grid;
+ gap: ${space(1.5)};
+ grid-template-columns: 1fr 1fr;
+ align-items: start;
+`;
+
+export default InviteRowControl;
diff --git a/static/app/components/modals/inviteMembersModal/inviteStatusMessage.tsx b/static/app/components/modals/inviteMembersModal/inviteStatusMessage.tsx
index b43b1aec777880..c508e76f20ac4a 100644
--- a/static/app/components/modals/inviteMembersModal/inviteStatusMessage.tsx
+++ b/static/app/components/modals/inviteMembersModal/inviteStatusMessage.tsx
@@ -4,10 +4,63 @@ import LoadingIndicator from 'sentry/components/loadingIndicator';
import {IconCheckmark, IconWarning} from 'sentry/icons';
import {t, tct, tn} from 'sentry/locale';
import {space} from 'sentry/styles/space';
+import useOrganization from 'sentry/utils/useOrganization';
import type {InviteStatus} from './types';
-interface Props {
+interface InviteCountProps {
+ count: number;
+ label: string;
+ isRequest?: boolean;
+}
+
+function InviteCount({count, label, isRequest}: InviteCountProps) {
+ return (
+
+ {isRequest
+ ? tn('%s invite request', '%s invite requests', count)
+ : tn('%s invite', '%s invites', count)}
+
+ );
+}
+
+interface CountMessageProps {
+ errorCount: number;
+ sentCount: number;
+ isRequest?: boolean;
+}
+
+function CountMessage({sentCount, errorCount, isRequest}: CountMessageProps) {
+ const invites = (
+
+ );
+ const failedInvites = (
+
+ );
+ const tctComponents = {
+ invites,
+ failed: errorCount,
+ failedInvites,
+ };
+ return (
+
+ {sentCount > 0 && (
+
+
+ {tct('[invites] sent.', tctComponents)}
+
+ )}
+ {errorCount > 0 && (
+
+
+ {tct('[failedInvites] failed to send.', tctComponents)}
+
+ )}
+
+ );
+}
+
+interface InviteStatusMessageProps {
complete: boolean;
hasDuplicateEmails: boolean;
inviteStatus: InviteStatus;
@@ -21,7 +74,10 @@ export default function InviteStatusMessage({
inviteStatus,
sendingInvites,
willInvite,
-}: Props) {
+}: InviteStatusMessageProps) {
+ const organization = useOrganization();
+ const isNewInviteModal = organization.features.includes('invite-members-new-modal');
+
if (sendingInvites) {
return (
@@ -38,8 +94,22 @@ export default function InviteStatusMessage({
const sentCount = statuses.filter(i => i.sent).length;
const errorCount = statuses.filter(i => i.error).length;
+ if (isNewInviteModal) {
+ return (
+
+ );
+ }
+
if (willInvite) {
- const invites = {tn('%s invite', '%s invites', sentCount)} ;
+ const invites = (
+
+ {tn('%s invite', '%s invites', sentCount)}
+
+ );
const tctComponents = {
invites,
failed: errorCount,
@@ -57,12 +127,15 @@ export default function InviteStatusMessage({
);
}
const inviteRequests = (
- {tn('%s invite request', '%s invite requests', sentCount)}
+
+ {tn('%s invite request', '%s invite requests', sentCount)}
+
);
const tctComponents = {
inviteRequests,
failed: errorCount,
};
+
return (
@@ -76,6 +149,7 @@ export default function InviteStatusMessage({
);
}
+ // TODO(mia): remove once old modal is removed
if (hasDuplicateEmails) {
return (
@@ -88,14 +162,26 @@ export default function InviteStatusMessage({
return null;
}
-export const StatusMessage = styled('div')<{status?: 'success' | 'error'}>`
+export const StatusMessage = styled('div')<{
+ isNewInviteModal?: boolean;
+ status?: 'success' | 'error';
+}>`
display: flex;
gap: ${space(1)};
align-items: center;
font-size: ${p => p.theme.fontSizeMedium};
- color: ${p => (p.status === 'error' ? p.theme.errorText : p.theme.textColor)};
+ color: ${p =>
+ p.status === 'error' && !p.isNewInviteModal ? p.theme.errorText : p.theme.textColor};
> :first-child {
- ${p => p.status === 'success' && `color: ${p.theme.successText}`};
+ ${p =>
+ p.status === 'success'
+ ? `color: ${p.theme.successText}`
+ : p.status === 'error' && p.isNewInviteModal && `color: ${p.theme.errorText}`};
}
`;
+
+export const BoldCount = styled('div')`
+ display: inline;
+ font-weight: bold;
+`;
diff --git a/static/app/components/modals/inviteMembersModal/renderEmailValue.tsx b/static/app/components/modals/inviteMembersModal/renderEmailValue.tsx
index deb02ff34f7640..a883838f0a4cda 100644
--- a/static/app/components/modals/inviteMembersModal/renderEmailValue.tsx
+++ b/static/app/components/modals/inviteMembersModal/renderEmailValue.tsx
@@ -6,6 +6,7 @@ import LoadingIndicator from 'sentry/components/loadingIndicator';
import {Tooltip} from 'sentry/components/tooltip';
import {IconCheckmark, IconWarning} from 'sentry/icons';
import {space} from 'sentry/styles/space';
+import useOrganization from 'sentry/utils/useOrganization';
import type {InviteStatus} from './types';
@@ -13,6 +14,7 @@ function renderEmailValue(
status: InviteStatus[string],
valueProps: MultiValueProps
) {
+ const organization = useOrganization();
const {children, ...props} = valueProps;
const error = status?.error;
@@ -25,7 +27,9 @@ function renderEmailValue (
{children}
{!status.sent && !status.error && }
{status.error && }
- {status.sent && }
+ {status.sent && !organization.features.includes('invite-members-new-modal') && (
+
+ )}
);
diff --git a/static/app/components/modals/inviteMembersModal/useInviteModal.tsx b/static/app/components/modals/inviteMembersModal/useInviteModal.tsx
index 3226afe66d1bb9..939100b0eb9ffe 100644
--- a/static/app/components/modals/inviteMembersModal/useInviteModal.tsx
+++ b/static/app/components/modals/inviteMembersModal/useInviteModal.tsx
@@ -2,6 +2,7 @@ import {useCallback, useEffect, useMemo, useRef, useState} from 'react';
import type {
InviteRow,
+ InviteStatus,
NormalizedInvite,
} from 'sentry/components/modals/inviteMembersModal/types';
import {t} from 'sentry/locale';
@@ -162,9 +163,30 @@ export default function useInviteModal({organization, initialData, source}: Prop
[api, organization, willInvite]
);
+ const removeSentInvites = useCallback(() => {
+ setState(prev => {
+ const emails = prev.pendingInvites[0].emails;
+ const filteredEmails = Array.from(emails).filter(
+ email => !prev.inviteStatus[email]?.sent
+ );
+ return {
+ ...prev,
+ pendingInvites: [
+ {
+ ...prev.pendingInvites[0],
+ emails: new Set(filteredEmails),
+ },
+ ],
+ };
+ });
+ }, []);
+
const sendInvites = useCallback(async () => {
setState(prev => ({...prev, sendingInvites: true}));
await Promise.all(invites.map(sendInvite));
+ if (organization.features.includes('invite-members-new-modal')) {
+ removeSentInvites();
+ }
setState(prev => ({...prev, sendingInvites: false, complete: true}));
trackAnalytics(
@@ -174,7 +196,7 @@ export default function useInviteModal({organization, initialData, source}: Prop
modal_session: sessionId.current,
}
);
- }, [organization, invites, sendInvite, willInvite]);
+ }, [organization, invites, sendInvite, willInvite, removeSentInvites]);
const addInviteRow = useCallback(() => {
setState(prev => ({
@@ -210,6 +232,12 @@ export default function useInviteModal({organization, initialData, source}: Prop
});
}, []);
+ const setInviteStatus = useCallback((inviteStatus: InviteStatus) => {
+ setState(prev => {
+ return {...prev, inviteStatus};
+ });
+ }, []);
+
const removeInviteRow = useCallback((index: number) => {
setState(prev => {
const pendingInvites = [...prev.pendingInvites];
@@ -229,6 +257,7 @@ export default function useInviteModal({organization, initialData, source}: Prop
setEmails,
setRole,
setTeams,
+ setInviteStatus,
willInvite,
complete: state.complete,
inviteStatus: state.inviteStatus,
From bac8e216ba72e1d6493ffa162365a5e5c609707b Mon Sep 17 00:00:00 2001
From: Snigdha Sharma
Date: Thu, 3 Oct 2024 14:14:10 -0700
Subject: [PATCH 102/139] chore(snuba-search): Cleanup and add new logs
(#78544)
---
src/sentry/search/snuba/executors.py | 18 ------------------
src/sentry/utils/snuba.py | 8 ++++++++
2 files changed, 8 insertions(+), 18 deletions(-)
diff --git a/src/sentry/search/snuba/executors.py b/src/sentry/search/snuba/executors.py
index 27f2eefd713948..7c3e1e9017ea8f 100644
--- a/src/sentry/search/snuba/executors.py
+++ b/src/sentry/search/snuba/executors.py
@@ -1720,13 +1720,6 @@ def query(
: max_candidates + 1
]
)
- self.logger.info(
- "GroupAttributesExecutor: found snuba candidates",
- extra={
- "count": len(group_ids_to_pass_to_snuba),
- "max_candidates": max_candidates,
- },
- )
span.set_data("Max Candidates", max_candidates)
span.set_data("Result Size", len(group_ids_to_pass_to_snuba))
@@ -1734,7 +1727,6 @@ def query(
metrics.incr(
"snuba.search.group_attributes.too_many_candidates", skip_internal=False
)
- self.logger.info("GroupAttributesExecutor: too many candidates")
group_ids_to_pass_to_snuba = None
# remove the search filters that are only for postgres
@@ -1778,10 +1770,6 @@ def query(
# limit groups and events to the group ids
for entity_with_group_id in [attr_entity, joined_entity]:
- self.logger.info(
- "GroupAttributesExecutor: adding group_id filter to entity",
- extra={"entity": entity_with_group_id.name},
- )
where_conditions.append(
Condition(
Column("group_id", entity_with_group_id),
@@ -1876,12 +1864,6 @@ def query(
select.append(sort_func)
- if group_ids_to_pass_to_snuba is not None:
- self.logger.info(
- "GroupAttributesExecutor: conditions",
- extra={"where_conditions": where_conditions},
- )
-
query = Query(
match=Join([Relationship(joined_entity, "attributes_inner", attr_entity)]),
select=select,
diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py
index bc112c9606a479..045f103604300a 100644
--- a/src/sentry/utils/snuba.py
+++ b/src/sentry/utils/snuba.py
@@ -1180,6 +1180,14 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet:
elif error["type"] == "schema":
raise SchemaValidationError(error["message"])
elif error["type"] == "invalid_query":
+ logger.warning(
+ "UnqualifiedQueryError",
+ extra={
+ "error": error["message"],
+ "has_data": "data" in body and body["data"] is not None,
+ "query": snuba_requests_list[index].request.serialize(),
+ },
+ )
raise UnqualifiedQueryError(error["message"])
elif error["type"] == "clickhouse":
raise clickhouse_error_codes_map.get(error["code"], QueryExecutionError)(
From 2f4fa604848a1a66f329db0423c33691f7fab44c Mon Sep 17 00:00:00 2001
From: Ryan Skonnord
Date: Thu, 3 Oct 2024 14:20:30 -0700
Subject: [PATCH 103/139] feat(integrations): Let lifecycle capture more log
data after entering (#78574)
Suggested by @iamrajjoshi
---
src/sentry/integrations/utils/metrics.py | 10 +++++++++-
1 file changed, 9 insertions(+), 1 deletion(-)
diff --git a/src/sentry/integrations/utils/metrics.py b/src/sentry/integrations/utils/metrics.py
index 2dcdf719fd4c46..e04b0ccb6a8c24 100644
--- a/src/sentry/integrations/utils/metrics.py
+++ b/src/sentry/integrations/utils/metrics.py
@@ -104,6 +104,14 @@ def __init__(self, payload: EventLifecycleMetric, assume_success: bool = True) -
self.payload = payload
self.assume_success = assume_success
self._state: EventLifecycleOutcome | None = None
+ self._extra = dict(self.payload.get_extras())
+
+ def add_extra(self, name: str, value: Any) -> None:
+ """Add a value to logged "extra" data.
+
+ Overwrites the name with a new value if it was previously used.
+ """
+ self._extra[name] = value
def record_event(
self, outcome: EventLifecycleOutcome, exc: BaseException | None = None
@@ -122,7 +130,7 @@ def record_event(
metrics.incr(key, sample_rate=sample_rate)
if outcome == EventLifecycleOutcome.FAILURE:
- logger.error(key, extra=self.payload.get_extras(), exc_info=exc)
+ logger.error(key, extra=self._extra, exc_info=exc)
def _terminate(
self, new_state: EventLifecycleOutcome, exc: BaseException | None = None
From e44e3dd7d412a6afce2b2e5adb5a54ba2ab17e1d Mon Sep 17 00:00:00 2001
From: getsentry-bot
Date: Thu, 3 Oct 2024 21:36:21 +0000
Subject: [PATCH 104/139] Revert "ref(replay): query custom tags from replays
dataset (#78573)"
This reverts commit 5bc828b169f46afe146248e8ca1bae5f7ab006d8.
Co-authored-by: aliu39 <159852527+aliu39@users.noreply.github.com>
---
static/app/views/alerts/rules/metric/types.tsx | 1 -
static/app/views/alerts/wizard/options.tsx | 2 +-
static/app/views/alerts/wizard/utils.tsx | 2 +-
static/app/views/replays/list/replaySearchBar.tsx | 13 ++++++++-----
4 files changed, 10 insertions(+), 8 deletions(-)
diff --git a/static/app/views/alerts/rules/metric/types.tsx b/static/app/views/alerts/rules/metric/types.tsx
index 357f119c9714de..3bb2c0fc95f260 100644
--- a/static/app/views/alerts/rules/metric/types.tsx
+++ b/static/app/views/alerts/rules/metric/types.tsx
@@ -41,7 +41,6 @@ export enum Dataset {
/** Also used for crash free alerts */
METRICS = 'metrics',
ISSUE_PLATFORM = 'search_issues',
- REPLAYS = 'replays',
}
export enum EventTypes {
diff --git a/static/app/views/alerts/wizard/options.tsx b/static/app/views/alerts/wizard/options.tsx
index 04d93bb1dc0ad6..e25cd7bcefa5da 100644
--- a/static/app/views/alerts/wizard/options.tsx
+++ b/static/app/views/alerts/wizard/options.tsx
@@ -65,7 +65,7 @@ export enum MEPAlertsDataset {
export type MetricAlertType = Exclude;
export const DatasetMEPAlertQueryTypes: Record<
- Exclude, // IssuePlatform (search_issues) is not used in alerts, so we can exclude it here
+ Exclude, // IssuePlatform (search_issues) is not used in alerts, so we can exclude it here
MEPAlertsQueryType
> = {
[Dataset.ERRORS]: MEPAlertsQueryType.ERROR,
diff --git a/static/app/views/alerts/wizard/utils.tsx b/static/app/views/alerts/wizard/utils.tsx
index be16a01def8d56..c52002d16544f0 100644
--- a/static/app/views/alerts/wizard/utils.tsx
+++ b/static/app/views/alerts/wizard/utils.tsx
@@ -6,7 +6,7 @@ import type {MetricAlertType, WizardRuleTemplate} from './options';
// A set of unique identifiers to be able to tie aggregate and dataset back to a wizard alert type
const alertTypeIdentifiers: Record<
- Exclude, // IssuePlatform (search_issues) is not used in alerts, so we can exclude it here
+ Exclude, // IssuePlatform (search_issues) is not used in alerts, so we can exclude it here
Partial>
> = {
[Dataset.ERRORS]: {
diff --git a/static/app/views/replays/list/replaySearchBar.tsx b/static/app/views/replays/list/replaySearchBar.tsx
index a76486e6a20a01..d2ef35da9e2fbc 100644
--- a/static/app/views/replays/list/replaySearchBar.tsx
+++ b/static/app/views/replays/list/replaySearchBar.tsx
@@ -129,7 +129,7 @@ function ReplaySearchBar(props: Props) {
{
orgSlug: organization.slug,
projectIds: projectIds.map(String),
- dataset: Dataset.REPLAYS,
+ dataset: Dataset.ISSUE_PLATFORM,
useCache: true,
enabled: true,
keepPreviousData: false,
@@ -139,7 +139,7 @@ function ReplaySearchBar(props: Props) {
},
{}
);
- const customTags: TagCollection = useMemo(() => {
+ const issuePlatformTags: TagCollection = useMemo(() => {
return (tagQuery.data ?? []).reduce((acc, tag) => {
acc[tag.key] = {...tag, kind: FieldKind.TAG};
return acc;
@@ -147,10 +147,13 @@ function ReplaySearchBar(props: Props) {
}, [tagQuery]);
// tagQuery.isLoading and tagQuery.isError are not used
- const filterKeys = useMemo(() => getReplayFilterKeys(customTags), [customTags]);
+ const filterKeys = useMemo(
+ () => getReplayFilterKeys(issuePlatformTags),
+ [issuePlatformTags]
+ );
const filterKeySections = useMemo(() => {
- return getFilterKeySections(customTags, organization);
- }, [customTags, organization]);
+ return getFilterKeySections(issuePlatformTags, organization);
+ }, [issuePlatformTags, organization]);
const getTagValues = useCallback(
(tag: Tag, searchQuery: string): Promise => {
From 08057c670393601678c948660377bf1bfe44e212 Mon Sep 17 00:00:00 2001
From: Katie Byers
Date: Thu, 3 Oct 2024 15:13:11 -0700
Subject: [PATCH 105/139] fix(grouping): Stop storing secondary hashes for new
groups (#78558)
When an event comes in, we calculate its hash, and if it doesn't match any existing groups, we sometimes* use an old grouping config to calculate a secondary hash, in hopes that that hash matches an existing group to which we can then link the new hash. If that fails, though (in other words, if the secondary hash also doesn't exist), the new hash will either get linked to a group via Seer or end up creating a new group. Once that happens, future events with the same hash value will always match the new hash, making the secondary/backup hash unnecessary.
Since we know we're never going to need it, there's no reason for us to store the secondary hash, but right now we do anyway. This fixes that by not creating the database record for the secondary hash (and not including it in any further processing) in cases where it doesn't match an existing group.
We already had a test verifying that the record _is_ created, so testing this change only required changing `secondary_grouphash_exists_now: True` to `secondary_grouphash_exists_now: False` in the relevant test. The only other change necessary was to fix a test which had been relying on both primary and secondary hashes being created at once, so that it instead creates them separately. Finally, this change lets us eliminate a test ensuring that we store the right grouping config on each hash, since now there is no second hash on which to store data.
*We calculate a secondary hash when a project is in a transition period from one grouping config to another, in order to preserve the continuity of existing groups.
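Below is a minimal sketch of the early-bail logic described above, simplified from the diff that follows (the real helper also annotates grouphashes with metadata and continues with further processing):

    from collections.abc import Sequence

    from sentry.models.grouphash import GroupHash
    from sentry.models.project import Project


    def get_or_create_grouphashes_sketch(
        project: Project, hashes: Sequence[str], grouping_config: str
    ) -> list[GroupHash]:
        # A hash is "secondary" if it was computed with something other than the
        # project's current grouping config.
        is_secondary = grouping_config != project.get_option("sentry:grouping_config")

        # Secondary hashes only matter for linking a new primary hash to an existing
        # group, so if none of them exist yet there is nothing to link to and no
        # reason to create records for them.
        if is_secondary and not GroupHash.objects.filter(
            project=project, hash__in=hashes
        ).exists():
            return []

        return [
            GroupHash.objects.get_or_create(project=project, hash=hash_value)[0]
            for hash_value in hashes
        ]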
---
src/sentry/grouping/ingest/hashing.py | 9 +++++-
.../grouping/test_assign_to_group.py | 20 ++++++++-----
.../grouping/test_grouphash_metadata.py | 28 -------------------
3 files changed, 21 insertions(+), 36 deletions(-)
diff --git a/src/sentry/grouping/ingest/hashing.py b/src/sentry/grouping/ingest/hashing.py
index 0db54211628061..00aea7d63e6b56 100644
--- a/src/sentry/grouping/ingest/hashing.py
+++ b/src/sentry/grouping/ingest/hashing.py
@@ -218,7 +218,14 @@ def find_grouphash_with_group(
def get_or_create_grouphashes(
project: Project, hashes: Sequence[str], grouping_config: str
) -> list[GroupHash]:
- grouphashes = []
+ is_secondary = grouping_config != project.get_option("sentry:grouping_config")
+ grouphashes: list[GroupHash] = []
+
+ # The only utility of secondary hashes is to link new primary hashes to an existing group.
+ # Secondary hashes which are also new are therefore of no value, so there's no need to store or
+ # annotate them and we can bail now.
+ if is_secondary and not GroupHash.objects.filter(project=project, hash__in=hashes).exists():
+ return grouphashes
for hash_value in hashes:
grouphash, created = GroupHash.objects.get_or_create(project=project, hash=hash_value)
diff --git a/tests/sentry/event_manager/grouping/test_assign_to_group.py b/tests/sentry/event_manager/grouping/test_assign_to_group.py
index 411f8d11d87025..2f5ff10b244054 100644
--- a/tests/sentry/event_manager/grouping/test_assign_to_group.py
+++ b/tests/sentry/event_manager/grouping/test_assign_to_group.py
@@ -242,8 +242,8 @@ def get_results_from_saving_event(
# doesn't find an existing group
# c) If the primary (or secondary, if it's calculated) hash finds a group, the event is
# assigned there
-# d) If neither finds a group, a new group is created and both the primary (and secondary,
-# if it's calculated) hashes are stored
+# d) If neither finds a group, a new group is created and the primary hash is stored (but
+# the secondary hash is not, even if it's calculated)
@django_db_all
@@ -276,7 +276,7 @@ def test_new_group(
"primary_grouphash_existed_already": False,
"secondary_grouphash_existed_already": False,
"primary_grouphash_exists_now": True,
- "secondary_grouphash_exists_now": True,
+ "secondary_grouphash_exists_now": False,
"result_tag_value_for_metrics": "no_match",
# Moot since no existing group was passed
"event_assigned_to_given_existing_group": None,
@@ -376,18 +376,24 @@ def test_existing_group_new_hash_exists(
# Set the stage by creating a group tied to the new hash (and possibly the legacy hash as well)
if secondary_hash_exists:
- existing_event = save_event_with_grouping_config(
+ existing_event_with_secondary_hash = save_event_with_grouping_config(
+ event_data, project, LEGACY_GROUPING_CONFIG
+ )
+ existing_event_with_primary_hash = save_event_with_grouping_config(
event_data, project, DEFAULT_GROUPING_CONFIG, LEGACY_GROUPING_CONFIG, True
)
- group_id = existing_event.group_id
+ group_id = existing_event_with_primary_hash.group_id
+ assert (
+ existing_event_with_secondary_hash.group_id == existing_event_with_primary_hash.group_id
+ )
assert group_id is not None
assert GroupHash.objects.filter(project_id=project.id, group_id=group_id).count() == 2
else:
- existing_event = save_event_with_grouping_config(
+ existing_event_with_primary_hash = save_event_with_grouping_config(
event_data, project, DEFAULT_GROUPING_CONFIG
)
- group_id = existing_event.group_id
+ group_id = existing_event_with_primary_hash.group_id
assert group_id is not None
assert GroupHash.objects.filter(project_id=project.id, group_id=group_id).count() == 1
diff --git a/tests/sentry/event_manager/grouping/test_grouphash_metadata.py b/tests/sentry/event_manager/grouping/test_grouphash_metadata.py
index 0b91f6b629cc55..6d0b7a6ebc7660 100644
--- a/tests/sentry/event_manager/grouping/test_grouphash_metadata.py
+++ b/tests/sentry/event_manager/grouping/test_grouphash_metadata.py
@@ -1,7 +1,5 @@
from __future__ import annotations
-from time import time
-
from sentry.models.grouphash import GroupHash
from sentry.models.grouphashmetadata import GroupHashMetadata
from sentry.projectoptions.defaults import DEFAULT_GROUPING_CONFIG, LEGACY_GROUPING_CONFIG
@@ -11,7 +9,6 @@
from sentry.testutils.helpers.features import with_feature
from sentry.testutils.helpers.options import override_options
from sentry.testutils.skips import requires_snuba
-from sentry.utils.types import NonNone
pytestmark = [requires_snuba]
@@ -89,28 +86,3 @@ def test_updates_grouping_config(self):
# Make sure we're dealing with a single grouphash that got updated rather than two different grouphashes
assert grouphash1 and grouphash2 and grouphash1.id == grouphash2.id
-
- @with_feature("organizations:grouphash-metadata-creation")
- def test_stores_correct_config_on_primary_and_secondary_hash(self):
- # Set the project to be in a grouping config transition so that primary and secondary hashes
- # will both be calculated, and include numbers in the message of one of the events sent to
- # Seer so that the primary and secondary hashes will be different (since the legacy config
- # won't parameterize the numbers)
- self.project.update_option("sentry:grouping_config", DEFAULT_GROUPING_CONFIG)
- self.project.update_option("sentry:secondary_grouping_config", LEGACY_GROUPING_CONFIG)
- self.project.update_option("sentry:secondary_grouping_expiry", time() + 3600)
-
- event = save_new_event({"message": "Dogs are great! 11211231"}, self.project)
-
- grouphashes = GroupHash.objects.filter(group_id=NonNone(event.group_id))
- assert len(grouphashes) == 2
-
- primary_grouphash = grouphashes.filter(hash=event.get_primary_hash()).first()
- secondary_grouphash = grouphashes.exclude(hash=event.get_primary_hash()).first()
-
- self.assert_metadata_value(
- primary_grouphash, "latest_grouping_config", DEFAULT_GROUPING_CONFIG
- )
- self.assert_metadata_value(
- secondary_grouphash, "latest_grouping_config", LEGACY_GROUPING_CONFIG
- )
From 8bb718a233b4cfd19f684a5b86152c14d51cc02a Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Thu, 3 Oct 2024 15:22:01 -0700
Subject: [PATCH 106/139] fix(issues): Adjust next event padding (#78583)
---
static/app/views/issueDetails/streamline/eventNavigation.tsx | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/static/app/views/issueDetails/streamline/eventNavigation.tsx b/static/app/views/issueDetails/streamline/eventNavigation.tsx
index 967ef649339337..3795df083e5478 100644
--- a/static/app/views/issueDetails/streamline/eventNavigation.tsx
+++ b/static/app/views/issueDetails/streamline/eventNavigation.tsx
@@ -198,7 +198,7 @@ export const EventNavigation = forwardRef(
-
+
(
css={grayText}
/>
-
+
p.theme.gray100};
`;
From 0fc039cfe492e7b35b66f8347cc26561349d706c Mon Sep 17 00:00:00 2001
From: Katie Byers
Date: Thu, 3 Oct 2024 15:22:19 -0700
Subject: [PATCH 107/139] ref(grouping): Simplify code getting hashes (#78570)
Now that the hierarchical grouping code is gone, a number of our grouping helper functions do little enough that there's no longer a real reason to keep them separate. This refactors both `_hashes_from_sorted_grouping_variants` and `sort_grouping_variants` into one-liners and then pulls them into `get_hashes`.
Note: `_hashes_from_sorted_grouping_variants` has been returning the variant name along with each hash, but `get_hashes` is the only place we were calling it, and we were ignoring that part of the return value, so I eliminated it. Other than that, the refactors are pure simplification, with no behavior changes.
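As a rough sketch, the inlined logic in `get_hashes` now amounts to the following (simplified from the diff below; `variants` is the dict returned by `get_grouping_variants`):

    # Sort so that the "system" variant (if any) comes last, to resolve
    # ambiguities when choosing primary_hash for Snuba
    sorted_variants = sorted(
        variants.items(),
        key=lambda name_and_variant: 1 if name_and_variant[0] == "system" else 0,
    )
    # Take each variant's hash value, dropping Nones and duplicates
    hashes = list({variant.get_hash() for _, variant in sorted_variants} - {None})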
---
src/sentry/eventstore/models.py | 35 +++++++++------------------------
src/sentry/grouping/api.py | 16 ---------------
2 files changed, 9 insertions(+), 42 deletions(-)
diff --git a/src/sentry/eventstore/models.py b/src/sentry/eventstore/models.py
index 7f6c049de8bc19..28ca5ee35c8017 100644
--- a/src/sentry/eventstore/models.py
+++ b/src/sentry/eventstore/models.py
@@ -18,7 +18,7 @@
from sentry import eventtypes
from sentry.db.models import NodeData
-from sentry.grouping.variants import BaseVariant, KeyedVariants
+from sentry.grouping.variants import BaseVariant
from sentry.interfaces.base import Interface, get_interfaces
from sentry.issues.grouptype import GroupCategory
from sentry.issues.issue_occurrence import IssueOccurrence
@@ -353,38 +353,21 @@ def get_hashes(self, force_config: StrategyConfiguration | None = None) -> list[
return hashes
# Create fresh hashes
- from sentry.grouping.api import sort_grouping_variants
variants = self.get_grouping_variants(force_config)
- hashes = [
- hash_
- for _, hash_ in self._hashes_from_sorted_grouping_variants(
- sort_grouping_variants(variants)
- )
- ]
+ # Sort the variants so that the system variant (if any) is always last, in order to resolve
+ # ambiguities when choosing primary_hash for Snuba
+ sorted_variants = sorted(
+ variants.items(),
+ key=lambda name_and_variant: 1 if name_and_variant[0] == "system" else 0,
+ )
+ # Get each variant's hash value, filtering out Nones
+ hashes = list({variant.get_hash() for _, variant in sorted_variants} - {None})
# Write to event before returning
self.data["hashes"] = hashes
return hashes
- @staticmethod
- def _hashes_from_sorted_grouping_variants(
- variants: KeyedVariants,
- ) -> list[tuple[str, str]]:
- """Create hashes from variants and filter out duplicates and None values"""
-
- filtered_hashes = []
- seen_hashes = set()
- for name, variant in variants:
- hash_ = variant.get_hash()
- if hash_ is None or hash_ in seen_hashes:
- continue
-
- seen_hashes.add(hash_)
- filtered_hashes.append((name, hash_))
-
- return filtered_hashes
-
def normalize_stacktraces_for_grouping(self, grouping_config: StrategyConfiguration) -> None:
"""Normalize stacktraces and clear memoized interfaces
diff --git a/src/sentry/grouping/api.py b/src/sentry/grouping/api.py
index a7cab08a0b6572..cb06c7fb0489a9 100644
--- a/src/sentry/grouping/api.py
+++ b/src/sentry/grouping/api.py
@@ -26,7 +26,6 @@
ComponentVariant,
CustomFingerprintVariant,
FallbackVariant,
- KeyedVariants,
SaltedComponentVariant,
)
from sentry.models.grouphash import GroupHash
@@ -370,18 +369,3 @@ def get_grouping_variants_for_event(
rv["fallback"] = FallbackVariant()
return rv
-
-
-def sort_grouping_variants(variants: dict[str, BaseVariant]) -> KeyedVariants:
- """Sort a sequence of variants into flat variants"""
-
- flat_variants = []
-
- for name, variant in variants.items():
- flat_variants.append((name, variant))
-
- # Sort system variant to the back of the list to resolve ambiguities when
- # choosing primary_hash for Snuba
- flat_variants.sort(key=lambda name_and_variant: 1 if name_and_variant[0] == "system" else 0)
-
- return flat_variants
From bbfca977430270851a26ded7c69e5553410ae291 Mon Sep 17 00:00:00 2001
From: Scott Cooper
Date: Thu, 3 Oct 2024 15:28:53 -0700
Subject: [PATCH 108/139] fix(issues): Adjust viewers text color, duplicate
info (#78579)
---
.../group/streamlinedParticipantList.spec.tsx | 16 +++++++++---
.../group/streamlinedParticipantList.tsx | 25 +++++++++++++------
2 files changed, 31 insertions(+), 10 deletions(-)
diff --git a/static/app/components/group/streamlinedParticipantList.spec.tsx b/static/app/components/group/streamlinedParticipantList.spec.tsx
index 62ba12a621f796..915f31e17a660f 100644
--- a/static/app/components/group/streamlinedParticipantList.spec.tsx
+++ b/static/app/components/group/streamlinedParticipantList.spec.tsx
@@ -19,22 +19,32 @@ describe('ParticipantList', () => {
it('expands and collapses the list when clicked', async () => {
render( );
expect(screen.queryByText('#team-1')).not.toBeInTheDocument();
- await userEvent.click(screen.getByText('JD'));
+ await userEvent.click(screen.getByText('JD'), {skipHover: true});
expect(await screen.findByText('#team-1')).toBeInTheDocument();
expect(await screen.findByText('Bob Alice')).toBeInTheDocument();
expect(screen.getByText('Teams (2)')).toBeInTheDocument();
expect(screen.getByText('Individuals (2)')).toBeInTheDocument();
- await userEvent.click(screen.getAllByText('JD')[0]);
+ await userEvent.click(screen.getAllByText('JD')[0], {skipHover: true});
expect(screen.queryByText('Bob Alice')).not.toBeInTheDocument();
});
it('does not display section headers when there is only users or teams', async () => {
render( );
- await userEvent.click(screen.getByText('JD'));
+ await userEvent.click(screen.getByText('JD'), {skipHover: true});
expect(await screen.findByText('Bob Alice')).toBeInTheDocument();
expect(screen.queryByText('Teams')).not.toBeInTheDocument();
});
+
+ it('skips duplicate information between name and email', async () => {
+ const duplicateInfoUsers = [
+ UserFixture({id: '1', name: 'john.doe@example.com', email: 'john.doe@example.com'}),
+ ];
+ render( );
+ await userEvent.click(screen.getByText('J'), {skipHover: true});
+ // Would find two elements if it was duplicated
+ expect(await screen.findByText('john.doe@example.com')).toBeInTheDocument();
+ });
});
diff --git a/static/app/components/group/streamlinedParticipantList.tsx b/static/app/components/group/streamlinedParticipantList.tsx
index 30763a9902f059..b64409f0f81fd4 100644
--- a/static/app/components/group/streamlinedParticipantList.tsx
+++ b/static/app/components/group/streamlinedParticipantList.tsx
@@ -49,7 +49,9 @@ export default function ParticipantList({users, teams}: DropdownListProps) {
{`#${team.slug}`}
- {tn('%s member', '%s members', team.memberCount)}
+
+ {tn('%s member', '%s members', team.memberCount)}
+
))}
@@ -59,10 +61,12 @@ export default function ParticipantList({users, teams}: DropdownListProps) {
{users.map(user => (
-
- {user.name}
- {user.email}
-
+
+ {user.name}
+ {user.email !== user.name ? (
+ {user.email}
+ ) : null}
+
))}
@@ -82,6 +86,7 @@ const ParticipantListWrapper = styled('div')`
max-height: 325px;
overflow-y: auto;
border-radius: ${p => p.theme.borderRadius};
+ color: ${p => p.theme.textColor};
& > div:not(:last-child) {
border-bottom: 1px solid ${p => p.theme.border};
@@ -106,10 +111,16 @@ const UserRow = styled('div')`
gap: ${space(1)};
line-height: 1.2;
font-size: ${p => p.theme.fontSizeSmall};
+ min-height: 45px;
+`;
+
+const NameWrapper = styled('div')`
+ & > div:only-child {
+ margin-top: ${space(0.25)};
+ }
`;
-const SubText = styled('div')`
- color: ${p => p.theme.subText};
+const SmallText = styled('div')`
font-size: ${p => p.theme.fontSizeExtraSmall};
`;
From 136b788aca164a08a8785b544cb4feb9dc69a892 Mon Sep 17 00:00:00 2001
From: Michelle Fu <83109586+mifu67@users.noreply.github.com>
Date: Thu, 3 Oct 2024 15:31:14 -0700
Subject: [PATCH 109/139] fix(metric alerts): add http.url to default tags
(#78576)
`http.url` was missing from the default tags, which was causing some issues
for users who are flagged in to the default-tags feature.
---
src/sentry/search/events/constants.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/src/sentry/search/events/constants.py b/src/sentry/search/events/constants.py
index a88c2429fc700f..3e202ac5e69c78 100644
--- a/src/sentry/search/events/constants.py
+++ b/src/sentry/search/events/constants.py
@@ -326,6 +326,7 @@ class ThresholdDict(TypedDict):
"histogram_outlier",
"http.method",
"http.status_code",
+ "http.url",
"measurement_rating",
"os.name",
"query_hash",
From 937ee131848fdef6bc4e957fc859fb84122dd3df Mon Sep 17 00:00:00 2001
From: Colleen O'Rourke
Date: Thu, 3 Oct 2024 15:33:57 -0700
Subject: [PATCH 110/139] ref(alerts): Update metric alert email template
(#78495)
Update "sensitivity" to become "responsiveness" for anomaly detection
email templates
Closes https://getsentry.atlassian.net/browse/ALRT-315
---
src/sentry/incidents/action_handlers.py | 2 +-
tests/sentry/incidents/action_handlers/test_email.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/sentry/incidents/action_handlers.py b/src/sentry/incidents/action_handlers.py
index 30b6260a3a5a1b..190ffdaebea093 100644
--- a/src/sentry/incidents/action_handlers.py
+++ b/src/sentry/incidents/action_handlers.py
@@ -370,7 +370,7 @@ def generate_incident_trigger_email_context(
threshold: None | str | float = None
if alert_rule.detection_type == AlertRuleDetectionType.DYNAMIC:
threshold_prefix_string = alert_rule.detection_type.title()
- threshold = f"({alert_rule.sensitivity} sensitivity)"
+ threshold = f"({alert_rule.sensitivity} responsiveness)"
alert_link_params["type"] = "anomaly_detection"
else:
threshold_prefix_string = ">" if show_greater_than_string else "<"
diff --git a/tests/sentry/incidents/action_handlers/test_email.py b/tests/sentry/incidents/action_handlers/test_email.py
index 3955b9825f6915..3bee199a0beffa 100644
--- a/tests/sentry/incidents/action_handlers/test_email.py
+++ b/tests/sentry/incidents/action_handlers/test_email.py
@@ -387,7 +387,7 @@ def test_dynamic_alert(self, mock_seer_request):
"incident_name": incident.title,
"aggregate": aggregate,
"query": action.alert_rule_trigger.alert_rule.snuba_query.query,
- "threshold": f"({alert_rule.sensitivity} sensitivity)",
+ "threshold": f"({alert_rule.sensitivity} responsiveness)",
"status": INCIDENT_STATUS[IncidentStatus(incident.status)],
"status_key": INCIDENT_STATUS[IncidentStatus(incident.status)].lower(),
"environment": "All",
From 2cd5b5778fce0d020d35b7563d8ca14743f6cda0 Mon Sep 17 00:00:00 2001
From: Alberto Leal
Date: Thu, 3 Oct 2024 19:41:26 -0400
Subject: [PATCH 111/139] chore(billing): Fix
buffer.process-incr.model-key-missing metric (#78291)
This removes a useless tag from the `buffer.process-incr.model-key-missing`
metric; `model_key` is always `None` at this point.
---
src/sentry/buffer/redis.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/sentry/buffer/redis.py b/src/sentry/buffer/redis.py
index 2dc7894f5c908f..8b19cbf05e9e30 100644
--- a/src/sentry/buffer/redis.py
+++ b/src/sentry/buffer/redis.py
@@ -529,7 +529,7 @@ def _generate_process_incr_kwargs(model_key: str | None) -> dict[str, Any]:
# model associated with the model_key.
process_incr_kwargs: dict[str, Any] = dict()
if model_key is None:
- metrics.incr("buffer.process-incr.model-key-missing", tags={"model_key": model_key})
+ metrics.incr("buffer.process-incr.model-key-missing")
return process_incr_kwargs
queue = pending_buffers_router.queue(model_key=model_key)
if queue is not None:
From c8d76e131bfdca5c55fe5c5e5012958a989030ed Mon Sep 17 00:00:00 2001
From: Andrew Liu <159852527+aliu39@users.noreply.github.com>
Date: Thu, 3 Oct 2024 18:46:27 -0700
Subject: [PATCH 112/139] ref(replay): query custom tags from replays dataset
(2) (#78581)
---
static/app/views/alerts/rules/metric/types.tsx | 1 +
static/app/views/alerts/wizard/options.tsx | 2 +-
static/app/views/alerts/wizard/utils.tsx | 2 +-
static/app/views/replays/list/replaySearchBar.tsx | 13 +++++--------
4 files changed, 8 insertions(+), 10 deletions(-)
diff --git a/static/app/views/alerts/rules/metric/types.tsx b/static/app/views/alerts/rules/metric/types.tsx
index 3bb2c0fc95f260..357f119c9714de 100644
--- a/static/app/views/alerts/rules/metric/types.tsx
+++ b/static/app/views/alerts/rules/metric/types.tsx
@@ -41,6 +41,7 @@ export enum Dataset {
/** Also used for crash free alerts */
METRICS = 'metrics',
ISSUE_PLATFORM = 'search_issues',
+ REPLAYS = 'replays',
}
export enum EventTypes {
diff --git a/static/app/views/alerts/wizard/options.tsx b/static/app/views/alerts/wizard/options.tsx
index e25cd7bcefa5da..04d93bb1dc0ad6 100644
--- a/static/app/views/alerts/wizard/options.tsx
+++ b/static/app/views/alerts/wizard/options.tsx
@@ -65,7 +65,7 @@ export enum MEPAlertsDataset {
export type MetricAlertType = Exclude;
export const DatasetMEPAlertQueryTypes: Record<
- Exclude, // IssuePlatform (search_issues) is not used in alerts, so we can exclude it here
+ Exclude, // IssuePlatform (search_issues) is not used in alerts, so we can exclude it here
MEPAlertsQueryType
> = {
[Dataset.ERRORS]: MEPAlertsQueryType.ERROR,
diff --git a/static/app/views/alerts/wizard/utils.tsx b/static/app/views/alerts/wizard/utils.tsx
index c52002d16544f0..be16a01def8d56 100644
--- a/static/app/views/alerts/wizard/utils.tsx
+++ b/static/app/views/alerts/wizard/utils.tsx
@@ -6,7 +6,7 @@ import type {MetricAlertType, WizardRuleTemplate} from './options';
// A set of unique identifiers to be able to tie aggregate and dataset back to a wizard alert type
const alertTypeIdentifiers: Record<
- Exclude, // IssuePlatform (search_issues) is not used in alerts, so we can exclude it here
+ Exclude, // IssuePlatform (search_issues) is not used in alerts, so we can exclude it here
Partial>
> = {
[Dataset.ERRORS]: {
diff --git a/static/app/views/replays/list/replaySearchBar.tsx b/static/app/views/replays/list/replaySearchBar.tsx
index d2ef35da9e2fbc..a76486e6a20a01 100644
--- a/static/app/views/replays/list/replaySearchBar.tsx
+++ b/static/app/views/replays/list/replaySearchBar.tsx
@@ -129,7 +129,7 @@ function ReplaySearchBar(props: Props) {
{
orgSlug: organization.slug,
projectIds: projectIds.map(String),
- dataset: Dataset.ISSUE_PLATFORM,
+ dataset: Dataset.REPLAYS,
useCache: true,
enabled: true,
keepPreviousData: false,
@@ -139,7 +139,7 @@ function ReplaySearchBar(props: Props) {
},
{}
);
- const issuePlatformTags: TagCollection = useMemo(() => {
+ const customTags: TagCollection = useMemo(() => {
return (tagQuery.data ?? []).reduce((acc, tag) => {
acc[tag.key] = {...tag, kind: FieldKind.TAG};
return acc;
@@ -147,13 +147,10 @@ function ReplaySearchBar(props: Props) {
}, [tagQuery]);
// tagQuery.isLoading and tagQuery.isError are not used
- const filterKeys = useMemo(
- () => getReplayFilterKeys(issuePlatformTags),
- [issuePlatformTags]
- );
+ const filterKeys = useMemo(() => getReplayFilterKeys(customTags), [customTags]);
const filterKeySections = useMemo(() => {
- return getFilterKeySections(issuePlatformTags, organization);
- }, [issuePlatformTags, organization]);
+ return getFilterKeySections(customTags, organization);
+ }, [customTags, organization]);
const getTagValues = useCallback(
(tag: Tag, searchQuery: string): Promise => {
From b5d738765bf61a0b5fef666d7dbca5bac48044b3 Mon Sep 17 00:00:00 2001
From: getsentry-bot
Date: Fri, 4 Oct 2024 02:06:48 +0000
Subject: [PATCH 113/139] Revert "fix(metric alerts): add http.url to default
tags (#78576)"
This reverts commit 136b788aca164a08a8785b544cb4feb9dc69a892.
Co-authored-by: schew2381 <67301797+schew2381@users.noreply.github.com>
---
src/sentry/search/events/constants.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/src/sentry/search/events/constants.py b/src/sentry/search/events/constants.py
index 3e202ac5e69c78..a88c2429fc700f 100644
--- a/src/sentry/search/events/constants.py
+++ b/src/sentry/search/events/constants.py
@@ -326,7 +326,6 @@ class ThresholdDict(TypedDict):
"histogram_outlier",
"http.method",
"http.status_code",
- "http.url",
"measurement_rating",
"os.name",
"query_hash",
From 6b954eded7cd63fbc520244d455f7576a2a4dd23 Mon Sep 17 00:00:00 2001
From: Sigrid Huemer <32902192+s1gr1d@users.noreply.github.com>
Date: Fri, 4 Oct 2024 10:08:53 +0200
Subject: [PATCH 114/139] feat(platform): Add javascript-nuxt platform in FE
(#78467)
Frontend part for the Nuxt onboarding.
Backend part: https://github.com/getsentry/sentry/pull/76851
---
package.json | 2 +-
.../onboarding/frameworkSuggestionModal.tsx | 1 +
.../onboarding/productSelection.tsx | 4 +
static/app/data/platformCategories.tsx | 6 +
static/app/data/platformPickerCategories.tsx | 1 +
static/app/data/platforms.tsx | 7 +
.../javascript/nuxt.spec.tsx | 85 ++++
.../gettingStartedDocs/javascript/nuxt.tsx | 386 ++++++++++++++++++
static/app/types/project.tsx | 1 +
yarn.lock | 8 +-
10 files changed, 496 insertions(+), 5 deletions(-)
create mode 100644 static/app/gettingStartedDocs/javascript/nuxt.spec.tsx
create mode 100644 static/app/gettingStartedDocs/javascript/nuxt.tsx
diff --git a/package.json b/package.json
index 7879824ff1b507..f6ae0a2a087712 100644
--- a/package.json
+++ b/package.json
@@ -141,7 +141,7 @@
"papaparse": "^5.3.2",
"pegjs": "^0.10.0",
"pegjs-loader": "^0.5.8",
- "platformicons": "^6.0.1",
+ "platformicons": "^6.0.3",
"po-catalog-loader": "2.1.0",
"prettier": "3.3.2",
"prismjs": "^1.29.0",
diff --git a/static/app/components/onboarding/frameworkSuggestionModal.tsx b/static/app/components/onboarding/frameworkSuggestionModal.tsx
index f1e21cdb56933e..877dcfcd3ff9be 100644
--- a/static/app/components/onboarding/frameworkSuggestionModal.tsx
+++ b/static/app/components/onboarding/frameworkSuggestionModal.tsx
@@ -46,6 +46,7 @@ export const topJavascriptFrameworks: PlatformKey[] = [
'javascript-nextjs',
'javascript-react',
'javascript-vue',
+ 'javascript-nuxt',
'javascript-angular',
'javascript-solid',
'javascript-solidstart',
diff --git a/static/app/components/onboarding/productSelection.tsx b/static/app/components/onboarding/productSelection.tsx
index 3ae11bf259310b..d8d1798c5d04ce 100644
--- a/static/app/components/onboarding/productSelection.tsx
+++ b/static/app/components/onboarding/productSelection.tsx
@@ -119,6 +119,10 @@ export const platformProductAvailability = {
ProductSolution.PERFORMANCE_MONITORING,
ProductSolution.SESSION_REPLAY,
],
+ 'javascript-nuxt': [
+ ProductSolution.PERFORMANCE_MONITORING,
+ ProductSolution.SESSION_REPLAY,
+ ],
'javascript-angular': [
ProductSolution.PERFORMANCE_MONITORING,
ProductSolution.SESSION_REPLAY,
diff --git a/static/app/data/platformCategories.tsx b/static/app/data/platformCategories.tsx
index b30c69a502dfa1..79c76885db50c3 100644
--- a/static/app/data/platformCategories.tsx
+++ b/static/app/data/platformCategories.tsx
@@ -21,6 +21,7 @@ export const frontend: PlatformKey[] = [
'javascript-ember',
'javascript-gatsby',
'javascript-nextjs',
+ 'javascript-nuxt',
'javascript-react',
'javascript-remix',
'javascript-solid',
@@ -228,6 +229,7 @@ export const platformsWithNestedInstrumentationGuides: PlatformKey[] = [
'javascript-ember',
'javascript-gatsby',
'javascript-nextjs',
+ 'javascript-nuxt',
'javascript-react',
'javascript-remix',
'javascript-solid',
@@ -289,6 +291,7 @@ export const profiling: PlatformKey[] = [
'node-koa',
'node-connect',
'javascript-nextjs',
+ 'javascript-nuxt',
'javascript-remix',
'javascript-solidstart',
'javascript-sveltekit',
@@ -340,6 +343,7 @@ export const releaseHealth: PlatformKey[] = [
'javascript-gatsby',
'javascript-vue',
'javascript-nextjs',
+ 'javascript-nuxt',
'javascript-remix',
'javascript-solid',
'javascript-solidstart',
@@ -445,6 +449,7 @@ export const replayFrontendPlatforms: readonly PlatformKey[] = [
'javascript-ember',
'javascript-gatsby',
'javascript-nextjs',
+ 'javascript-nuxt',
'javascript-react',
'javascript-remix',
'javascript-solid',
@@ -631,6 +636,7 @@ const customMetricFrontendPlatforms: readonly PlatformKey[] = [
'javascript-ember',
'javascript-gatsby',
'javascript-nextjs',
+ 'javascript-nuxt',
'javascript-react',
'javascript-remix',
'javascript-solid',
diff --git a/static/app/data/platformPickerCategories.tsx b/static/app/data/platformPickerCategories.tsx
index e22d5b470b0fcc..2a8e73c4111a7e 100644
--- a/static/app/data/platformPickerCategories.tsx
+++ b/static/app/data/platformPickerCategories.tsx
@@ -45,6 +45,7 @@ const browser: Set = new Set([
'javascript-svelte',
'javascript-sveltekit',
'javascript-vue',
+ 'javascript-nuxt',
'unity',
]);
diff --git a/static/app/data/platforms.tsx b/static/app/data/platforms.tsx
index e6aa1559bcae4d..3fd8d5b6880ed8 100644
--- a/static/app/data/platforms.tsx
+++ b/static/app/data/platforms.tsx
@@ -354,6 +354,13 @@ export const platforms: PlatformIntegration[] = [
language: 'javascript',
link: 'https://docs.sentry.io/platforms/javascript/guides/vue/',
},
+ {
+ id: 'javascript-nuxt',
+ name: 'Nuxt',
+ type: 'framework',
+ language: 'javascript',
+ link: 'https://docs.sentry.io/platforms/javascript/guides/nuxt/',
+ },
{
id: 'kotlin',
name: 'Kotlin',
diff --git a/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx b/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx
new file mode 100644
index 00000000000000..ab3a3f8578e521
--- /dev/null
+++ b/static/app/gettingStartedDocs/javascript/nuxt.spec.tsx
@@ -0,0 +1,85 @@
+import {renderWithOnboardingLayout} from 'sentry-test/onboarding/renderWithOnboardingLayout';
+import {screen} from 'sentry-test/reactTestingLibrary';
+import {textWithMarkupMatcher} from 'sentry-test/utils';
+
+import {ProductSolution} from 'sentry/components/onboarding/productSelection';
+
+import docs from './nuxt';
+
+describe('javascript-nuxt onboarding docs', function () {
+ it('renders onboarding docs correctly', () => {
+ renderWithOnboardingLayout(docs);
+
+ // Renders main headings
+ expect(screen.getByRole('heading', {name: 'Install'})).toBeInTheDocument();
+ expect(screen.getByRole('heading', {name: 'Configure SDK'})).toBeInTheDocument();
+ expect(screen.getByRole('heading', {name: 'Upload Source Maps'})).toBeInTheDocument();
+ expect(screen.getByRole('heading', {name: 'Verify'})).toBeInTheDocument();
+
+ // Includes 2 import statements
+ expect(
+ screen.getAllByText(
+ textWithMarkupMatcher(/import \* as Sentry from "@sentry\/nuxt"/)
+ )
+ ).toHaveLength(2);
+ });
+
+ it('displays sample rates by default', () => {
+ renderWithOnboardingLayout(docs, {
+ selectedProducts: [
+ ProductSolution.ERROR_MONITORING,
+ ProductSolution.PERFORMANCE_MONITORING,
+ ProductSolution.SESSION_REPLAY,
+ ],
+ });
+
+ expect(screen.getAllByText(textWithMarkupMatcher(/tracesSampleRate/))).toHaveLength(
+ 2
+ ); // client and server
+ expect(
+ screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate/))
+ ).toBeInTheDocument(); // only client
+ expect(
+ screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate/))
+ ).toBeInTheDocument(); // only client
+ });
+
+ it('enables performance setting the tracesSampleRate to 1', () => {
+ renderWithOnboardingLayout(docs, {
+ selectedProducts: [
+ ProductSolution.ERROR_MONITORING,
+ ProductSolution.PERFORMANCE_MONITORING,
+ ],
+ });
+
+ expect(
+ screen.getAllByText(textWithMarkupMatcher(/tracesSampleRate: 1\.0/))
+ ).toHaveLength(2);
+ });
+
+ it('enables replay by setting replay samplerates', () => {
+ renderWithOnboardingLayout(docs, {
+ selectedProducts: [
+ ProductSolution.ERROR_MONITORING,
+ ProductSolution.SESSION_REPLAY,
+ ],
+ });
+
+ expect(
+ screen.getByText(textWithMarkupMatcher(/replaysSessionSampleRate: 0\.1/))
+ ).toBeInTheDocument();
+ expect(
+ screen.getByText(textWithMarkupMatcher(/replaysOnErrorSampleRate: 1\.0/))
+ ).toBeInTheDocument();
+ });
+
+ it('enables profiling by setting profiling sample rates', () => {
+ renderWithOnboardingLayout(docs, {
+ selectedProducts: [ProductSolution.ERROR_MONITORING, ProductSolution.PROFILING],
+ });
+
+ expect(
+ screen.getAllByText(textWithMarkupMatcher(/profilesSampleRate: 1\.0/))
+ ).toHaveLength(2);
+ });
+});
diff --git a/static/app/gettingStartedDocs/javascript/nuxt.tsx b/static/app/gettingStartedDocs/javascript/nuxt.tsx
new file mode 100644
index 00000000000000..8bda0a48756bab
--- /dev/null
+++ b/static/app/gettingStartedDocs/javascript/nuxt.tsx
@@ -0,0 +1,386 @@
+import {Fragment} from 'react';
+import styled from '@emotion/styled';
+
+import Alert from 'sentry/components/alert';
+import ExternalLink from 'sentry/components/links/externalLink';
+import crashReportCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/crashReportCallout';
+import widgetCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/widgetCallout';
+import TracePropagationMessage from 'sentry/components/onboarding/gettingStartedDoc/replay/tracePropagationMessage';
+import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step';
+import type {
+ Docs,
+ DocsParams,
+ OnboardingConfig,
+} from 'sentry/components/onboarding/gettingStartedDoc/types';
+import {
+ getCrashReportJavaScriptInstallStep,
+ getCrashReportModalConfigDescription,
+ getCrashReportModalIntroduction,
+ getFeedbackConfigureDescription,
+} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding';
+import {getJSMetricsOnboarding} from 'sentry/components/onboarding/gettingStartedDoc/utils/metricsOnboarding';
+import {MaybeBrowserProfilingBetaWarning} from 'sentry/components/onboarding/gettingStartedDoc/utils/profilingOnboarding';
+import {
+ getReplayConfigOptions,
+ getReplayConfigureDescription,
+} from 'sentry/components/onboarding/gettingStartedDoc/utils/replayOnboarding';
+import {t, tct} from 'sentry/locale';
+
+type Params = DocsParams;
+
+const getNuxtModuleSnippet = () => `
+export default defineNuxtConfig({
+ modules: ["@sentry/nuxt/module"],
+});
+`;
+
+const getSdkClientSetupSnippet = (params: Params) => `
+import * as Sentry from "@sentry/nuxt";
+
+Sentry.init({
+ // If set up, you can use your runtime config here
+ // dsn: useRuntimeConfig().public.sentry.dsn,
+ dsn: "${params.dsn.public}",${
+ params.isReplaySelected
+ ? `
+ integrations: [Sentry.replayIntegration(${getReplayConfigOptions(params.replayOptions)})],`
+ : ''
+ }${
+ params.isPerformanceSelected
+ ? `
+ // Tracing
+ // We recommend adjusting this value in production, or using a tracesSampler for finer control.
+ tracesSampleRate: 1.0, // Capture 100% of the transactions
+ // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
+ tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],`
+ : ''
+ }${
+ params.isReplaySelected
+ ? `
+ // Session Replay
+ replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
+ replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.`
+ : ''
+ }${
+ params.isProfilingSelected
+ ? `
+ // Set profilesSampleRate to 1.0 to profile every transaction.
+ // Since profilesSampleRate is relative to tracesSampleRate,
+ // the final profiling rate can be computed as tracesSampleRate * profilesSampleRate
+ // For example, a tracesSampleRate of 0.5 and profilesSampleRate of 0.5 would
+ // results in 25% of transactions being profiled (0.5*0.5=0.25)
+ profilesSampleRate: 1.0,`
+ : ''
+ }
+});
+`;
+
+const getSdkServerSetupSnippet = (params: Params) => `
+import * as Sentry from "@sentry/nuxt";
+
+Sentry.init({
+ dsn: "${params.dsn.public}",${
+ params.isPerformanceSelected
+ ? `
+ // Tracing
+ // We recommend adjusting this value in production, or using a tracesSampler for finer control.
+ tracesSampleRate: 1.0, // Capture 100% of the transactions
+ // Set 'tracePropagationTargets' to control for which URLs distributed tracing should be enabled
+ tracePropagationTargets: ["localhost", /^https:\\/\\/yourserver\\.io\\/api/],`
+ : ''
+ }${
+ params.isProfilingSelected
+ ? `
+ // Set profilesSampleRate to 1.0 to profile every transaction.
+ // Since profilesSampleRate is relative to tracesSampleRate,
+ // the final profiling rate can be computed as tracesSampleRate * profilesSampleRate
+ // For example, a tracesSampleRate of 0.5 and profilesSampleRate of 0.5 would
+ // results in 25% of transactions being profiled (0.5*0.5=0.25)
+ profilesSampleRate: 1.0,`
+ : ''
+ }
+});
+`;
+
+const getVerifyNuxtSnippet = () => `
+
+
+
+ Trigger Error
+ `;
+
+const getInstallConfig = () => [
+ {
+ language: 'bash',
+ code: [
+ {
+ label: 'npm',
+ value: 'npm',
+ language: 'bash',
+ code: 'npm install --save @sentry/nuxt',
+ },
+ {
+ label: 'yarn',
+ value: 'yarn',
+ language: 'bash',
+ code: 'yarn add @sentry/nuxt',
+ },
+ {
+ label: 'pnpm',
+ value: 'pnpm',
+ language: 'bash',
+ code: `pnpm add @sentry/nuxt`,
+ },
+ ],
+ },
+];
+
+const onboarding: OnboardingConfig = {
+ introduction: params => (
+
+
+
+ {tct(
+ 'In this quick guide you’ll use [strong:npm], [strong:yarn] or [strong:pnpm] to set up:',
+ {
+ strong: ,
+ }
+ )}
+
+
+ ),
+ install: () => [
+ {
+ type: StepType.INSTALL,
+ description: t(
+ 'Add the Sentry Nuxt SDK as a dependency using your preferred package manager:'
+ ),
+ configurations: getInstallConfig(),
+ },
+ ],
+ configure: (params: Params) => [
+ {
+ type: StepType.CONFIGURE,
+ configurations: [
+ {
+ description: tct(
+ 'Add the Sentry Nuxt module in your [code:nuxt.config.ts] file:',
+ {code:
}
+ ),
+ code: [
+ {
+ label: 'TypeScript',
+ value: 'typescript',
+ language: 'typescript',
+ filename: 'nuxt.config.ts',
+ code: getNuxtModuleSnippet(),
+ },
+ ],
+ },
+ {
+ description: tct(
+ 'For the client, create a [codeFile:sentry.client.config.ts] file in your project root and initialize the Sentry SDK:',
+ {codeFile:
}
+ ),
+ code: [
+ {
+ label: 'TypeScript',
+ value: 'typescript',
+ language: 'typescript',
+ filename: 'sentry.client.config.ts',
+ code: getSdkClientSetupSnippet(params),
+ },
+ ],
+ },
+ {
+ description: (
+
+
+ {tct(
+ 'For the server, create a [codeFile:sentry.server.config.ts] file in your project root and initialize the Sentry SDK:',
+ {codeFile:
}
+ )}
+
+
+
+ {tct(
+ 'To complete the server-side setup, follow the [link:Sentry Nuxt docs] for guidance. Nuxt compiles your code in ESM on the server side as well, so the deployment setup can get tricky depending on where you deploy your application.',
+ {
+ link: (
+
+ ),
+ }
+ )}
+
+
+ ),
+ code: [
+ {
+ label: 'TypeScript',
+ value: 'typescript',
+ language: 'typescript',
+ filename: 'sentry.server.config.ts',
+ code: getSdkServerSetupSnippet(params),
+ },
+ ],
+ },
+ ],
+ },
+ {
+ title: t('Upload Source Maps'),
+ description: tct(
+ 'To upload source maps to Sentry, follow the [link:instructions in our documentation].',
+ {
+ link: (
+
+ ),
+ }
+ ),
+ },
+ ],
+ verify: () => [
+ {
+ type: StepType.VERIFY,
+ description: t(
+ "This snippet contains an intentional error and can be used as a test to make sure that everything's working as expected."
+ ),
+ configurations: [
+ {
+ code: [
+ {
+ label: 'Vue',
+ value: 'vue',
+ language: 'html',
+ code: getVerifyNuxtSnippet(),
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ nextSteps: () => [
+ {
+ id: 'nuxt-features',
+ name: t('Nuxt Features'),
+ description: t('Learn about our first class integration with the Nuxt framework.'),
+ link: 'https://docs.sentry.io/platforms/javascript/guides/nuxt/features/',
+ },
+ ],
+};
+
+const replayOnboarding: OnboardingConfig = {
+ install: () => [
+ {
+ type: StepType.INSTALL,
+ description: tct(
+ 'You need a minimum version 8.9.1 of [code:@sentry/nuxt] in order to use Session Replay. You do not need to install any additional packages.',
+ {
+ code:
,
+ }
+ ),
+ configurations: getInstallConfig(),
+ },
+ ],
+ configure: (params: Params) => [
+ {
+ type: StepType.CONFIGURE,
+ description: getReplayConfigureDescription({
+ link: 'https://docs.sentry.io/platforms/javascript/guides/nuxt/session-replay/',
+ }),
+ configurations: [
+ {
+ code: [
+ {
+ label: 'JavaScript',
+ value: 'javascript',
+ language: 'javascript',
+ code: getSdkClientSetupSnippet(params),
+ },
+ ],
+ additionalInfo: ,
+ },
+ ],
+ },
+ ],
+ verify: () => [],
+ nextSteps: () => [],
+};
+
+const feedbackOnboarding: OnboardingConfig = {
+ install: () => [
+ {
+ type: StepType.INSTALL,
+ description: tct(
+ 'For the User Feedback integration to work, you must have the Sentry browser SDK package, or an equivalent framework SDK (e.g. [code:@sentry/nuxt]) installed, minimum version 7.85.0.',
+ {
+ code:
,
+ }
+ ),
+ configurations: getInstallConfig(),
+ },
+ ],
+ configure: (params: Params) => [
+ {
+ type: StepType.CONFIGURE,
+ description: getFeedbackConfigureDescription({
+ linkConfig:
+ 'https://docs.sentry.io/platforms/javascript/guides/nuxt/user-feedback/configuration/',
+ linkButton:
+ 'https://docs.sentry.io/platforms/javascript/guides/nuxt/user-feedback/configuration/#bring-your-own-button',
+ }),
+ configurations: [
+ {
+ code: [
+ {
+ label: 'JavaScript',
+ value: 'javascript',
+ language: 'javascript',
+ code: getSdkClientSetupSnippet(params),
+ },
+ ],
+ },
+ ],
+ additionalInfo: crashReportCallout({
+ link: 'https://docs.sentry.io/platforms/nuxt/guides/nuxt/user-feedback/#crash-report-modal',
+ }),
+ },
+ ],
+ verify: () => [],
+ nextSteps: () => [],
+};
+
+const crashReportOnboarding: OnboardingConfig = {
+ introduction: () => getCrashReportModalIntroduction(),
+ install: (params: Params) => getCrashReportJavaScriptInstallStep(params),
+ configure: () => [
+ {
+ type: StepType.CONFIGURE,
+ description: getCrashReportModalConfigDescription({
+ link: 'https://docs.sentry.io/platforms/javascript/guides/nuxt/user-feedback/configuration/#crash-report-modal',
+ }),
+ additionalInfo: widgetCallout({
+ link: 'https://docs.sentry.io/platforms/javascript/guides/nuxt/user-feedback/#user-feedback-widget',
+ }),
+ },
+ ],
+ verify: () => [],
+ nextSteps: () => [],
+};
+
+const docs: Docs = {
+ onboarding,
+ feedbackOnboardingNpm: feedbackOnboarding,
+ replayOnboarding,
+ customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}),
+ crashReportOnboarding,
+};
+
+const StyledAlert = styled(Alert)`
+ margin-bottom: 0;
+`;
+
+export default docs;
diff --git a/static/app/types/project.tsx b/static/app/types/project.tsx
index 74e5e7051b3650..d5bd5dcf76c278 100644
--- a/static/app/types/project.tsx
+++ b/static/app/types/project.tsx
@@ -217,6 +217,7 @@ export type PlatformKey =
| 'javascript-ember'
| 'javascript-gatsby'
| 'javascript-nextjs'
+ | 'javascript-nuxt'
| 'javascript-react'
| 'javascript-remix'
| 'javascript-solid'
diff --git a/yarn.lock b/yarn.lock
index a4a6ec76b4329d..e2b13bfd92edb1 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -9810,10 +9810,10 @@ platform@^1.3.3:
resolved "https://registry.yarnpkg.com/platform/-/platform-1.3.6.tgz#48b4ce983164b209c2d45a107adb31f473a6e7a7"
integrity sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==
-platformicons@^6.0.1:
- version "6.0.1"
- resolved "https://registry.yarnpkg.com/platformicons/-/platformicons-6.0.1.tgz#69e2cc1a1fe1533804e8223f9382295379db8bca"
- integrity sha512-RaT4+NfxuV2nqFxAZNaksa1UXKZS5QXdtvFc2y9A4Sp5Blt888jbSQJ7mUcmjw+Aqoq6oyz2cQvOCf4uymSizA==
+platformicons@^6.0.3:
+ version "6.0.3"
+ resolved "https://registry.yarnpkg.com/platformicons/-/platformicons-6.0.3.tgz#c167ab68c1cdb1a4d97665118b65ebccf7dcbe00"
+ integrity sha512-Z83ePiRqlA8yXQI1Y28/xv/7hGW/2BajJESxbnTjQkBjs2vGFAQjMs5E/mkOJoNuDKfEeaHgD7xOqajvARd4AQ==
dependencies:
"@types/node" "*"
"@types/react" "*"
From 0b32802018fd0ff5e797bb904a5acc3b5aa1a943 Mon Sep 17 00:00:00 2001
From: "Armen Zambrano G." <44410+armenzg@users.noreply.github.com>
Date: Fri, 4 Oct 2024 08:45:53 -0400
Subject: [PATCH 115/139] feat(issue_platform): Support deleting groups and
related data (#77794)
This allows the Issue Platform to delete groups and all related
occurrence events.
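For Issue Platform groups, deletion now goes through Snuba's lightweight deletes. A rough sketch of the approach (simplified from `IssuePlatformEventsDeletionTask.delete_events_from_snuba` in the diff below; `referrer`, `tenant_ids`, and `project_groups` stand in for the task's attributes):

    from snuba_sdk import DeleteQuery, Request

    from sentry.snuba.dataset import Dataset
    from sentry.utils.snuba import bulk_snuba_queries

    # One delete request per project, constrained to the groups being deleted
    requests = [
        Request(
            dataset=Dataset.IssuePlatform.value,
            app_id=referrer,
            query=DeleteQuery(
                Dataset.IssuePlatform.value,
                column_conditions={"project_id": [project_id], "group_id": group_ids},
            ),
            tenant_ids=tenant_ids,
        )
        for project_id, group_ids in project_groups.items()
    ]
    bulk_snuba_queries(requests)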
---
src/sentry/api/helpers/group_index/delete.py | 10 ++-
src/sentry/deletions/defaults/group.py | 79 ++++++++++++++++++-
src/sentry/features/temporary.py | 2 +
tests/sentry/deletions/test_group.py | 39 +++++----
.../api/endpoints/test_project_group_index.py | 14 +++-
5 files changed, 121 insertions(+), 23 deletions(-)
diff --git a/src/sentry/api/helpers/group_index/delete.py b/src/sentry/api/helpers/group_index/delete.py
index b2bd74552f1812..b29d978327ef5e 100644
--- a/src/sentry/api/helpers/group_index/delete.py
+++ b/src/sentry/api/helpers/group_index/delete.py
@@ -10,13 +10,14 @@
from rest_framework.request import Request
from rest_framework.response import Response
-from sentry import audit_log, eventstream
+from sentry import audit_log, eventstream, features
from sentry.api.base import audit_logger
from sentry.deletions.tasks.groups import delete_groups as delete_groups_task
from sentry.issues.grouptype import GroupCategory
from sentry.models.group import Group, GroupStatus
from sentry.models.grouphash import GroupHash
from sentry.models.groupinbox import GroupInbox
+from sentry.models.organization import Organization
from sentry.models.project import Project
from sentry.signals import issue_deleted
from sentry.tasks.delete_seer_grouping_records import call_delete_seer_grouping_records_by_hash
@@ -140,7 +141,12 @@ def delete_groups(
if not group_list:
return Response(status=204)
- if any(group.issue_category != GroupCategory.ERROR for group in group_list):
+ org = Organization.objects.get_from_cache(id=organization_id)
+ issue_platform_deletion_allowed = features.has(
+ "organizations:issue-platform-deletion", org, actor=request.user
+ )
+ non_error_group_found = any(group.issue_category != GroupCategory.ERROR for group in group_list)
+ if not issue_platform_deletion_allowed and non_error_group_found:
raise rest_framework.exceptions.ValidationError(detail="Only error issues can be deleted.")
groups_by_project_id = defaultdict(list)
diff --git a/src/sentry/deletions/defaults/group.py b/src/sentry/deletions/defaults/group.py
index 0bb2f497bf813d..42c14e16ecc542 100644
--- a/src/sentry/deletions/defaults/group.py
+++ b/src/sentry/deletions/defaults/group.py
@@ -5,13 +5,16 @@
from collections.abc import Mapping, Sequence
from typing import Any
-from sentry import eventstore, eventstream, models, nodestore
+from snuba_sdk import DeleteQuery, Request
+
+from sentry import eventstore, eventstream, features, models, nodestore
from sentry.eventstore.models import Event
from sentry.issues.grouptype import GroupCategory
from sentry.models.group import Group, GroupStatus
from sentry.models.rulefirehistory import RuleFireHistory
from sentry.snuba.dataset import Dataset
from sentry.tasks.delete_seer_grouping_records import call_delete_seer_grouping_records_by_hash
+from sentry.utils.snuba import bulk_snuba_queries
from ..base import BaseDeletionTask, BaseRelation, ModelDeletionTask, ModelRelation
from ..manager import DeletionTaskManager
@@ -132,6 +135,7 @@ def chunk(self) -> bool:
# As long as it returns True the task will keep iterating
return True
else:
+ # Now that all events have been deleted from the eventstore, we can delete the events from snuba
self.delete_events_from_snuba()
return False
@@ -159,6 +163,57 @@ def delete_events_from_snuba(self) -> None:
eventstream.backend.end_delete_groups(eventstream_state)
+class IssuePlatformEventsDeletionTask(EventsBaseDeletionTask):
+ """
+ This class helps delete Issue Platform events which use the new Clickhouse light deletes.
+ """
+
+ dataset = Dataset.IssuePlatform
+
+ def chunk(self) -> bool:
+ """This method is called to delete chunks of data. It returns a boolean to say
+ if the deletion has completed and if it needs to be called again."""
+ events = self.get_unfetched_events()
+ if events:
+ # Ideally, in some cases, we should also delete the associated event from the Nodestore.
+ # In the occurrence_consumer [1] we sometimes create a new event but it's hard in post-ingestion to distinguish between
+ # a created event and an existing one.
+ # https://github.com/getsentry/sentry/blob/a86b9b672709bc9c4558cffb2c825965b8cee0d1/src/sentry/issues/occurrence_consumer.py#L324-L339
+ self.delete_events_from_nodestore(events)
+ # This value will be used in the next call to chunk
+ self.last_event = events[-1]
+ # As long as it returns True the task will keep iterating
+ return True
+ else:
+ # Now that all events have been deleted from the eventstore, we can delete the occurrences from Snuba
+ self.delete_events_from_snuba()
+ return False
+
+ def delete_events_from_nodestore(self, events: Sequence[Event]) -> None:
+ # We delete by the occurrence_id instead of the event_id
+ node_ids = [
+ Event.generate_node_id(event.project_id, event._snuba_data["occurrence_id"])
+ for event in events
+ ]
+ nodestore.backend.delete_multi(node_ids)
+
+ def delete_events_from_snuba(self) -> None:
+ requests = []
+ for project_id, group_ids in self.project_groups.items():
+ query = DeleteQuery(
+ self.dataset.value,
+ column_conditions={"project_id": [project_id], "group_id": group_ids},
+ )
+ request = Request(
+ dataset=self.dataset.value,
+ app_id=self.referrer,
+ query=query,
+ tenant_ids=self.tenant_ids,
+ )
+ requests.append(request)
+ bulk_snuba_queries(requests)
+
+
class GroupDeletionTask(ModelDeletionTask[Group]):
# Delete groups in blocks of 1000. Using 1000 aims to
# balance the number of snuba replacements with memory limits.
@@ -194,13 +249,29 @@ def _delete_children(self, instance_list: Sequence[Group]) -> None:
for model in _GROUP_RELATED_MODELS:
child_relations.append(ModelRelation(model, {"group_id__in": group_ids}))
- error_groups, _ = separate_by_group_category(instance_list)
+ org = instance_list[0].project.organization
+ issue_platform_deletion_allowed = features.has(
+ "organizations:issue-platform-deletion", org, actor=None
+ )
+ error_groups, issue_platform_groups = separate_by_group_category(instance_list)
# If this isn't a retention cleanup also remove event data.
if not os.environ.get("_SENTRY_CLEANUP"):
- if error_groups:
- params = {"groups": error_groups}
+ if not issue_platform_deletion_allowed:
+ params = {"groups": instance_list}
child_relations.append(BaseRelation(params=params, task=ErrorEventsDeletionTask))
+ else:
+ if error_groups:
+ params = {"groups": error_groups}
+ child_relations.append(
+ BaseRelation(params=params, task=ErrorEventsDeletionTask)
+ )
+
+ if issue_platform_groups:
+ params = {"groups": issue_platform_groups}
+ child_relations.append(
+ BaseRelation(params=params, task=IssuePlatformEventsDeletionTask)
+ )
self.delete_children(child_relations)
diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py
index 06e16837f9504c..869e338c4c477a 100644
--- a/src/sentry/features/temporary.py
+++ b/src/sentry/features/temporary.py
@@ -122,6 +122,8 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:ds-org-recalibration", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enables data secrecy mode
manager.add("organizations:enterprise-data-secrecy", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
+ # Enable issue platform deletion
+ manager.add("organizations:issue-platform-deletion", OrganizationFeature, FeatureHandlerStrategy.FLAGPOLE, api_expose=True)
# Enable archive/escalating issue workflow features in v2
manager.add("organizations:escalating-issues-v2", OrganizationFeature, FeatureHandlerStrategy.INTERNAL, api_expose=False)
# Enable emiting escalating data to the metrics backend
diff --git a/tests/sentry/deletions/test_group.py b/tests/sentry/deletions/test_group.py
index 5095133f44ec4a..7a093fdb9ed3f9 100644
--- a/tests/sentry/deletions/test_group.py
+++ b/tests/sentry/deletions/test_group.py
@@ -203,11 +203,17 @@ def create_occurrence(
)
return occurrence, issue_platform_group
+ def select_error_events(self, project_id: int) -> object:
+ columns = ["event_id", "group_id"]
+ return self.select_rows(Entity(EntityKey.Events.value), columns, project_id)
+
def select_issue_platform_events(self, project_id: int) -> object:
columns = ["event_id", "group_id", "occurrence_id"]
return self.select_rows(Entity(EntityKey.IssuePlatform.value), columns, project_id)
- def select_rows(self, entity: Entity, columns: list[str], project_id: int) -> object:
+ def select_rows(
+ self, entity: Entity, columns: list[str], project_id: int
+ ) -> None | dict[str, object]:
# Adding the random microseconds is to circumvent Snuba's caching mechanism
now = datetime.now()
start_time = now - timedelta(days=1, microseconds=random.randint(0, 100000000))
@@ -227,7 +233,7 @@ def select_rows(self, entity: Entity, columns: list[str], project_id: int) -> ob
tenant_ids=self.tenant_ids,
)
results = bulk_snuba_queries([request])[0]["data"]
- return results
+ return results[0] if results else None
@property
def tenant_ids(self) -> dict[str, str]:
@@ -235,7 +241,8 @@ def tenant_ids(self) -> dict[str, str]:
def test_issue_platform(self) -> None:
# Adding this query here to make sure that the cache is not being used
- assert self.select_issue_platform_events(self.project.id) == []
+ assert self.select_error_events(self.project.id) is None
+ assert self.select_issue_platform_events(self.project.id) is None
# Create initial error event and occurrence related to it; two different groups will exist
event = self.store_event(data={}, project_id=self.project.id)
occurrence, group_info = self.create_occurrence(event, type_id=FeedbackGroup.type_id)
@@ -248,28 +255,28 @@ def test_issue_platform(self) -> None:
assert event.group.issue_category == GroupCategory.ERROR
assert issue_platform_group.issue_category != GroupCategory.ERROR
# Assert that the occurrence has been inserted in Snuba
- expected = [
- {
- "event_id": event.event_id,
- "group_id": issue_platform_group.id,
- "occurrence_id": occurrence.id,
- }
- ]
- assert self.select_issue_platform_events(self.project.id) == expected
+ error_expected = {"event_id": event.event_id, "group_id": event.group_id}
+ occurrence_expected = {
+ "event_id": event.event_id,
+ "group_id": issue_platform_group.id,
+ "occurrence_id": occurrence.id,
+ }
+ assert self.select_error_events(self.project.id) == error_expected
+ assert self.select_issue_platform_events(self.project.id) == occurrence_expected
- # This will delete the group and the events from the node store
- with self.tasks():
+ # This will delete the group and the events from the node store and Snuba
+ with self.tasks(), self.feature({"organizations:issue-platform-deletion": True}):
delete_groups(object_ids=[issue_platform_group.id])
# The original event and group still exist
assert Group.objects.filter(id=event.group_id).exists()
event_node_id = Event.generate_node_id(event.project_id, event.event_id)
assert nodestore.backend.get(event_node_id)
-
+ assert self.select_error_events(self.project.id) == error_expected
# The Issue Platform group and occurrence are deleted
assert issue_platform_group.issue_type == FeedbackGroup
assert not Group.objects.filter(id=issue_platform_group.id).exists()
occurrence_node_id = Event.generate_node_id(occurrence.project_id, occurrence.id)
assert not nodestore.backend.get(occurrence_node_id)
- # We don't yet delete the occurrence from Snuba but it will expire with the TTL
- assert self.select_issue_platform_events(self.project.id) == expected
+ # Assert that occurrence is gone
+ assert self.select_issue_platform_events(self.project.id) is None
diff --git a/tests/snuba/api/endpoints/test_project_group_index.py b/tests/snuba/api/endpoints/test_project_group_index.py
index d001f87ca93e5d..b1c73243fc62d6 100644
--- a/tests/snuba/api/endpoints/test_project_group_index.py
+++ b/tests/snuba/api/endpoints/test_project_group_index.py
@@ -32,7 +32,7 @@
from sentry.models.release import Release
from sentry.silo.base import SiloMode
from sentry.testutils.cases import APITestCase, SnubaTestCase
-from sentry.testutils.helpers import parse_link_header, with_feature
+from sentry.testutils.helpers import Feature, parse_link_header, with_feature
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.testutils.silo import assume_test_silo_mode
from sentry.types.activity import ActivityType
@@ -1580,6 +1580,12 @@ def test_delete_performance_issue_by_id(self, mock_eventstream):
assert response.status_code == 400
self.assert_groups_not_deleted([group1, group2])
+ # We are allowed to delete the groups with the feature flag enabled
+ with Feature({"organizations:issue-platform-deletion": True}), self.tasks():
+ response = self.client.delete(url, format="json")
+ assert response.status_code == 204
+ self.assert_groups_are_gone([group1, group2])
+
def test_bulk_delete(self):
groups_to_create = []
for _ in range(10, 41):
@@ -1617,3 +1623,9 @@ def test_bulk_delete_performance_issues(self):
# We do not support issue platform deletions
assert response.status_code == 400
self.assert_groups_not_deleted(groups)
+
+ # We are allowed to delete the groups with the feature flag enabled
+ with Feature({"organizations:issue-platform-deletion": True}), self.tasks():
+ response = self.client.delete(url, format="json")
+ assert response.status_code == 204
+ self.assert_groups_are_gone(groups)
From d2a696809492ce15f4c1e082f8f09c31d8e784ae Mon Sep 17 00:00:00 2001
From: Dominik Buszowiecki <44422760+DominikB2014@users.noreply.github.com>
Date: Fri, 4 Oct 2024 09:00:27 -0400
Subject: [PATCH 116/139] feat(insights): add view trends button to mobile
domain view (#78552)
---
.../insights/common/viewTrendsButton.tsx | 27 +++++++++
.../screens/views/screenDetailsPage.tsx | 59 +++++++------------
.../screens/views/screensLandingPage.tsx | 6 +-
.../pages/mobile/mobileOverviewPage.tsx | 5 +-
.../pages/mobile/mobilePageHeader.tsx | 54 ++++++++++-------
5 files changed, 85 insertions(+), 66 deletions(-)
create mode 100644 static/app/views/insights/common/viewTrendsButton.tsx
diff --git a/static/app/views/insights/common/viewTrendsButton.tsx b/static/app/views/insights/common/viewTrendsButton.tsx
new file mode 100644
index 00000000000000..5c546ac467ab46
--- /dev/null
+++ b/static/app/views/insights/common/viewTrendsButton.tsx
@@ -0,0 +1,27 @@
+import {Button} from 'sentry/components/button';
+import {t} from 'sentry/locale';
+import {useLocation} from 'sentry/utils/useLocation';
+import {useNavigate} from 'sentry/utils/useNavigate';
+import useOrganization from 'sentry/utils/useOrganization';
+import {trendsTargetRoute} from 'sentry/views/performance/utils';
+
+export function ViewTrendsButton() {
+ const location = useLocation();
+ const organization = useOrganization();
+ const navigate = useNavigate();
+
+ const handleTrendsClick = () => {
+ const target = trendsTargetRoute({organization, location});
+ navigate(target);
+ };
+ return (
+ handleTrendsClick()}
+ >
+ {t('View Trends')}
+
+ );
+}
diff --git a/static/app/views/insights/mobile/screens/views/screenDetailsPage.tsx b/static/app/views/insights/mobile/screens/views/screenDetailsPage.tsx
index 83cd48d72a003c..fba18d214fb99c 100644
--- a/static/app/views/insights/mobile/screens/views/screenDetailsPage.tsx
+++ b/static/app/views/insights/mobile/screens/views/screenDetailsPage.tsx
@@ -99,6 +99,21 @@ export function ScreenDetailsPage() {
});
}
+ const tabList = (
+
+ {tabs.map(tab => {
+ const visible =
+ tab.feature === undefined || organization.features?.includes(tab.feature);
+ return (
+
+ {tab.label}
+ {tab.alpha && }
+
+ );
+ })}
+
+ );
+
return (
@@ -116,47 +131,17 @@ export function ScreenDetailsPage() {
{isProjectCrossPlatform && }
-
-
- {tabs.map(tab => {
- const visible =
- tab.feature === undefined ||
- organization.features?.includes(tab.feature);
- return (
-
- {tab.label}
- {tab.alpha && }
-
- );
- })}
-
+ {tabList}
)}
{isInDomainView && (
-
-
-
-
- {isProjectCrossPlatform && }
-
-
-
- {/* TODO - There's two sets of tabs here, we'll have to do some UI work here */}
-
- {tabs.map(tab => {
- const visible =
- tab.feature === undefined ||
- organization.features?.includes(tab.feature);
- return (
-
- {tab.label}
- {tab.alpha && }
-
- );
- })}
-
-
+ }
+ />
)}
diff --git a/static/app/views/insights/mobile/screens/views/screensLandingPage.tsx b/static/app/views/insights/mobile/screens/views/screensLandingPage.tsx
index 22c5b37f779672..cd0dc9e594aac6 100644
--- a/static/app/views/insights/mobile/screens/views/screensLandingPage.tsx
+++ b/static/app/views/insights/mobile/screens/views/screensLandingPage.tsx
@@ -314,11 +314,7 @@ export function ScreensLandingPage() {
)}
- {isInDomainView && (
-
-
-
- )}
+ {isInDomainView && }
diff --git a/static/app/views/insights/pages/mobile/mobileOverviewPage.tsx b/static/app/views/insights/pages/mobile/mobileOverviewPage.tsx
index 0e9b4ba933504d..cc3c1cbd34ff9c 100644
--- a/static/app/views/insights/pages/mobile/mobileOverviewPage.tsx
+++ b/static/app/views/insights/pages/mobile/mobileOverviewPage.tsx
@@ -22,6 +22,7 @@ import useProjects from 'sentry/utils/useProjects';
import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout';
import {ToolRibbon} from 'sentry/views/insights/common/components/ribbon';
import {useOnboardingProject} from 'sentry/views/insights/common/queries/useOnboardingProject';
+import {ViewTrendsButton} from 'sentry/views/insights/common/viewTrendsButton';
import {MobileHeader} from 'sentry/views/insights/pages/mobile/mobilePageHeader';
import {OVERVIEW_PAGE_TITLE} from 'sentry/views/insights/pages/settings';
import {
@@ -169,9 +170,7 @@ function MobileOverviewPage() {
organization={organization}
renderDisabled={NoAccess}
>
-
-
-
+ } />
diff --git a/static/app/views/insights/pages/mobile/mobilePageHeader.tsx b/static/app/views/insights/pages/mobile/mobilePageHeader.tsx
index 1e5f7805c1441f..02969b3f704de8 100644
--- a/static/app/views/insights/pages/mobile/mobilePageHeader.tsx
+++ b/static/app/views/insights/pages/mobile/mobilePageHeader.tsx
@@ -25,12 +25,14 @@ import {MODULE_TITLES} from 'sentry/views/insights/settings';
import {ModuleName} from 'sentry/views/insights/types';
type Props = {
- hideTabs?: boolean;
+ headerActions?: React.ReactNode;
+ hideDefaultTabs?: boolean;
module?: ModuleName;
+ tabs?: {onTabChange: (key: string) => void; tabList: React.ReactNode; value: string};
};
// TODO - add props to append to breadcrumbs and change title
-export function MobileHeader({module, hideTabs}: Props) {
+export function MobileHeader({module, hideDefaultTabs, headerActions, tabs}: Props) {
const navigate = useNavigate();
const {slug} = useOrganization();
const moduleURLBuilder = useModuleURLBuilder();
@@ -57,7 +59,7 @@ export function MobileHeader({module, hideTabs}: Props) {
},
];
- const handleTabChange = (key: ModuleName | typeof OVERVIEW_PAGE_TITLE) => {
+ const defaultHandleTabChange = (key: ModuleName | typeof OVERVIEW_PAGE_TITLE) => {
if (key === module || (key === OVERVIEW_PAGE_TITLE && !module)) {
return;
}
@@ -71,27 +73,37 @@ export function MobileHeader({module, hideTabs}: Props) {
navigate(`${moduleURLBuilder(key as RoutableModuleNames)}/`);
};
+ const tabValue =
+ hideDefaultTabs && tabs?.value ? tabs.value : module ?? OVERVIEW_PAGE_TITLE;
+
+ const handleTabChange =
+ hideDefaultTabs && tabs ? tabs.onTabChange : defaultHandleTabChange;
+
return (
-
-
-
+
+
+
+
- {MOBILE_LANDING_TITLE}
-
-
-
-
-
-
- {!hideTabs && (
-
- {OVERVIEW_PAGE_TITLE}
-
- {MODULE_TITLES[ModuleName.MOBILE_SCREENS]}
-
-
- )}
+ {MOBILE_LANDING_TITLE}
+
+
+
+ {headerActions}
+
+
+
+ {!hideDefaultTabs && (
+
+ {OVERVIEW_PAGE_TITLE}
+
+ {MODULE_TITLES[ModuleName.MOBILE_SCREENS]}
+
+
+ )}
+ {hideDefaultTabs && tabs && tabs.tabList}
+
);
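For reference, a minimal sketch of how a page could drive the new `MobileHeader` API, assuming the `TabList` component from `sentry/components/tabs`; the tab keys and labels here are illustrative, not taken from this patch:

```tsx
// Hypothetical consumer of the new MobileHeader props. It hides the default
// module tabs and supplies its own tab list, change handler, and active value.
import {TabList} from 'sentry/components/tabs';
import {MobileHeader} from 'sentry/views/insights/pages/mobile/mobilePageHeader';
import {ModuleName} from 'sentry/views/insights/types';

type Props = {
  onTabChange: (key: string) => void;
  tab: string;
};

function ScreenDetailsHeader({tab, onTabChange}: Props) {
  return (
    <MobileHeader
      module={ModuleName.MOBILE_SCREENS}
      hideDefaultTabs
      tabs={{
        value: tab,
        onTabChange,
        // Rendered by the header in place of its default tab list.
        tabList: (
          <TabList>
            <TabList.Item key="screen_loads">Screen Loads</TabList.Item>
            <TabList.Item key="app_starts">App Starts</TabList.Item>
          </TabList>
        ),
      }}
    />
  );
}
```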
From 21ead34f2398c986997d028c8618a56e00c5b17f Mon Sep 17 00:00:00 2001
From: George Gritsouk <989898+gggritso@users.noreply.github.com>
Date: Fri, 4 Oct 2024 09:46:41 -0400
Subject: [PATCH 117/139] ref(dashboards): Replace Project Details score cards
with `BigNumberWidget` (#78566)
Replaces usage of `ScoreCard` in Project Details pages with
[`BigNumberWidget`](https://sentry.sentry.io/stories/?name=app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.stories.tsx).
A small refactor to clean things up and try that component out. A few
immediate wins:
- consistent rounding and truncation
- a little less code duplication
- uncovered at least one bug
- nicer loading and error states
- full value on hover
Closes https://github.com/getsentry/sentry/issues/77780
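For context, a minimal sketch of what one of these cards looks like after the change, mirroring the props used in this patch; the `title` prop and the description string are assumptions for illustration, not copied from the diff:

```tsx
// Sketch of an Apdex score card expressed as a BigNumberWidget. The data is
// keyed by field name and the meta block tells the widget how to format it.
import {t} from 'sentry/locale';
import {BigNumberWidget} from 'sentry/views/dashboards/widgets/bigNumberWidget/bigNumberWidget';

type Props = {
  apdex: number | undefined;
  error: Error | undefined;
  isLoading: boolean;
  previousApdex: number | undefined;
  refetch: () => void;
};

function ApdexCard({apdex, previousApdex, isLoading, error, refetch}: Props) {
  return (
    <BigNumberWidget
      title={t('Apdex')}
      description={t('Weighted satisfaction score for response times.')}
      data={[{'apdex()': apdex}]}
      previousPeriodData={[{'apdex()': previousApdex}]}
      meta={{fields: {'apdex()': 'number'}}}
      preferredPolarity="+"
      isLoading={isLoading}
      error={error}
      onRetry={refetch}
    />
  );
}
```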
---
.../dashboards/widgets/common/settings.tsx | 2 +-
.../dashboards/widgets/common/widgetFrame.tsx | 15 +++-
.../projectAnrScoreCard.spec.tsx | 6 +-
.../projectScoreCards/projectAnrScoreCard.tsx | 80 +++++++----------
.../projectApdexScoreCard.spec.tsx | 2 +-
.../projectApdexScoreCard.tsx | 80 ++++++-----------
.../projectScoreCards/projectScoreCards.tsx | 5 ++
.../projectStabilityScoreCard.tsx | 86 +++++++-----------
.../projectVelocityScoreCard.tsx | 90 ++++++-------------
9 files changed, 139 insertions(+), 227 deletions(-)
diff --git a/static/app/views/dashboards/widgets/common/settings.tsx b/static/app/views/dashboards/widgets/common/settings.tsx
index eba012ff0ef7eb..c52eb09413ae7d 100644
--- a/static/app/views/dashboards/widgets/common/settings.tsx
+++ b/static/app/views/dashboards/widgets/common/settings.tsx
@@ -1,7 +1,7 @@
import {t} from 'sentry/locale';
export const MIN_WIDTH = 200;
-export const MIN_HEIGHT = 120;
+export const MIN_HEIGHT = 96;
export const DEFAULT_FIELD = 'unknown'; // Numeric data might, in theory, have a missing field. In this case we need a fallback to provide to the field rendering pipeline. `'unknown'` will result in rendering as a string
diff --git a/static/app/views/dashboards/widgets/common/widgetFrame.tsx b/static/app/views/dashboards/widgets/common/widgetFrame.tsx
index 80764a0d465ff8..3949502406ee9e 100644
--- a/static/app/views/dashboards/widgets/common/widgetFrame.tsx
+++ b/static/app/views/dashboards/widgets/common/widgetFrame.tsx
@@ -1,6 +1,6 @@
import styled from '@emotion/styled';
-import {Button} from 'sentry/components/button';
+import {Button, LinkButton} from 'sentry/components/button';
import {HeaderTitle} from 'sentry/components/charts/styles';
import {DropdownMenu, type MenuItemProps} from 'sentry/components/dropdownMenu';
import QuestionTooltip from 'sentry/components/questionTooltip';
@@ -54,9 +54,15 @@ export function WidgetFrame(props: Props) {
{actions && actions.length > 0 && (
{actions.length === 1 ? (
-
- {actions[0].label}
-
+ actions[0].to ? (
+
+ {actions[0].label}
+
+ ) : (
+
+ {actions[0].label}
+
+ )
) : null}
{actions.length > 1 ? (
@@ -106,6 +112,7 @@ const Frame = styled('div')`
const Header = styled('div')`
display: flex;
flex-direction: column;
+ min-height: 20px;
`;
const Title = styled('div')`
diff --git a/static/app/views/projectDetail/projectScoreCards/projectAnrScoreCard.spec.tsx b/static/app/views/projectDetail/projectScoreCards/projectAnrScoreCard.spec.tsx
index 02a07b36a7e71f..57fdac8f0b34c7 100644
--- a/static/app/views/projectDetail/projectScoreCards/projectAnrScoreCard.spec.tsx
+++ b/static/app/views/projectDetail/projectScoreCards/projectAnrScoreCard.spec.tsx
@@ -108,8 +108,8 @@ describe('ProjectDetail > ProjectAnr', function () {
})
);
- await waitFor(() => expect(screen.getByText('11.562%')).toBeInTheDocument());
- await waitFor(() => expect(screen.getByText('0.03%')).toBeInTheDocument());
+ await waitFor(() => expect(screen.getByText('11.56%')).toBeInTheDocument());
+ await waitFor(() => expect(screen.getByText('3%')).toBeInTheDocument());
});
it('renders open in issues CTA', async function () {
@@ -127,7 +127,7 @@ describe('ProjectDetail > ProjectAnr', function () {
}
);
- await waitFor(() => expect(screen.getByText('11.562%')).toBeInTheDocument());
+ await waitFor(() => expect(screen.getByText('11.56%')).toBeInTheDocument());
expect(screen.getByRole('button', {name: 'View Issues'})).toHaveAttribute(
'href',
diff --git a/static/app/views/projectDetail/projectScoreCards/projectAnrScoreCard.tsx b/static/app/views/projectDetail/projectScoreCards/projectAnrScoreCard.tsx
index a3397898076857..d9eb9fd1859c7f 100644
--- a/static/app/views/projectDetail/projectScoreCards/projectAnrScoreCard.tsx
+++ b/static/app/views/projectDetail/projectScoreCards/projectAnrScoreCard.tsx
@@ -1,24 +1,19 @@
-import {Fragment, useEffect, useState} from 'react';
+import {useEffect, useState} from 'react';
import type {Location} from 'history';
import pick from 'lodash/pick';
-import round from 'lodash/round';
import {doSessionsRequest} from 'sentry/actionCreators/sessions';
-import {LinkButton} from 'sentry/components/button';
import {shouldFetchPreviousPeriod} from 'sentry/components/charts/utils';
import {normalizeDateTimeParams} from 'sentry/components/organizations/pageFilters/parse';
-import ScoreCard from 'sentry/components/scoreCard';
import {parseStatsPeriod} from 'sentry/components/timeRangeSelector/utils';
import {URL_PARAM} from 'sentry/constants/pageFilters';
-import {IconArrow} from 'sentry/icons/iconArrow';
import {t} from 'sentry/locale';
import type {PageFilters} from 'sentry/types/core';
import type {Organization, SessionApiResponse} from 'sentry/types/organization';
import {trackAnalytics} from 'sentry/utils/analytics';
import {getPeriod} from 'sentry/utils/duration/getPeriod';
-import {formatAbbreviatedNumber} from 'sentry/utils/formatters';
-import {formatPercentage} from 'sentry/utils/number/formatPercentage';
import useApi from 'sentry/utils/useApi';
+import {BigNumberWidget} from 'sentry/views/dashboards/widgets/bigNumberWidget/bigNumberWidget';
import {
getSessionTermDescription,
SessionTerm,
@@ -122,30 +117,12 @@ export function ProjectAnrScoreCard({
}, [start, end, period, api, organization.slug, environments, projects, query]);
const value = sessionsData?.groups?.[0]?.totals['anr_rate()'] ?? null;
-
const previousValue = previousSessionData?.groups?.[0]?.totals['anr_rate()'] ?? null;
- const hasCurrentAndPrevious = previousValue && value;
- const trend = hasCurrentAndPrevious ? round(value - previousValue, 4) : null;
- const trendStatus = !trend ? undefined : trend < 0 ? 'good' : 'bad';
-
if (!isProjectStabilized) {
return null;
}
- function renderTrend() {
- return trend ? (
-
- {trend >= 0 ? (
-
- ) : (
-
- )}
- {`${formatAbbreviatedNumber(Math.abs(trend))}\u0025`}
-
- ) : null;
- }
-
const endpointPath = `/organizations/${organization.slug}/issues/`;
const issueQuery = ['mechanism:[ANR,AppExitInfo]', query].join(' ').trim();
@@ -161,31 +138,38 @@ export function ProjectAnrScoreCard({
query: queryParams,
};
- function renderButton() {
- return (
- {
- trackAnalytics('project_detail.open_anr_issues', {
- organization,
- });
- }}
- >
- {t('View Issues')}
-
- );
- }
-
return (
- {
+ trackAnalytics('project_detail.open_anr_issues', {
+ organization,
+ });
+ },
+ },
+ ]}
/>
);
}
diff --git a/static/app/views/projectDetail/projectScoreCards/projectApdexScoreCard.spec.tsx b/static/app/views/projectDetail/projectScoreCards/projectApdexScoreCard.spec.tsx
index 7706032c1476d1..d25adcfa496ce3 100644
--- a/static/app/views/projectDetail/projectScoreCards/projectApdexScoreCard.spec.tsx
+++ b/static/app/views/projectDetail/projectScoreCards/projectApdexScoreCard.spec.tsx
@@ -61,7 +61,7 @@ describe('ProjectDetail > ProjectApdex', function () {
expect(await screen.findByText('Apdex')).toBeInTheDocument();
expect(await screen.findByText('0.781')).toBeInTheDocument();
- expect(await screen.findByText('0.103')).toBeInTheDocument();
+ expect(await screen.findByText('0.102')).toBeInTheDocument();
expect(currentDataEndpointMock).toHaveBeenNthCalledWith(
1,
diff --git a/static/app/views/projectDetail/projectScoreCards/projectApdexScoreCard.tsx b/static/app/views/projectDetail/projectScoreCards/projectApdexScoreCard.tsx
index c20972d1141e89..2b188050f7ff5c 100644
--- a/static/app/views/projectDetail/projectScoreCards/projectApdexScoreCard.tsx
+++ b/static/app/views/projectDetail/projectScoreCards/projectApdexScoreCard.tsx
@@ -1,20 +1,14 @@
-import {Fragment} from 'react';
-import round from 'lodash/round';
-
import {shouldFetchPreviousPeriod} from 'sentry/components/charts/utils';
-import Count from 'sentry/components/count';
-import LoadingError from 'sentry/components/loadingError';
import {normalizeDateTimeParams} from 'sentry/components/organizations/pageFilters/parse';
-import ScoreCard from 'sentry/components/scoreCard';
import {parseStatsPeriod} from 'sentry/components/timeRangeSelector/utils';
-import {IconArrow} from 'sentry/icons';
import {t} from 'sentry/locale';
import type {PageFilters} from 'sentry/types/core';
import type {Organization} from 'sentry/types/organization';
-import {defined} from 'sentry/utils';
import type {TableData} from 'sentry/utils/discover/discoverQuery';
import {getPeriod} from 'sentry/utils/duration/getPeriod';
import {useApiQuery} from 'sentry/utils/queryClient';
+import {BigNumberWidget} from 'sentry/views/dashboards/widgets/bigNumberWidget/bigNumberWidget';
+import {WidgetFrame} from 'sentry/views/dashboards/widgets/common/widgetFrame';
import {getTermHelp, PerformanceTerm} from 'sentry/views/performance/data';
import MissingPerformanceButtons from '../missingFeatureButtons/missingPerformanceButtons';
@@ -113,61 +107,41 @@ function ProjectApdexScoreCard(props: Props) {
const previousApdex = Number(previousData?.data?.[0]?.['apdex()']) || undefined;
- const trend =
- defined(apdex) && defined(previousApdex)
- ? round(apdex - previousApdex, 3)
- : undefined;
-
- const shouldRenderTrend = !isLoading && defined(apdex) && defined(trend);
-
const cardTitle = t('Apdex');
- let cardHelp = getTermHelp(organization, PerformanceTerm.APDEX);
-
- if (trend) {
- cardHelp += t(' This shows how it has changed since the last period.');
- }
+ const cardHelp = getTermHelp(organization, PerformanceTerm.APDEX);
if (!hasTransactions || !organization.features.includes('performance-view')) {
return (
- }
- />
- );
- }
-
- if (error) {
- return (
-
+
+
+
);
}
return (
- }
- trend={
- shouldRenderTrend ? (
-
- {trend >= 0 ? (
-
- ) : (
-
- )}
-
-
- ) : null
- }
- trendStatus={!trend ? undefined : trend > 0 ? 'good' : 'bad'}
+ description={cardHelp}
+ data={[
+ {
+ 'apdex()': apdex,
+ },
+ ]}
+ previousPeriodData={[
+ {
+ 'apdex()': previousApdex,
+ },
+ ]}
+ meta={{
+ fields: {
+ 'apdex()': 'number',
+ },
+ }}
+ preferredPolarity="+"
+ isLoading={isLoading}
+ error={error ?? undefined}
+ onRetry={refetch}
/>
);
}
diff --git a/static/app/views/projectDetail/projectScoreCards/projectScoreCards.tsx b/static/app/views/projectDetail/projectScoreCards/projectScoreCards.tsx
index 946e162e1f21df..497837d3d6be7d 100644
--- a/static/app/views/projectDetail/projectScoreCards/projectScoreCards.tsx
+++ b/static/app/views/projectDetail/projectScoreCards/projectScoreCards.tsx
@@ -83,6 +83,11 @@ function ProjectScoreCards({
}
const CardWrapper = styled('div')`
+ display: grid;
+ gap: ${space(2)};
+ grid-template-columns: 1fr;
+ margin-bottom: ${space(2)};
+
@media (min-width: ${p => p.theme.breakpoints.medium}) {
display: grid;
grid-column-gap: ${space(2)};
diff --git a/static/app/views/projectDetail/projectScoreCards/projectStabilityScoreCard.tsx b/static/app/views/projectDetail/projectScoreCards/projectStabilityScoreCard.tsx
index 9c30a5e33a3f79..357775ad9b5e2e 100644
--- a/static/app/views/projectDetail/projectScoreCards/projectStabilityScoreCard.tsx
+++ b/static/app/views/projectDetail/projectScoreCards/projectStabilityScoreCard.tsx
@@ -1,25 +1,19 @@
-import round from 'lodash/round';
-
import {
getDiffInMinutes,
shouldFetchPreviousPeriod,
} from 'sentry/components/charts/utils';
-import LoadingError from 'sentry/components/loadingError';
import {normalizeDateTimeParams} from 'sentry/components/organizations/pageFilters/parse';
-import ScoreCard from 'sentry/components/scoreCard';
import {DEFAULT_STATS_PERIOD} from 'sentry/constants';
-import {IconArrow} from 'sentry/icons';
import {t} from 'sentry/locale';
import type {PageFilters} from 'sentry/types/core';
import type {SessionApiResponse} from 'sentry/types/organization';
import {SessionFieldWithOperation} from 'sentry/types/organization';
import type {Project} from 'sentry/types/project';
-import {defined} from 'sentry/utils';
import {getPeriod} from 'sentry/utils/duration/getPeriod';
-import {formatAbbreviatedNumber} from 'sentry/utils/formatters';
import {useApiQuery} from 'sentry/utils/queryClient';
import useOrganization from 'sentry/utils/useOrganization';
-import {displayCrashFreePercent} from 'sentry/views/releases/utils';
+import {BigNumberWidget} from 'sentry/views/dashboards/widgets/bigNumberWidget/bigNumberWidget';
+import {WidgetFrame} from 'sentry/views/dashboards/widgets/common/widgetFrame';
import {
getSessionTermDescription,
SessionTerm,
@@ -111,8 +105,6 @@ const useCrashFreeRate = (props: Props) => {
};
};
-// shouldRenderBadRequests = true;
-
function ProjectStabilityScoreCard(props: Props) {
const {hasSessions} = props;
const organization = useOrganization();
@@ -140,59 +132,41 @@ function ProjectStabilityScoreCard(props: Props) {
? undefined
: previousCrashFreeRate?.groups[0]?.totals[props.field] * 100;
- const trend =
- defined(score) && defined(previousScore)
- ? round(score - previousScore, 3)
- : undefined;
-
- const shouldRenderTrend = !isLoading && defined(score) && defined(trend);
-
if (hasSessions === false) {
return (
-
- }
- />
- );
- }
-
- if (error) {
- return (
-
+
+
+
);
}
return (
-
- {trend >= 0 ? (
-
- ) : (
-
- )}
- {`${formatAbbreviatedNumber(Math.abs(trend))}\u0025`}
-
- ) : null
- }
- trendStatus={!trend ? undefined : trend > 0 ? 'good' : 'bad'}
+ description={cardHelp}
+ data={[
+ {
+ [`${props.field}()`]: score ? score / 100 : undefined,
+ },
+ ]}
+ previousPeriodData={[
+ {
+ [`${props.field}()`]: previousScore ? previousScore / 100 : undefined,
+ },
+ ]}
+ meta={{
+ fields: {
+ [`${props.field}()`]: 'percentage',
+ },
+ }}
+ preferredPolarity="+"
+ isLoading={isLoading}
+ error={error ?? undefined}
+ onRetry={refetch}
/>
);
}
diff --git a/static/app/views/projectDetail/projectScoreCards/projectVelocityScoreCard.tsx b/static/app/views/projectDetail/projectScoreCards/projectVelocityScoreCard.tsx
index dbf32a5dec0009..dd762e4a00dcca 100644
--- a/static/app/views/projectDetail/projectScoreCards/projectVelocityScoreCard.tsx
+++ b/static/app/views/projectDetail/projectScoreCards/projectVelocityScoreCard.tsx
@@ -1,17 +1,13 @@
-import {Fragment} from 'react';
-
import {shouldFetchPreviousPeriod} from 'sentry/components/charts/utils';
-import LoadingError from 'sentry/components/loadingError';
import {normalizeDateTimeParams} from 'sentry/components/organizations/pageFilters/parse';
-import ScoreCard from 'sentry/components/scoreCard';
import {parseStatsPeriod} from 'sentry/components/timeRangeSelector/utils';
-import {IconArrow} from 'sentry/icons';
import {t} from 'sentry/locale';
import type {PageFilters} from 'sentry/types/core';
import type {Organization} from 'sentry/types/organization';
-import {defined} from 'sentry/utils';
import {getPeriod} from 'sentry/utils/duration/getPeriod';
import {useApiQuery} from 'sentry/utils/queryClient';
+import {BigNumberWidget} from 'sentry/views/dashboards/widgets/bigNumberWidget/bigNumberWidget';
+import {WidgetFrame} from 'sentry/views/dashboards/widgets/common/widgetFrame';
import MissingReleasesButtons from '../missingFeatureButtons/missingReleasesButtons';
@@ -140,74 +136,46 @@ function ProjectVelocityScoreCard(props: Props) {
refetch,
} = useReleaseCount(props);
- const trend =
- defined(currentReleases) &&
- defined(previousReleases) &&
- currentReleases?.length !== API_LIMIT
- ? currentReleases.length - previousReleases.length
- : undefined;
-
- const shouldRenderTrend =
- !isLoading && defined(currentReleases) && defined(previousReleases) && defined(trend);
-
const noReleaseEver =
[...(allTimeReleases ?? []), ...(previousReleases ?? []), ...(allTimeReleases ?? [])]
.length === 0;
const cardTitle = t('Number of Releases');
- const cardHelp = trend
- ? t(
- 'The number of releases for this project and how it has changed since the last period.'
- )
- : t('The number of releases for this project.');
-
- if (noReleaseEver) {
- return (
- }
- />
- );
- }
+ const cardHelp = t('The number of releases for this project.');
- if (error) {
+ if (!isLoading && noReleaseEver) {
return (
-
+
+
+
);
}
return (
-
- {trend >= 0 ? (
-
- ) : (
-
- )}
- {Math.abs(trend)}
-
- ) : null
- }
- trendStatus={!trend ? undefined : trend > 0 ? 'good' : 'bad'}
+ description={cardHelp}
+ data={[
+ {
+ 'count()': currentReleases?.length,
+ },
+ ]}
+ previousPeriodData={[
+ {
+ 'count()': previousReleases?.length,
+ },
+ ]}
+ maximumValue={API_LIMIT}
+ meta={{
+ fields: {
+ 'count()': 'number',
+ },
+ }}
+ preferredPolarity="+"
+ isLoading={isLoading}
+ error={error ?? undefined}
+ onRetry={refetch}
/>
);
}
From 86f5776ab0bade028f1860d3863c4a0b1b9a2d46 Mon Sep 17 00:00:00 2001
From: Ogi <86684834+obostjancic@users.noreply.github.com>
Date: Fri, 4 Oct 2024 15:53:35 +0200
Subject: [PATCH 118/139] chore(metrics): remove create alert and create widget
actions (#78603)
---
.../modals/metricWidgetViewerModal.tsx | 14 +-
.../metricWidgetViewerModal/queries.tsx | 10 +-
.../alerts/rules/metric/details/body.tsx | 2 +-
.../views/alerts/rules/metric/ruleForm.tsx | 4 +-
static/app/views/dashboards/dashboard.tsx | 3 +-
.../app/views/dashboards/widgetCard/index.tsx | 37 ++-
static/app/views/metrics/layout.tsx | 12 +-
.../views/metrics/metricQueryContextMenu.tsx | 227 +++++++++---------
.../app/views/metrics/metricsBetaEndAlert.tsx | 23 +-
.../views/metrics/pageHeaderActions.spec.tsx | 4 +
.../app/views/metrics/pageHeaderActions.tsx | 135 ++++++-----
.../projectMetrics/projectMetrics.tsx | 4 +-
12 files changed, 255 insertions(+), 220 deletions(-)
diff --git a/static/app/components/modals/metricWidgetViewerModal.tsx b/static/app/components/modals/metricWidgetViewerModal.tsx
index 837756b2978671..c3bb40feeedba8 100644
--- a/static/app/components/modals/metricWidgetViewerModal.tsx
+++ b/static/app/components/modals/metricWidgetViewerModal.tsx
@@ -17,6 +17,7 @@ import type {Organization} from 'sentry/types/organization';
import {defined} from 'sentry/utils';
import {getMetricsUrl} from 'sentry/utils/metrics';
import {toDisplayType} from 'sentry/utils/metrics/dashboard';
+import {hasCustomMetrics} from 'sentry/utils/metrics/features';
import {parseMRI} from 'sentry/utils/metrics/mri';
import {MetricExpressionType} from 'sentry/utils/metrics/types';
import {useVirtualMetricsContext} from 'sentry/utils/metrics/virtualMetricsContext';
@@ -298,13 +299,14 @@ function MetricWidgetViewerModal({
const handleClose = useCallback(() => {
if (
userHasModified &&
+ hasCustomMetrics(organization) &&
// eslint-disable-next-line no-alert
!window.confirm(t('You have unsaved changes, are you sure you want to close?'))
) {
return;
}
closeModal();
- }, [userHasModified, closeModal]);
+ }, [userHasModified, closeModal, organization]);
const {mri, aggregation, query, condition} = metricQueries[0];
@@ -325,7 +327,7 @@ function MetricWidgetViewerModal({
-
+
{t('Open in Metrics')}
-
- {t('Save changes')}
-
+ {hasCustomMetrics(organization) && (
+
+ {t('Save changes')}
+
+ )}
diff --git a/static/app/components/modals/metricWidgetViewerModal/queries.tsx b/static/app/components/modals/metricWidgetViewerModal/queries.tsx
index 8fb9cc77b3507f..363f28c2e1d678 100644
--- a/static/app/components/modals/metricWidgetViewerModal/queries.tsx
+++ b/static/app/components/modals/metricWidgetViewerModal/queries.tsx
@@ -30,7 +30,11 @@ import {
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {isCustomMetric} from 'sentry/utils/metrics';
-import {hasMetricAlertFeature, hasMetricsNewInputs} from 'sentry/utils/metrics/features';
+import {
+ hasCustomMetrics,
+ hasMetricAlertFeature,
+ hasMetricsNewInputs,
+} from 'sentry/utils/metrics/features';
import {MetricExpressionType} from 'sentry/utils/metrics/types';
import useOrganization from 'sentry/utils/useOrganization';
import usePageFilters from 'sentry/utils/usePageFilters';
@@ -336,9 +340,9 @@ function QueryContextMenu({
},
};
- return customMetric
+ return hasCustomMetrics(organization)
? [duplicateQueryItem, aliasItem, addAlertItem, removeQueryItem, settingsItem]
- : [duplicateQueryItem, aliasItem, addAlertItem, removeQueryItem];
+ : [duplicateQueryItem, aliasItem, removeQueryItem, settingsItem];
}, [
metricsQuery.mri,
createAlert,
diff --git a/static/app/views/alerts/rules/metric/details/body.tsx b/static/app/views/alerts/rules/metric/details/body.tsx
index 777255d06fc67f..07e6cd4caae3a5 100644
--- a/static/app/views/alerts/rules/metric/details/body.tsx
+++ b/static/app/views/alerts/rules/metric/details/body.tsx
@@ -173,7 +173,7 @@ export default function MetricDetailsBody({
{isCustomMetricAlert(rule.aggregate) &&
!isInsightsMetricAlert(rule.aggregate) && (
-
+
)}
{selectedIncident?.alertRule.status === AlertRuleStatus.SNAPSHOT && (
diff --git a/static/app/views/alerts/rules/metric/ruleForm.tsx b/static/app/views/alerts/rules/metric/ruleForm.tsx
index e05e34c681d0f5..78f07fdb81be8f 100644
--- a/static/app/views/alerts/rules/metric/ruleForm.tsx
+++ b/static/app/views/alerts/rules/metric/ruleForm.tsx
@@ -1219,7 +1219,9 @@ class RuleFormContainer extends DeprecatedAsyncComponent {
{isCustomMetricAlert(rule.aggregate) &&
- !isInsightsMetricAlert(rule.aggregate) && }
+ !isInsightsMetricAlert(rule.aggregate) && (
+
+ )}
{eventView && }
diff --git a/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx b/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx
index edda92df32bb05..f41c5b377bb7be 100644
--- a/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx
+++ b/static/app/views/performance/newTraceDetails/traceModels/traceTree.tsx
@@ -803,10 +803,6 @@ export class TraceTree {
baseTraceNode.profiles.push(profile);
}
- for (const [node, vitals] of tree.vitals) {
- this.vitals.set(node, vitals);
- }
-
for (const [node, _] of tree.vitals) {
if (
baseTraceNode.space?.[0] &&
From 56606e582faf0df98b44f2af2a801b38153d4c23 Mon Sep 17 00:00:00 2001
From: mia hsu <55610339+ameliahsu@users.noreply.github.com>
Date: Fri, 4 Oct 2024 10:17:30 -0700
Subject: [PATCH 132/139] ref(selectControl): hide overflow for multi value
tokens (#78621)
Handles input overflow and sets a max height on selectControl for multi-value
tokens.
before:
https://github.com/user-attachments/assets/4484e2a6-e605-4503-b83f-f165f4ace18c
after:
https://github.com/user-attachments/assets/6bfc6312-8422-4b95-ba13-5ad923367fe9
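The general approach, sketched under the assumption that the control forwards react-select's `styles` API (`StylesConfig`/`valueContainer`); the exact values used in the patch may differ:

```tsx
// Cap the height of the multi-value container so a long list of email tokens
// scrolls instead of growing the control indefinitely.
import type {StylesConfig} from 'react-select';

const multiValueOverflowStyles: StylesConfig = {
  valueContainer: provided => ({
    ...provided,
    maxHeight: '10em',
    overflowY: 'auto',
  }),
};
```

In the invite row these rules would presumably be folded into the existing `getStyles(theme, inviteStatus)` result.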
---
.../forms/controls/selectControl.tsx | 11 +++-
.../inviteRowControlNew.tsx | 56 ++++++++-----------
2 files changed, 34 insertions(+), 33 deletions(-)
diff --git a/static/app/components/forms/controls/selectControl.tsx b/static/app/components/forms/controls/selectControl.tsx
index e16e6a0170cd95..b5242fdd6e90e9 100644
--- a/static/app/components/forms/controls/selectControl.tsx
+++ b/static/app/components/forms/controls/selectControl.tsx
@@ -225,6 +225,10 @@ function SelectControl ({
@@ -261,7 +265,12 @@ function SelectControl ({
...provided,
diff --git a/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx b/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx
index b49a1e7ef32967..11965e0c0f9309 100644
--- a/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx
+++ b/static/app/components/modals/inviteMembersModal/inviteRowControlNew.tsx
@@ -93,32 +93,30 @@ function InviteRowControl({roleDisabledUnallowed, roleOptions}: Props) {
Email addresses
-
- ValueComponent(props, inviteStatus),
- DropdownIndicator: () => null,
- }}
- options={mapToOptions(emails)}
- onBlur={(e: React.ChangeEvent) => {
- handleInput(e.target.value);
- }}
- styles={getStyles(theme, inviteStatus)}
- onInputChange={setInputValue}
- onKeyDown={handleKeyDown}
- onChange={onChangeEmails}
- multiple
- creatable
- clearable
- onClear={reset}
- menuIsOpen={false}
- />
-
+ ValueComponent(props, inviteStatus),
+ DropdownIndicator: () => null,
+ }}
+ options={mapToOptions(emails)}
+ onBlur={(e: React.ChangeEvent) => {
+ handleInput(e.target.value);
+ }}
+ styles={getStyles(theme, inviteStatus)}
+ onInputChange={setInputValue}
+ onKeyDown={handleKeyDown}
+ onChange={onChangeEmails}
+ multiple
+ creatable
+ clearable
+ onClear={reset}
+ menuIsOpen={false}
+ />
@@ -211,12 +209,6 @@ const RowWrapper = styled('div')`
gap: ${space(1.5)};
`;
-const EmailWrapper = styled('div')`
- &:focus-within {
- display: grid;
- }
-`;
-
const RoleTeamWrapper = styled('div')`
display: grid;
gap: ${space(1.5)};
From f4e3f606d88bc798e8dae3f531f799859928b1f0 Mon Sep 17 00:00:00 2001
From: Tony Xiao
Date: Fri, 4 Oct 2024 13:19:37 -0400
Subject: [PATCH 133/139] fix(charts): Properly synchronize charts (#78569)
Chart synchronization worked in Explore but appears broken in Insights. This
generalizes the logic into the existing `useSynchronizeCharts` hook.
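The generalized hook now takes the number of charts in the group and a readiness flag, with an optional group name; a usage sketch (the loading flags are illustrative):

```tsx
// Connect the cursors of the two charts in this panel once both have data.
// Omitting the third argument falls back to the default chart group.
import {useSynchronizeCharts} from 'sentry/views/insights/common/components/chart';

function ExampleChartsPanel({
  isThroughputDataLoading,
  isDurationDataLoading,
}: {
  isDurationDataLoading: boolean;
  isThroughputDataLoading: boolean;
}) {
  useSynchronizeCharts(2, !isThroughputDataLoading && !isDurationDataLoading);

  return null; // chart rendering omitted for brevity
}
```

Passing the chart count as a dependency is what forces the reconnect (and the re-render inside the hook) when charts are added or removed, which is why the call sites now spell out how many charts they draw.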
---
static/app/views/explore/charts/index.tsx | 21 +++++++--------
.../insights/common/components/chart.tsx | 27 ++++++++++++++-----
.../common/views/spans/spanTimeCharts.tsx | 2 +-
.../database/views/databaseLandingPage.tsx | 2 +-
.../views/databaseSpanSummaryPage.tsx | 2 +-
.../http/views/httpDomainSummaryPage.tsx | 5 +++-
.../insights/http/views/httpLandingPage.tsx | 5 +++-
7 files changed, 41 insertions(+), 23 deletions(-)
diff --git a/static/app/views/explore/charts/index.tsx b/static/app/views/explore/charts/index.tsx
index b5484c2dac005f..fe9caa5a2010ac 100644
--- a/static/app/views/explore/charts/index.tsx
+++ b/static/app/views/explore/charts/index.tsx
@@ -1,6 +1,5 @@
-import {Fragment, useCallback, useEffect, useMemo, useState} from 'react';
+import {Fragment, useCallback, useMemo} from 'react';
import styled from '@emotion/styled';
-import * as echarts from 'echarts/core';
import {getInterval} from 'sentry/components/charts/utils';
import {CompactSelect} from 'sentry/components/compactSelect';
@@ -15,7 +14,10 @@ import {formatVersion} from 'sentry/utils/versions/formatVersion';
import {useChartInterval} from 'sentry/views/explore/hooks/useChartInterval';
import {useDataset} from 'sentry/views/explore/hooks/useDataset';
import {useVisualizes} from 'sentry/views/explore/hooks/useVisualizes';
-import Chart, {ChartType} from 'sentry/views/insights/common/components/chart';
+import Chart, {
+ ChartType,
+ useSynchronizeCharts,
+} from 'sentry/views/insights/common/components/chart';
import ChartPanel from 'sentry/views/insights/common/components/chartPanel';
import {useSortedTimeSeries} from 'sentry/views/insights/common/queries/useSortedTimeSeries';
import {CHART_HEIGHT} from 'sentry/views/insights/database/settings';
@@ -116,14 +118,11 @@ export function ExploreCharts({query}: ExploreChartsProps) {
[visualizes, setVisualizes]
);
- // Synchronize chart cursors
- const [_, setRenderTrigger] = useState(0);
- useEffect(() => {
- if (!timeSeriesResult.isPending) {
- echarts?.connect(EXPLORE_CHART_GROUP);
- setRenderTrigger(prev => (prev + 1) % Number.MAX_SAFE_INTEGER);
- }
- }, [visualizes, timeSeriesResult.isPending]);
+ useSynchronizeCharts(
+ visualizes.length,
+ !timeSeriesResult.isPending,
+ EXPLORE_CHART_GROUP
+ );
return (
diff --git a/static/app/views/insights/common/components/chart.tsx b/static/app/views/insights/common/components/chart.tsx
index a60773ad40879c..16e4cb150be959 100644
--- a/static/app/views/insights/common/components/chart.tsx
+++ b/static/app/views/insights/common/components/chart.tsx
@@ -1,5 +1,5 @@
import type {RefObject} from 'react';
-import {createContext, useContext, useEffect, useMemo, useRef, useState} from 'react';
+import {createContext, useContext, useEffect, useMemo, useReducer, useRef} from 'react';
import {useTheme} from '@emotion/react';
import styled from '@emotion/styled';
import type {LineSeriesOption} from 'echarts';
@@ -611,14 +611,27 @@ export function computeAxisMax(data: Series[], stacked?: boolean) {
return Math.ceil(Math.ceil(maxValue / step) * step);
}
-export function useSynchronizeCharts(deps: boolean[] = []) {
- const [synchronized, setSynchronized] = useState(false);
+export function useSynchronizeCharts(
+ charts: number,
+ ready: boolean,
+ group: string = STARFISH_CHART_GROUP
+) {
+ // Tries to connect all the charts under the same group so the cursor is shared.
+ const [, forceUpdate] = useReducer(x => x + 1, 0);
+
useEffect(() => {
- if (deps.every(Boolean)) {
- echarts?.connect?.(STARFISH_CHART_GROUP);
- setSynchronized(true);
+ if (charts && ready) {
+ echarts?.connect?.(group);
+
+ // need to force a re-render otherwise only the currently visible charts
+ // in the group will end up connected
+ forceUpdate();
}
- }, [deps, synchronized]);
+ }, [
+ charts, // this re-connects when new charts are added/removed
+ ready, // this waits until the chart data has loaded before attempting to connect
+ group,
+ ]);
}
const StyledTransparentLoadingMask = styled(props => (
diff --git a/static/app/views/insights/common/views/spans/spanTimeCharts.tsx b/static/app/views/insights/common/views/spans/spanTimeCharts.tsx
index a6372453ff185f..7fa4a5e6ce7005 100644
--- a/static/app/views/insights/common/views/spans/spanTimeCharts.tsx
+++ b/static/app/views/insights/common/views/spans/spanTimeCharts.tsx
@@ -81,7 +81,7 @@ export function SpanTimeCharts({
referrer: 'api.starfish.span-time-charts',
});
- useSynchronizeCharts([!isPending]);
+ useSynchronizeCharts(1, !isPending);
const moduleCharts: Record<
ModuleName,
diff --git a/static/app/views/insights/database/views/databaseLandingPage.tsx b/static/app/views/insights/database/views/databaseLandingPage.tsx
index 9a18c0e04d2620..1cb6dba3b94ec0 100644
--- a/static/app/views/insights/database/views/databaseLandingPage.tsx
+++ b/static/app/views/insights/database/views/databaseLandingPage.tsx
@@ -159,7 +159,7 @@ export function DatabaseLandingPage() {
) ||
throughputData['spm()'].data?.some(({value}) => value > 0);
- useSynchronizeCharts([!isThroughputDataLoading && !isDurationDataLoading]);
+ useSynchronizeCharts(2, !isThroughputDataLoading && !isDurationDataLoading);
const crumbs = useModuleBreadcrumbs('db');
diff --git a/static/app/views/insights/database/views/databaseSpanSummaryPage.tsx b/static/app/views/insights/database/views/databaseSpanSummaryPage.tsx
index 02ca54da1ff0d5..48911dd0bc6e4d 100644
--- a/static/app/views/insights/database/views/databaseSpanSummaryPage.tsx
+++ b/static/app/views/insights/database/views/databaseSpanSummaryPage.tsx
@@ -180,7 +180,7 @@ export function DatabaseSpanSummaryPage({params}: Props) {
'api.starfish.span-summary-page-metrics-chart'
);
- useSynchronizeCharts([!isThroughputDataLoading && !isDurationDataLoading]);
+ useSynchronizeCharts(2, !isThroughputDataLoading && !isDurationDataLoading);
const crumbs = useModuleBreadcrumbs('db');
diff --git a/static/app/views/insights/http/views/httpDomainSummaryPage.tsx b/static/app/views/insights/http/views/httpDomainSummaryPage.tsx
index 0b984220bf0e5f..e9686b83b48b56 100644
--- a/static/app/views/insights/http/views/httpDomainSummaryPage.tsx
+++ b/static/app/views/insights/http/views/httpDomainSummaryPage.tsx
@@ -180,7 +180,10 @@ export function HTTPDomainSummaryPage() {
Referrer.DOMAIN_SUMMARY_TRANSACTIONS_LIST
);
- useSynchronizeCharts([!isThroughputDataLoading && !isDurationDataLoading]);
+ useSynchronizeCharts(
+ 3,
+ !isThroughputDataLoading && !isDurationDataLoading && !isResponseCodeDataLoading
+ );
const crumbs = useModuleBreadcrumbs('http');
diff --git a/static/app/views/insights/http/views/httpLandingPage.tsx b/static/app/views/insights/http/views/httpLandingPage.tsx
index 8514453c202fa4..7bc9af9a9fd77c 100644
--- a/static/app/views/insights/http/views/httpLandingPage.tsx
+++ b/static/app/views/insights/http/views/httpLandingPage.tsx
@@ -157,7 +157,10 @@ export function HTTPLandingPage() {
Referrer.LANDING_DOMAINS_LIST
);
- useSynchronizeCharts([!isThroughputDataLoading && !isDurationDataLoading]);
+ useSynchronizeCharts(
+ 3,
+ !isThroughputDataLoading && !isDurationDataLoading && !isResponseCodeDataLoading
+ );
const crumbs = useModuleBreadcrumbs('http');
From 6cdad46cf609fba9d3cd2662d2a0f1687a819bc9 Mon Sep 17 00:00:00 2001
From: Dan Fuller
Date: Fri, 4 Oct 2024 10:47:35 -0700
Subject: [PATCH 134/139] feat(issue_platform): Auto import grouptype.py in all
Django apps if present (#78582)
This PR ensures that if a `grouptype.py` is implemented in any Django
app that we use, it will be imported. This allows us to define
`GroupType` entries outside of `issues/` without worrying that they
might not be imported.
This allows us to move logic that is specific to given issue types
outside of `issues/`. In retrospect, it was probably not the right move
to store the actual definitions here since the platform is meant to be
generic and not be aware of the details of any specific issue type.
On top of this, as part of the "alerts are issues" work we're planning to
hook detector information into the issue types, which would mean importing
from `uptime`/`incidents`/etc. and would likely result in import loops.
---
src/sentry/issues/grouptype.py | 21 +++++++++++++++++++++
src/sentry/runner/initializer.py | 8 ++++++++
2 files changed, 29 insertions(+)
diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py
index 91887dceaa23e5..1c19909b0a452e 100644
--- a/src/sentry/issues/grouptype.py
+++ b/src/sentry/issues/grouptype.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import importlib
from collections import defaultdict
from dataclasses import dataclass, field
from datetime import timedelta
@@ -7,6 +8,7 @@
from typing import TYPE_CHECKING, Any
import sentry_sdk
+from django.apps import apps
from redis.client import StrictRedis
from rediscluster import RedisCluster
@@ -20,6 +22,9 @@
from sentry.models.organization import Organization
from sentry.models.project import Project
from sentry.users.models.user import User
+import logging
+
+logger = logging.getLogger(__name__)
class GroupCategory(Enum):
@@ -627,3 +632,19 @@ def should_create_group(
else:
client.expire(key, noise_config.expiry_seconds)
return False
+
+
+def import_grouptype():
+ """
+ Ensures that grouptype.py is imported in any apps that implement it. We do this to make sure that all implemented
+ grouptypes are loaded and registered.
+ """
+ for app_config in apps.get_app_configs():
+ grouptype_module = f"{app_config.name}.grouptype"
+ try:
+ # Try to import the module
+ importlib.import_module(grouptype_module)
+ logger.debug("Imported module", extra={"module_name": grouptype_module})
+ except ModuleNotFoundError:
+ # If the module is not found, continue without any issues
+ logger.debug("No grouptypes found for app", extra={"app": app_config.name})
diff --git a/src/sentry/runner/initializer.py b/src/sentry/runner/initializer.py
index 80d46552305cf4..9408bd5b5f51a1 100644
--- a/src/sentry/runner/initializer.py
+++ b/src/sentry/runner/initializer.py
@@ -389,6 +389,8 @@ def initialize_app(config: dict[str, Any], skip_service_validation: bool = False
setup_services(validate=not skip_service_validation)
+ import_grouptype()
+
from django.utils import timezone
from sentry.app import env
@@ -711,3 +713,9 @@ def validate_outbox_config() -> None:
for outbox_name in settings.SENTRY_OUTBOX_MODELS["REGION"]:
RegionOutboxBase.from_outbox_name(outbox_name)
+
+
+def import_grouptype() -> None:
+ from sentry.issues.grouptype import import_grouptype
+
+ import_grouptype()
From 5d8fafedce599b7c113b0ead426b4b0c8865b371 Mon Sep 17 00:00:00 2001
From: "Armen Zambrano G." <44410+armenzg@users.noreply.github.com>
Date: Fri, 4 Oct 2024 13:54:56 -0400
Subject: [PATCH 135/139] feat(issue_platform): Enable UI for Issue Platform
deletion (#78506)
This enables the UI for Issue Platform deletion.
This depends on #77794.
---
.../views/issueDetails/actions/index.spec.tsx | 127 +++++++++++++-----
.../app/views/issueDetails/actions/index.tsx | 15 ++-
2 files changed, 105 insertions(+), 37 deletions(-)
diff --git a/static/app/views/issueDetails/actions/index.spec.tsx b/static/app/views/issueDetails/actions/index.spec.tsx
index 4d0c028eb36dd1..8a4161ea2b425c 100644
--- a/static/app/views/issueDetails/actions/index.spec.tsx
+++ b/static/app/views/issueDetails/actions/index.spec.tsx
@@ -34,6 +34,12 @@ const group = GroupFixture({
project,
});
+const issuePlatformGroup = GroupFixture({
+ id: '1338',
+ issueCategory: IssueCategory.PERFORMANCE,
+ project,
+});
+
const organization = OrganizationFixture({
id: '4660',
slug: 'org',
@@ -209,48 +215,99 @@ describe('GroupActions', function () {
expect(updateMock).toHaveBeenCalled();
});
- it('opens delete confirm modal from more actions dropdown', async () => {
- const org = OrganizationFixture({
- ...organization,
- access: [...organization.access, 'event:admin'],
- });
- MockApiClient.addMockResponse({
- url: `/projects/${org.slug}/${project.slug}/issues/`,
- method: 'PUT',
- body: {},
- });
- const deleteMock = MockApiClient.addMockResponse({
- url: `/projects/${org.slug}/${project.slug}/issues/`,
- method: 'DELETE',
- body: {},
+ describe('delete', function () {
+ it('opens delete confirm modal from more actions dropdown', async () => {
+ const org = OrganizationFixture({
+ ...organization,
+ access: [...organization.access, 'event:admin'],
+ });
+ MockApiClient.addMockResponse({
+ url: `/projects/${org.slug}/${project.slug}/issues/`,
+ method: 'PUT',
+ body: {},
+ });
+ const deleteMock = MockApiClient.addMockResponse({
+ url: `/projects/${org.slug}/${project.slug}/issues/`,
+ method: 'DELETE',
+ body: {},
+ });
+ render(
+
+
+
+ ,
+ {organization: org}
+ );
+
+ await userEvent.click(screen.getByLabelText('More Actions'));
+ await userEvent.click(await screen.findByRole('menuitemradio', {name: 'Delete'}));
+
+ const modal = screen.getByRole('dialog');
+ expect(
+ within(modal).getByText(/Deleting this issue is permanent/)
+ ).toBeInTheDocument();
+
+ await userEvent.click(within(modal).getByRole('button', {name: 'Delete'}));
+
+ expect(deleteMock).toHaveBeenCalled();
+ expect(browserHistory.push).toHaveBeenCalledWith({
+ pathname: `/organizations/${org.slug}/issues/`,
+ query: {project: project.id},
+ });
});
- render(
-
-
+
+ it('delete for issue platform', async () => {
+ const org = OrganizationFixture({
+ access: ['event:admin'], // Delete is only shown if this is present
+ });
+ render(
- ,
- {organization: org}
- );
-
- await userEvent.click(screen.getByLabelText('More Actions'));
- await userEvent.click(await screen.findByRole('menuitemradio', {name: 'Delete'}));
-
- const modal = screen.getByRole('dialog');
- expect(
- within(modal).getByText(/Deleting this issue is permanent/)
- ).toBeInTheDocument();
+ />,
+ {organization: org}
+ );
- await userEvent.click(within(modal).getByRole('button', {name: 'Delete'}));
+ await userEvent.click(screen.getByLabelText('More Actions'));
+ expect(await screen.findByTestId('delete-issue')).toHaveAttribute(
+ 'aria-disabled',
+ 'true'
+ );
+ expect(await screen.findByTestId('delete-and-discard')).toHaveAttribute(
+ 'aria-disabled',
+ 'true'
+ );
+ });
+ it('delete for issue platform is enabled with feature flag', async () => {
+ const org = OrganizationFixture({
+ access: ['event:admin'],
+ features: ['issue-platform-deletion-ui'],
+ });
+ render(
+ ,
+ {organization: org}
+ );
- expect(deleteMock).toHaveBeenCalled();
- expect(browserHistory.push).toHaveBeenCalledWith({
- pathname: `/organizations/${org.slug}/issues/`,
- query: {project: project.id},
+ await userEvent.click(screen.getByLabelText('More Actions'));
+ expect(await screen.findByTestId('delete-issue')).not.toHaveAttribute(
+ 'aria-disabled'
+ );
+ expect(await screen.findByTestId('delete-and-discard')).toHaveAttribute(
+ 'aria-disabled',
+ 'true'
+ );
});
});
diff --git a/static/app/views/issueDetails/actions/index.tsx b/static/app/views/issueDetails/actions/index.tsx
index 1538b3c91b5a79..76a0dae7577e5b 100644
--- a/static/app/views/issueDetails/actions/index.tsx
+++ b/static/app/views/issueDetails/actions/index.tsx
@@ -42,6 +42,7 @@ import {uniqueId} from 'sentry/utils/guid';
import {getConfigForIssueType} from 'sentry/utils/issueTypeConfig';
import {getAnalyicsDataForProject} from 'sentry/utils/projects';
import normalizeUrl from 'sentry/utils/url/normalizeUrl';
+import useOrganization from 'sentry/utils/useOrganization';
import withApi from 'sentry/utils/withApi';
import withOrganization from 'sentry/utils/withOrganization';
import {hasDatasetSelector} from 'sentry/views/dashboards/utils';
@@ -90,6 +91,9 @@ export function Actions(props: Props) {
const hasStreamlinedUI = useHasStreamlinedUI();
+ const org = useOrganization();
+ const hasIssuePlatformDeletionUI = org.features.includes('issue-platform-deletion-ui');
+
const {
actions: {
archiveUntilOccurrence: archiveUntilOccurrenceCap,
@@ -101,6 +105,13 @@ export function Actions(props: Props) {
discover: discoverCap,
} = config;
+ // Update the deleteCap to be enabled if the feature flag is present
+ const updatedDeleteCap = {
+ ...deleteCap,
+ enabled: hasIssuePlatformDeletionUI || deleteCap.enabled,
+ disabledReason: hasIssuePlatformDeletionUI ? null : deleteCap.disabledReason,
+ };
+
const getDiscoverUrl = () => {
const {title, type, shortId} = group;
@@ -490,8 +501,8 @@ export function Actions(props: Props) {
priority: 'danger',
label: t('Delete'),
hidden: !hasDeleteAccess,
- disabled: !deleteCap.enabled,
- details: deleteCap.disabledReason,
+ disabled: !updatedDeleteCap.enabled,
+ details: updatedDeleteCap.disabledReason,
onAction: openDeleteModal,
},
{
From cbb15373c76ffde87ca9bde309db7f2431c4247a Mon Sep 17 00:00:00 2001
From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com>
Date: Fri, 4 Oct 2024 11:24:03 -0700
Subject: [PATCH 136/139] chore(replay): delete remote config files (#78623)
Relates to https://github.com/getsentry/team-replay/issues/456
---
.github/CODEOWNERS | 6 -
src/sentry/api/api_owners.py | 1 -
src/sentry/api/urls.py | 14 -
src/sentry/remote_config/README.md | 1 -
src/sentry/remote_config/__init__.py | 0
src/sentry/remote_config/docs/api.md | 157 ---------
src/sentry/remote_config/docs/protocol.md | 106 ------
src/sentry/remote_config/endpoints.py | 152 ---------
src/sentry/remote_config/storage.py | 162 ---------
tests/sentry/remote_config/__init__.py | 0
.../remote_config/endpoints/__init__.py | 0
.../endpoints/test_configuration.py | 308 ------------------
12 files changed, 907 deletions(-)
delete mode 100644 src/sentry/remote_config/README.md
delete mode 100644 src/sentry/remote_config/__init__.py
delete mode 100644 src/sentry/remote_config/docs/api.md
delete mode 100644 src/sentry/remote_config/docs/protocol.md
delete mode 100644 src/sentry/remote_config/endpoints.py
delete mode 100644 src/sentry/remote_config/storage.py
delete mode 100644 tests/sentry/remote_config/__init__.py
delete mode 100644 tests/sentry/remote_config/endpoints/__init__.py
delete mode 100644 tests/sentry/remote_config/endpoints/test_configuration.py
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index ab065eee56efc2..586fdd67e6c8d7 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -323,12 +323,6 @@ tests/sentry/api/endpoints/test_organization_dashboard_widget_details.py @ge
## End of Profiling
-## Configurations
-/src/sentry/remote_config/ @getsentry/replay-backend
-/tests/sentry/remote_config/ @getsentry/replay-backend
-## End of Configurations
-
-
## Flags
/src/sentry/flags/ @getsentry/replay-backend
/tests/sentry/flags/ @getsentry/replay-backend
diff --git a/src/sentry/api/api_owners.py b/src/sentry/api/api_owners.py
index ebe2043f8c6e3e..60e5d1884f26a3 100644
--- a/src/sentry/api/api_owners.py
+++ b/src/sentry/api/api_owners.py
@@ -28,4 +28,3 @@ class ApiOwner(Enum):
TELEMETRY_EXPERIENCE = "telemetry-experience"
UNOWNED = "unowned"
WEB_FRONTEND_SDKS = "team-web-sdk-frontend"
- REMOTE_CONFIG = "replay-backend"
diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py
index f92acdc39da6ab..8a18f68e063638 100644
--- a/src/sentry/api/urls.py
+++ b/src/sentry/api/urls.py
@@ -250,10 +250,6 @@
from sentry.monitors.endpoints.project_processing_errors_index import (
ProjectProcessingErrorsIndexEndpoint,
)
-from sentry.remote_config.endpoints import (
- ProjectConfigurationEndpoint,
- ProjectConfigurationProxyEndpoint,
-)
from sentry.replays.endpoints.organization_replay_count import OrganizationReplayCountEndpoint
from sentry.replays.endpoints.organization_replay_details import OrganizationReplayDetailsEndpoint
from sentry.replays.endpoints.organization_replay_events_meta import (
@@ -2441,11 +2437,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
r"^(?P[^\/]+)/(?P[^\/]+)/keys/(?P[^\/]+)/stats/$",
ProjectKeyStatsEndpoint.as_view(),
),
- re_path(
- r"^(?P[^\/]+)/(?P[^\/]+)/configuration/$",
- ProjectConfigurationEndpoint.as_view(),
- name="sentry-api-0-project-key-configuration",
- ),
re_path(
r"^(?P[^/]+)/(?P[^/]+)/members/$",
ProjectMemberIndexEndpoint.as_view(),
@@ -3301,11 +3292,6 @@ def create_group_urls(name_prefix: str) -> list[URLPattern | URLResolver]:
SetupWizard.as_view(),
name="sentry-api-0-project-wizard",
),
- re_path(
- r"^remote-config/projects/(?P[^\/]+)/$",
- ProjectConfigurationProxyEndpoint.as_view(),
- name="sentry-api-0-project-remote-configuration",
- ),
# Internal
re_path(
r"^internal/",
diff --git a/src/sentry/remote_config/README.md b/src/sentry/remote_config/README.md
deleted file mode 100644
index db945aa508a004..00000000000000
--- a/src/sentry/remote_config/README.md
+++ /dev/null
@@ -1 +0,0 @@
-# Remote Configuration Product
diff --git a/src/sentry/remote_config/__init__.py b/src/sentry/remote_config/__init__.py
deleted file mode 100644
index e69de29bb2d1d6..00000000000000
diff --git a/src/sentry/remote_config/docs/api.md b/src/sentry/remote_config/docs/api.md
deleted file mode 100644
index fc05ee85c44d6d..00000000000000
--- a/src/sentry/remote_config/docs/api.md
+++ /dev/null
@@ -1,157 +0,0 @@
-# Configurations API
-
-Host: https://sentry.io/api/0
-
-**Authors.**
-
-@cmanallen
-
-## Configuration [/projects///configuration/]
-
-### Get Configuration [GET]
-
-Retrieve the project's configuration.
-
-**Attributes**
-
-| Column | Type | Description |
-| -------- | -------------- | --------------------------------------------- |
-| features | array[Feature] | Custom, user-defined configuration container. |
-| options | Option | Sentry SDK options container. |
-
-**Feature Object**
-
-| Field | Type | Description |
-| ----- | ------ | ---------------------------------- |
-| key | string | The name used to lookup a feature. |
-| value | any | A JSON value. |
-
-**Option Object**
-
-| Field | Type | Description |
-| ------------------ | ----- | --------------------------------------------------- |
-| sample_rate | float | Error sample rate. A numeric value between 0 and 1. |
-| traces_sample_rate | float | Trace sample rate. A numeric value between 0 and 1. |
-
-**If an existing configuration exists**
-
-- Response 200
-
- ```json
- {
- "data": {
- "features": [
- {
- "key": "hello",
- "value": "world"
- },
- {
- "key": "has_access",
- "value": true
- }
- ],
- "options": {
- "sample_rate": 1.0,
- "traces_sample_rate": 0.5
- }
- }
- }
- ```
-
-**If no existing configuration exists**
-
-- Response 404
-
-### Set Configuration [POST]
-
-Set the project's configuration.
-
-- Request
-
- ```json
- {
- "data": {
- "features": [
- {
- "key": "hello",
- "value": "world"
- },
- {
- "key": "has_access",
- "value": true
- }
- ],
- "options": {
- "sample_rate": 1.0,
- "traces_sample_rate": 0.5
- }
- }
- }
- ```
-
-- Response 201
-
- ```json
- {
- "data": {
- "features": [
- {
- "key": "hello",
- "value": "world"
- },
- {
- "key": "has_access",
- "value": true
- }
- ],
- "options": {
- "sample_rate": 1.0,
- "traces_sample_rate": 0.5
- }
- }
- }
- ```
-
-### Delete Configuration [DELETE]
-
-Delete the project's configuration.
-
-- Response 204
-
-## Configuration Proxy [/remote-config/projects//]
-
-Temporary configuration proxy resource.
-
-### Get Configuration [GET]
-
-Fetch a project's configuration. Responses should be proxied exactly to the SDK.
-
-- Response 200
-
- - Headers
-
- Cache-Control: public, max-age=3600
- Content-Type: application/json
- ETag: a7966bf58e23583c9a5a4059383ff850
-
- - Body
-
- ```json
- {
- "features": [
- {
- "key": "hello",
- "value": "world"
- },
- {
- "key": "has_access",
- "value": true
- }
- ],
- "options": {
- "sample_rate": 1.0,
- "traces_sample_rate": 0.5
- },
- "version": 1
- }
- ```
diff --git a/src/sentry/remote_config/docs/protocol.md b/src/sentry/remote_config/docs/protocol.md
deleted file mode 100644
index 30885911c3167f..00000000000000
--- a/src/sentry/remote_config/docs/protocol.md
+++ /dev/null
@@ -1,106 +0,0 @@
-# Remote Configuration Protocol
-
-Host: https://o1300299.ingest.us.sentry.io
-
-**Authors.**
-
-@cmanallen
-
-## Configuration [/api//configuration/]
-
-### Get Configuration [GET]
-
-Retrieve a project's configuration.
-
-**Attributes**
-
-| Field | Type | Description |
-| -------- | -------------- | --------------------------------------------- |
-| features | array[Feature] | Custom, user-defined configuration container. |
-| options | Option | Sentry SDK options container. |
-| version | number | The version of the protocol. |
-
-**Feature Object**
-
-| Field | Type | Description |
-| ----- | ------ | ---------------------------------- |
-| key | string | The name used to lookup a feature. |
-| value | any | A JSON value. |
-
-**Option Object**
-
-| Field | Type | Description |
-| ------------------ | ----- | ------------------ |
-| sample_rate | float | Error sample rate. |
-| traces_sample_rate | float | Trace sample rate. |
-
-**Server ETag Matches**
-
-If the server's ETag matches the request's a 304 (NOT MODIFIED) response is returned.
-
-- Request
-
- - Headers
-
- Accept: application/json
- If-None-Match: 8832040536272351350
-
-- Response 304
-
- - Headers
-
- Cache-Control: public, max-age=60
- Content-Type: application/json
- ETag: 8832040536272351350
-
-**Server ETag Does Not Match or If-None-Match Omitted**
-
-If the server's ETag does not match the request's a 200 response is returned.
-
-- Request
-
- - Headers
-
- Accept: application/json
- If-None-Match: ABC
-
-- Response 200
-
- - Headers
-
- Cache-Control: public, max-age=60
- Content-Type: application/json
- ETag: 8832040536272351350
-
- - Body
-
- ```json
- {
- "features": [
- {
- "key": "hello",
- "value": "world"
- },
- {
- "key": "has_access",
- "value": true
- }
- ],
- "options": {
- "sample_rate": 1.0,
- "traces_sample_rate": 0.5
- },
- "version": 1
- }
- ```
-
-**No Configuration Exists for the Project**
-
-- Request
-
- - Headers
-
- Accept: application/json
- If-None-Match: ABC
-
-- Response 404
diff --git a/src/sentry/remote_config/endpoints.py b/src/sentry/remote_config/endpoints.py
deleted file mode 100644
index e2d8fe8c29730e..00000000000000
--- a/src/sentry/remote_config/endpoints.py
+++ /dev/null
@@ -1,152 +0,0 @@
-import hashlib
-
-from django.contrib.auth.models import AnonymousUser
-from rest_framework import serializers
-from rest_framework.authentication import BasicAuthentication
-from rest_framework.request import Request
-from rest_framework.response import Response
-from rest_framework.serializers import Serializer
-
-from sentry import features
-from sentry.api.api_owners import ApiOwner
-from sentry.api.api_publish_status import ApiPublishStatus
-from sentry.api.authentication import AuthenticationSiloLimit
-from sentry.api.base import Endpoint, region_silo_endpoint
-from sentry.api.bases.project import ProjectEndpoint, ProjectEventPermission
-from sentry.api.permissions import RelayPermission
-from sentry.models.project import Project
-from sentry.remote_config.storage import make_api_backend, make_configuration_backend
-from sentry.silo.base import SiloMode
-from sentry.utils import json, metrics
-
-
-class OptionsValidator(Serializer):
- sample_rate = serializers.FloatField(max_value=1.0, min_value=0, required=True)
- traces_sample_rate = serializers.FloatField(max_value=1.0, min_value=0, required=True)
-
-
-class FeatureValidator(Serializer):
- key = serializers.CharField(required=True)
- value = serializers.JSONField(required=True, allow_null=True)
-
-
-class ConfigurationValidator(Serializer):
- id = serializers.UUIDField(read_only=True)
- features: serializers.ListSerializer = serializers.ListSerializer(
- child=FeatureValidator(), required=True
- )
- options = OptionsValidator(required=True)
-
-
-class ConfigurationContainerValidator(Serializer):
- data = ConfigurationValidator(required=True) # type: ignore[assignment]
-
-
-@region_silo_endpoint
-class ProjectConfigurationEndpoint(ProjectEndpoint):
- owner = ApiOwner.REMOTE_CONFIG
- permission_classes = (ProjectEventPermission,)
- publish_status = {
- "GET": ApiPublishStatus.EXPERIMENTAL,
- "POST": ApiPublishStatus.EXPERIMENTAL,
- "DELETE": ApiPublishStatus.EXPERIMENTAL,
- }
-
- def get(self, request: Request, project: Project) -> Response:
- """Get remote configuration from project options."""
- if not features.has(
- "organizations:remote-config", project.organization, actor=request.user
- ):
- return Response("Disabled", status=404)
-
- remote_config, source = make_api_backend(project).get()
- if remote_config is None:
- return Response("Not found.", status=404)
-
- return Response(
- {"data": remote_config},
- status=200,
- headers={"X-Sentry-Data-Source": source},
- )
-
- def post(self, request: Request, project: Project) -> Response:
- """Set remote configuration in project options."""
- if not features.has(
- "organizations:remote-config", project.organization, actor=request.user
- ):
- return Response("Disabled", status=404)
-
- validator = ConfigurationContainerValidator(data=request.data)
- if not validator.is_valid():
- return self.respond(validator.errors, status=400)
-
- result = validator.validated_data["data"]
-
- make_api_backend(project).set(result)
- metrics.incr("remote_config.configuration.write")
- return Response({"data": result}, status=201)
-
- def delete(self, request: Request, project: Project) -> Response:
- """Delete remote configuration from project options."""
- if not features.has(
- "organizations:remote-config", project.organization, actor=request.user
- ):
- return Response("Disabled", status=404)
-
- make_api_backend(project).pop()
- metrics.incr("remote_config.configuration.delete")
- return Response("", status=204)
-
-
-@AuthenticationSiloLimit(SiloMode.REGION)
-class RelayAuthentication(BasicAuthentication):
- """Same as default Relay authentication except without body signing."""
-
- def authenticate(self, request: Request):
- return (AnonymousUser(), None)
-
-
-class RemoteConfigRelayPermission(RelayPermission):
- def has_permission(self, request: Request, view: object) -> bool:
- # Relay has permission to do everything! Except the only thing we expose is a simple
- # read endpoint full of public data...
- return True
-
-
-@region_silo_endpoint
-class ProjectConfigurationProxyEndpoint(Endpoint):
- publish_status = {
- "GET": ApiPublishStatus.EXPERIMENTAL,
- }
- owner = ApiOwner.REMOTE_CONFIG
- authentication_classes = (RelayAuthentication,)
- permission_classes = (RemoteConfigRelayPermission,)
- enforce_rate_limit = False
-
- def get(self, request: Request, project_id: int) -> Response:
- metrics.incr("remote_config.configuration.requested")
-
- project = Project.objects.select_related("organization").get(pk=project_id)
- if not features.has("organizations:remote-config", project.organization, actor=None):
- metrics.incr("remote_config.configuration.flag_disabled")
- return Response("Disabled", status=404)
-
- result, source = make_configuration_backend(project).get()
- if result is None:
- metrics.incr("remote_config.configuration.not_found")
- return Response("Not found", status=404)
-
- result_str = json.dumps(result)
- metrics.incr("remote_config.configuration.returned")
- metrics.distribution("remote_config.configuration.size", value=len(result_str))
-
- # Emulating cache headers just because.
- return Response(
- result,
- status=200,
- headers={
- "Cache-Control": "public, max-age=3600",
- "ETag": hashlib.sha1(result_str.encode()).hexdigest(),
- "X-Sentry-Data-Source": source,
- },
- )
diff --git a/src/sentry/remote_config/storage.py b/src/sentry/remote_config/storage.py
deleted file mode 100644
index 86a74da327445f..00000000000000
--- a/src/sentry/remote_config/storage.py
+++ /dev/null
@@ -1,162 +0,0 @@
-from io import BytesIO
-from typing import TypedDict
-
-from sentry import options
-from sentry.cache import default_cache
-from sentry.models.files.utils import get_storage
-from sentry.models.project import Project
-from sentry.utils import json, metrics
-
-JSONValue = str | int | float | bool | None | list["JSONValue"] | dict[str, "JSONValue"]
-
-
-class Options(TypedDict):
- sample_rate: float
- traces_sample_rate: float
-
-
-class Feature(TypedDict):
- key: str
- value: JSONValue
-
-
-class StorageFormat(TypedDict):
- features: list[Feature]
- options: Options
- version: int
-
-
-class APIFormat(TypedDict):
- features: list[Feature]
- options: Options
-
-
-class ConfigurationCache:
- def __init__(self, key: str) -> None:
- self.key = key
-
- def get(self) -> StorageFormat | None:
- cache_result = default_cache.get(self.key)
-
- if cache_result is None:
- metrics.incr("remote_config.configuration.cache_miss")
- else:
- metrics.incr("remote_config.configuration.cache_hit")
-
- return cache_result
-
- def set(self, value: StorageFormat) -> None:
- default_cache.set(self.key, value=value, timeout=None)
-
- def pop(self) -> None:
- try:
- default_cache.delete(self.key)
- except Exception:
- pass
-
-
-class ConfigurationStorage:
- def __init__(self, key: str) -> None:
- self.key = key
-
- @property
- def storage(self):
- return get_storage(self._make_storage_config())
-
- def get(self) -> StorageFormat | None:
- try:
- blob = self.storage.open(self.key)
- result = blob.read()
- blob.close()
- except Exception:
- return None
-
- if result is None:
- return None
- return json.loads(result)
-
- def set(self, value: StorageFormat) -> None:
- self.storage.save(self.key, BytesIO(json.dumps(value).encode()))
-
- def pop(self) -> None:
- try:
- self.storage.delete(self.key)
- except Exception:
- return None
-
- def _make_storage_config(self) -> dict | None:
- backend = options.get("configurations.storage.backend")
- if backend:
- return {
- "backend": backend,
- "options": options.get("configurations.storage.options"),
- }
- else:
- return None
-
-
-class ConfigurationBackend:
- def __init__(self, project: Project) -> None:
- self.project = project
- self.key = f"configurations/{self.project.id}/production"
-
- self.cache = ConfigurationCache(self.key)
- self.storage = ConfigurationStorage(self.key)
-
- def get(self) -> tuple[StorageFormat | None, str]:
- cache_result = self.cache.get()
- if cache_result is not None:
- return (cache_result, "cache")
-
- storage_result = self.storage.get()
- if storage_result:
- self.cache.set(storage_result)
-
- return (storage_result, "store")
-
- def set(self, value: StorageFormat) -> None:
- self.storage.set(value)
- self.cache.set(value)
-
- def pop(self) -> None:
- self.cache.pop()
- self.storage.pop()
-
-
-class APIBackendDecorator:
- def __init__(self, backend: ConfigurationBackend) -> None:
- self.driver = backend
-
- def get(self) -> tuple[APIFormat | None, str]:
- result, source = self.driver.get()
- return self._deserialize(result), source
-
- def set(self, value: APIFormat) -> None:
- self.driver.set(self._serialize(value))
-
- def pop(self) -> None:
- self.driver.pop()
-
- def _deserialize(self, result: StorageFormat | None) -> APIFormat | None:
- if result is None:
- return None
-
- return {
- "features": result["features"],
- "options": result["options"],
- }
-
- def _serialize(self, result: APIFormat) -> StorageFormat:
- return {
- "features": result["features"],
- "options": result["options"],
- "version": 1,
- }
-
-
-def make_configuration_backend(project: Project):
- return ConfigurationBackend(project)
-
-
-def make_api_backend(project: Project):
- return APIBackendDecorator(make_configuration_backend(project))
diff --git a/tests/sentry/remote_config/__init__.py b/tests/sentry/remote_config/__init__.py
deleted file mode 100644
index e69de29bb2d1d6..00000000000000
diff --git a/tests/sentry/remote_config/endpoints/__init__.py b/tests/sentry/remote_config/endpoints/__init__.py
deleted file mode 100644
index e69de29bb2d1d6..00000000000000
diff --git a/tests/sentry/remote_config/endpoints/test_configuration.py b/tests/sentry/remote_config/endpoints/test_configuration.py
deleted file mode 100644
index fc744e90064bb0..00000000000000
--- a/tests/sentry/remote_config/endpoints/test_configuration.py
+++ /dev/null
@@ -1,308 +0,0 @@
-from typing import Any
-from uuid import uuid4
-
-from django.urls import reverse
-from sentry_relay.auth import generate_key_pair
-
-from sentry.models.relay import Relay
-from sentry.remote_config.storage import make_api_backend
-from sentry.testutils.cases import APITestCase
-
-REMOTE_CONFIG_FEATURES = {"organizations:remote-config": True}
-
-
-class ConfigurationAPITestCase(APITestCase):
- endpoint = "sentry-api-0-project-key-configuration"
-
- def setUp(self):
- super().setUp()
- self.login_as(self.user)
- self.url = reverse(self.endpoint, args=(self.organization.slug, self.project.slug))
-
- @property
- def storage(self):
- return make_api_backend(self.project)
-
- def test_get_configuration(self):
- self.storage.set(
- {
- "features": [{"key": "abc", "value": "def"}],
- "options": {"sample_rate": 0.5, "traces_sample_rate": 0},
- },
- )
-
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.get(self.url)
-
- assert response.status_code == 200
- assert response["X-Sentry-Data-Source"] == "cache"
- assert response.json() == {
- "data": {
- "features": [{"key": "abc", "value": "def"}],
- "options": {"sample_rate": 0.5, "traces_sample_rate": 0},
- }
- }
-
- def test_get_configuration_no_cache(self):
- self.storage.set(
- {
- "features": [{"key": "abc", "value": "def"}],
- "options": {"sample_rate": 0.5, "traces_sample_rate": 0},
- },
- )
- self.storage.driver.cache.pop()
-
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.get(self.url)
-
- assert response.status_code == 200
- assert response["X-Sentry-Data-Source"] == "store"
- assert response.json() == {
- "data": {
- "features": [{"key": "abc", "value": "def"}],
- "options": {"sample_rate": 0.5, "traces_sample_rate": 0},
- }
- }
- assert self.storage.driver.cache.get() is not None
-
- def test_get_configuration_not_enabled(self):
- self.storage.set(
- {
- "features": [{"key": "abc", "value": "def"}],
- "options": {"sample_rate": 0.5, "traces_sample_rate": 0},
- },
- )
- response = self.client.get(self.url)
- assert response.status_code == 404
-
- def test_get_configuration_not_found(self):
- self.storage.pop() # Pop anything that might be in the cache.
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.get(self.url)
- assert response.status_code == 404
-
- def test_post_configuration(self):
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.post(
- self.url,
- data={
- "data": {
- "features": [{"key": "hello", "value": "world"}],
- "options": {"sample_rate": 1.0, "traces_sample_rate": 0.2},
- }
- },
- format="json",
- )
-
- assert response.status_code == 201, response.content
- assert response.json() == {
- "data": {
- "features": [{"key": "hello", "value": "world"}],
- "options": {"sample_rate": 1.0, "traces_sample_rate": 0.2},
- }
- }
-
- # Assert the configuration was stored successfully.
- assert self.storage.get()[0] == response.json()["data"]
-
- def test_post_configuration_not_enabled(self):
- response = self.client.post(self.url, data={}, format="json")
- assert response.status_code == 404
-
- def test_post_configuration_different_types(self):
- data: dict[str, Any] = {
- "data": {"options": {"sample_rate": 1.0, "traces_sample_rate": 0.2}}
- }
-
- # Null type
- data["data"]["features"] = [{"key": "abc", "value": None}]
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.post(self.url, data=data, format="json")
- assert response.status_code == 201, response.content
- assert response.json()["data"]["features"][0]["value"] is None
-
- # Bool types
- data["data"]["features"] = [{"key": "abc", "value": False}]
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.post(self.url, data=data, format="json")
- assert response.status_code == 201, response.content
- assert response.json()["data"]["features"][0]["value"] is False
-
- # String types
- data["data"]["features"] = [{"key": "abc", "value": "string"}]
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.post(self.url, data=data, format="json")
- assert response.status_code == 201, response.content
- assert response.json()["data"]["features"][0]["value"] == "string"
-
- # Integer types
- data["data"]["features"] = [{"key": "abc", "value": 1}]
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.post(self.url, data=data, format="json")
- assert response.status_code == 201, response.content
- assert response.json()["data"]["features"][0]["value"] == 1
-
- # Float types
- data["data"]["features"] = [{"key": "abc", "value": 1.0}]
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.post(self.url, data=data, format="json")
- assert response.status_code == 201, response.content
- assert response.json()["data"]["features"][0]["value"] == 1.0
-
- # Array types
- data["data"]["features"] = [{"key": "abc", "value": ["a", "b"]}]
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.post(self.url, data=data, format="json")
- assert response.status_code == 201, response.content
- assert response.json()["data"]["features"][0]["value"] == ["a", "b"]
-
- # Object types
- data["data"]["features"] = [{"key": "abc", "value": {"hello": "world"}}]
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.post(self.url, data=data, format="json")
- assert response.status_code == 201, response.content
- assert response.json()["data"]["features"][0]["value"] == {"hello": "world"}
-
- def test_post_configuration_required_fields(self):
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.post(
- self.url,
- data={"data": {}},
- format="json",
- )
- assert response.status_code == 400, response.content
-
- result = response.json()
- assert len(result["data"]) == 2
- assert result["data"]["options"] is not None
- assert result["data"]["features"] is not None
-
- def test_delete_configuration(self):
- self.storage.set(
- {
- "features": [],
- "options": {"sample_rate": 1.0, "traces_sample_rate": 1.0},
- }
- )
- assert self.storage.get()[0] is not None
-
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.delete(self.url)
- assert response.status_code == 204
- assert self.storage.get()[0] is None
-
- def test_delete_configuration_no_cache(self):
- self.storage.set(
- {
- "features": [],
- "options": {"sample_rate": 1.0, "traces_sample_rate": 1.0},
- }
- )
- assert self.storage.get()[0] is not None
- self.storage.driver.cache.pop()
-
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.delete(self.url)
- assert response.status_code == 204
- assert self.storage.get()[0] is None
- assert self.storage.driver.cache.get() is None
-
- def test_delete_configuration_not_found(self):
- self.storage.pop()
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.delete(self.url)
- assert response.status_code == 204
-
- def test_delete_configuration_not_enabled(self):
- response = self.client.delete(self.url)
- assert response.status_code == 404
-
-
-class ConfigurationProxyAPITestCase(APITestCase):
- endpoint = "sentry-api-0-project-remote-configuration"
-
- def setUp(self):
- super().setUp()
- self.url = reverse(self.endpoint, args=(self.project.id,))
-
- @property
- def storage(self):
- return make_api_backend(self.project)
-
- def test_remote_config_proxy(self):
- """Assert configurations are returned successfully."""
- self.storage.set(
- {
- "features": [{"key": "abc", "value": "def"}],
- "options": {"sample_rate": 0.5, "traces_sample_rate": 0},
- },
- )
-
- keys = generate_key_pair()
- relay = Relay.objects.create(
- relay_id=str(uuid4()), public_key=str(keys[1]), is_internal=True
- )
-
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.get(
- self.url, content_type="application/json", HTTP_X_SENTRY_RELAY_ID=relay.relay_id
- )
- assert response.status_code == 200
- assert response["ETag"] is not None
- assert response["Cache-Control"] == "public, max-age=3600"
- assert response["Content-Type"] == "application/json"
- assert response["X-Sentry-Data-Source"] == "cache"
-
- def test_remote_config_proxy_not_cached(self):
- """Assert configurations are returned successfully."""
- self.storage.set(
- {
- "features": [{"key": "abc", "value": "def"}],
- "options": {"sample_rate": 0.5, "traces_sample_rate": 0},
- },
- )
- self.storage.driver.cache.pop()
-
- keys = generate_key_pair()
- relay = Relay.objects.create(
- relay_id=str(uuid4()), public_key=str(keys[1]), is_internal=True
- )
-
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.get(
- self.url, content_type="application/json", HTTP_X_SENTRY_RELAY_ID=relay.relay_id
- )
- assert response.status_code == 200
- assert response["ETag"] is not None
- assert response["Cache-Control"] == "public, max-age=3600"
- assert response["Content-Type"] == "application/json"
- assert response["X-Sentry-Data-Source"] == "store"
-
- def test_remote_config_proxy_not_found(self):
- """Assert missing configurations 404."""
- self.storage.pop()
-
- keys = generate_key_pair()
- relay = Relay.objects.create(
- relay_id=str(uuid4()), public_key=str(keys[1]), is_internal=True
- )
-
- with self.feature(REMOTE_CONFIG_FEATURES):
- response = self.client.get(
- self.url, content_type="application/json", HTTP_X_SENTRY_RELAY_ID=relay.relay_id
- )
- assert response.status_code == 404
-
- def test_remote_config_proxy_feature_disabled(self):
- """Assert access is gated by feature flag."""
- self.storage.pop()
-
- keys = generate_key_pair()
- relay = Relay.objects.create(
- relay_id=str(uuid4()), public_key=str(keys[1]), is_internal=True
- )
-
- response = self.client.get(
- self.url, content_type="application/json", HTTP_X_SENTRY_RELAY_ID=relay.relay_id
- )
- assert response.status_code == 404
From 775fb8ffa86881011953221581a0ec858bd6af33 Mon Sep 17 00:00:00 2001
From: Katie Byers
Date: Fri, 4 Oct 2024 11:29:40 -0700
Subject: [PATCH 137/139] ref(grouping): Remove obsolete try-catch around
`get_hashes` call (#78619)
In `_calculate_event_grouping`, there's a try-catch surrounding the call to `event.get_hashes(loaded_grouping_config)`, in case the grouping config isn't valid and we run into a `GroupingConfigNotFound` error. However, since that was put in place, we've changed when and how we load the grouping config: an invalid config now throws `GroupingConfigNotFound` before it ever gets to the `get_hashes` call (it throws during config loading, at the top of the function, at the `loaded_grouping_config = load_grouping_config(grouping_config)` line). Thus we no longer need the try-catch.
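For reference, a minimal sketch of the resulting flow (simplified, not the real function body; the import path is an assumption):

```python
# Simplified sketch of the flow after this change -- not the real function body.
# The import path is an assumption; adjust to wherever load_grouping_config lives.
from sentry.grouping.api import GroupingConfigNotFound, load_grouping_config


def calculate_hashes_sketch(event, grouping_config: dict):
    # An invalid or unknown config id surfaces here, at load time (either by
    # raising GroupingConfigNotFound or by being replaced with the default)...
    loaded_grouping_config = load_grouping_config(grouping_config)

    # ...so by the time we hash, the loaded config is known to be valid and the
    # old `except GroupingConfigNotFound` fallback around this call is unreachable.
    return event.get_hashes(loaded_grouping_config)
```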
---
src/sentry/grouping/ingest/hashing.py | 12 +-----------
1 file changed, 1 insertion(+), 11 deletions(-)
diff --git a/src/sentry/grouping/ingest/hashing.py b/src/sentry/grouping/ingest/hashing.py
index 8863ab46a38fb8..8b8f5c1f96051e 100644
--- a/src/sentry/grouping/ingest/hashing.py
+++ b/src/sentry/grouping/ingest/hashing.py
@@ -14,7 +14,6 @@
NULL_GROUPING_CONFIG,
BackgroundGroupingConfigLoader,
GroupingConfig,
- GroupingConfigNotFound,
SecondaryGroupingConfigLoader,
apply_server_fingerprinting,
get_fingerprinting_config_for_project,
@@ -70,16 +69,7 @@ def _calculate_event_grouping(
)
with metrics.timer("event_manager.event.get_hashes", tags=metric_tags):
- # TODO: It's not clear we can even hit `GroupingConfigNotFound` here - this is leftover
- # from a time before we started separately retrieving the grouping config and passing it
- # directly to `get_hashes`. Now that we do that, a bogus config will get replaced by the
- # default long before we get here. Should we consolidate bogus config handling into the
- # code actually getting the config?
- try:
- hashes = event.get_hashes(loaded_grouping_config)
- except GroupingConfigNotFound:
- event.data["grouping_config"] = get_grouping_config_dict_for_project(project)
- hashes = event.get_hashes()
+ hashes = event.get_hashes(loaded_grouping_config)
return hashes
From a0aa769d7630093cbacbb7e6c3e1c38b607d0b24 Mon Sep 17 00:00:00 2001
From: Rohan Agarwal <47861399+roaga@users.noreply.github.com>
Date: Fri, 4 Oct 2024 14:35:00 -0400
Subject: [PATCH 138/139] feat(autofix): Add controllable insight cards
(#78432)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Allows the user to rethink the reasoning from any point in the chain.
Also fixes a bug in how errored steps are displayed.
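For context, this is roughly the request the new "Rethink from here" control issues, mirrored from the test expectations below (a hedged Python sketch; the host, API prefix, and auth token are placeholders, not part of this change):

```python
# Hedged sketch of the update request driven by the "Rethink from here" UI.
# Payload shape mirrors the spec expectations in this patch; host/auth are placeholders.
import requests


def restart_autofix_from_point(
    group_id: str, run_id: str, step_index: int, message: str
) -> requests.Response:
    return requests.post(
        # The frontend posts to `/issues/{group_id}/autofix/update/` via the API
        # client; the absolute URL below is a placeholder.
        f"https://sentry.example.invalid/api/0/issues/{group_id}/autofix/update/",
        headers={"Authorization": "Bearer <api-token>"},  # placeholder auth
        json={
            "run_id": run_id,
            "payload": {
                "type": "restart_from_point_with_feedback",
                "message": message,
                "step_index": step_index,
                # The UI currently sends null here (see the spec below).
                "retain_insight_card_index": None,
            },
        },
        timeout=10,
    )
```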
---
.../events/autofix/autofixChanges.tsx | 2 +-
.../autofix/autofixInsightCards.spec.tsx | 121 +++++++++-
.../events/autofix/autofixInsightCards.tsx | 226 ++++++++++++++++--
.../events/autofix/autofixMessageBox.tsx | 5 +
.../events/autofix/autofixRootCause.tsx | 2 +-
.../events/autofix/autofixSteps.tsx | 44 ++--
.../components/events/autofix/useAutofix.tsx | 5 +-
7 files changed, 363 insertions(+), 42 deletions(-)
diff --git a/static/app/components/events/autofix/autofixChanges.tsx b/static/app/components/events/autofix/autofixChanges.tsx
index 1bce114ece1276..1f7ffb115af244 100644
--- a/static/app/components/events/autofix/autofixChanges.tsx
+++ b/static/app/components/events/autofix/autofixChanges.tsx
@@ -267,7 +267,7 @@ const ChangesContainer = styled('div')`
border: 1px solid ${p => p.theme.innerBorder};
border-radius: ${p => p.theme.borderRadius};
overflow: hidden;
- box-shadow: ${p => p.theme.dropShadowHeavy};
+ box-shadow: ${p => p.theme.dropShadowMedium};
padding-left: ${space(2)};
padding-right: ${space(2)};
padding-top: ${space(1)};
diff --git a/static/app/components/events/autofix/autofixInsightCards.spec.tsx b/static/app/components/events/autofix/autofixInsightCards.spec.tsx
index f520e0dc2c4377..44aea66e46d20a 100644
--- a/static/app/components/events/autofix/autofixInsightCards.spec.tsx
+++ b/static/app/components/events/autofix/autofixInsightCards.spec.tsx
@@ -1,5 +1,6 @@
-import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
+import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
+import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator';
import AutofixInsightCards from 'sentry/components/events/autofix/autofixInsightCards';
import type {AutofixInsight} from 'sentry/components/events/autofix/types';
@@ -7,6 +8,8 @@ jest.mock('sentry/utils/marked', () => ({
singleLineRenderer: jest.fn(text => text),
}));
+jest.mock('sentry/actionCreators/indicator');
+
const sampleInsights: AutofixInsight[] = [
{
breadcrumb_context: [
@@ -60,6 +63,12 @@ const sampleRepos = [
},
];
+beforeEach(() => {
+ (addSuccessMessage as jest.Mock).mockClear();
+ (addErrorMessage as jest.Mock).mockClear();
+ MockApiClient.clearMockResponses();
+});
+
describe('AutofixInsightCards', () => {
const renderComponent = (props = {}) => {
return render(
@@ -68,6 +77,9 @@ describe('AutofixInsightCards', () => {
repos={sampleRepos}
hasStepAbove={false}
hasStepBelow={false}
+ groupId="1"
+ runId="1"
+ stepIndex={0}
{...props}
/>
);
@@ -144,4 +156,111 @@ describe('AutofixInsightCards', () => {
expect(screen.getByText('User message')).toBeInTheDocument();
expect(screen.getByText('Another insight')).toBeInTheDocument();
});
+
+ it('renders "Rethink from here" buttons', () => {
+ renderComponent();
+ const rethinkButtons = screen.getAllByText('Rethink from here');
+ expect(rethinkButtons.length).toBeGreaterThan(0);
+ });
+
+ it('shows rethink input overlay when "Rethink from here" is clicked', async () => {
+ renderComponent();
+ const rethinkButton = screen.getAllByText('Rethink from here')[0];
+ await userEvent.click(rethinkButton);
+ expect(screen.getByPlaceholderText('Say something...')).toBeInTheDocument();
+ });
+
+ it('hides rethink input overlay when clicked outside', async () => {
+ renderComponent();
+ const rethinkButton = screen.getAllByText('Rethink from here')[0];
+ await userEvent.click(rethinkButton);
+ expect(screen.getByPlaceholderText('Say something...')).toBeInTheDocument();
+
+ await userEvent.click(document.body);
+ expect(screen.queryByPlaceholderText('Say something...')).not.toBeInTheDocument();
+ });
+
+ it('submits rethink request when form is submitted', async () => {
+ const mockApi = MockApiClient.addMockResponse({
+ url: '/issues/1/autofix/update/',
+ method: 'POST',
+ });
+
+ renderComponent();
+ const rethinkButton = screen.getAllByText('Rethink from here')[0];
+ await userEvent.click(rethinkButton);
+
+ const input = screen.getByPlaceholderText('Say something...');
+ await userEvent.type(input, 'Rethink this part');
+
+ const submitButton = screen.getByLabelText(
+ 'Restart analysis from this point in the chain'
+ );
+ await userEvent.click(submitButton);
+
+ expect(mockApi).toHaveBeenCalledWith(
+ '/issues/1/autofix/update/',
+ expect.objectContaining({
+ method: 'POST',
+ data: expect.objectContaining({
+ run_id: '1',
+ payload: expect.objectContaining({
+ type: 'restart_from_point_with_feedback',
+ message: 'Rethink this part',
+ step_index: 0,
+ retain_insight_card_index: null,
+ }),
+ }),
+ })
+ );
+ });
+
+ it('shows success message after successful rethink submission', async () => {
+ MockApiClient.addMockResponse({
+ url: '/issues/1/autofix/update/',
+ method: 'POST',
+ });
+
+ renderComponent();
+ const rethinkButton = screen.getAllByText('Rethink from here')[0];
+ await userEvent.click(rethinkButton);
+
+ const input = screen.getByPlaceholderText('Say something...');
+ await userEvent.type(input, 'Rethink this part');
+
+ const submitButton = screen.getByLabelText(
+ 'Restart analysis from this point in the chain'
+ );
+ await userEvent.click(submitButton);
+
+ await waitFor(() => {
+ expect(addSuccessMessage).toHaveBeenCalledWith("Thanks, I'll rethink this...");
+ });
+ });
+
+ it('shows error message after failed rethink submission', async () => {
+ MockApiClient.addMockResponse({
+ url: '/issues/1/autofix/update/',
+ method: 'POST',
+ statusCode: 500,
+ });
+
+ renderComponent();
+ const rethinkButton = screen.getAllByText('Rethink from here')[0];
+ await userEvent.click(rethinkButton);
+
+ const input = screen.getByPlaceholderText('Say something...');
+ await userEvent.type(input, 'Rethink this part');
+
+ const submitButton = screen.getByLabelText(
+ 'Restart analysis from this point in the chain'
+ );
+ await userEvent.click(submitButton);
+
+ await waitFor(() => {
+ expect(addErrorMessage).toHaveBeenCalledWith(
+ 'Something went wrong when sending Autofix your message.'
+ );
+ });
+ });
});
diff --git a/static/app/components/events/autofix/autofixInsightCards.tsx b/static/app/components/events/autofix/autofixInsightCards.tsx
index a42e04a90bdb26..10a6fe3e3eeffa 100644
--- a/static/app/components/events/autofix/autofixInsightCards.tsx
+++ b/static/app/components/events/autofix/autofixInsightCards.tsx
@@ -1,9 +1,10 @@
-import {useState} from 'react';
+import {useEffect, useRef, useState} from 'react';
import styled from '@emotion/styled';
import {AnimatePresence, type AnimationProps, motion} from 'framer-motion';
import bannerImage from 'sentry-images/insights/module-upsells/insights-module-upsell.svg';
+import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator';
import {Button} from 'sentry/components/button';
import {
replaceHeadersWithBold,
@@ -21,13 +22,24 @@ import {
getBreadcrumbColorConfig,
getBreadcrumbTitle,
} from 'sentry/components/events/breadcrumbs/utils';
+import Input from 'sentry/components/input';
import StructuredEventData from 'sentry/components/structuredEventData';
import Timeline from 'sentry/components/timeline';
-import {IconArrow, IconChevron, IconCode, IconFire, IconUser} from 'sentry/icons';
+import {
+ IconArrow,
+ IconChevron,
+ IconCode,
+ IconFire,
+ IconRefresh,
+ IconUser,
+} from 'sentry/icons';
+import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {BreadcrumbLevelType, BreadcrumbType} from 'sentry/types/breadcrumbs';
import {singleLineRenderer} from 'sentry/utils/marked';
+import {useMutation} from 'sentry/utils/queryClient';
import testableTransition from 'sentry/utils/testableTransition';
+import useApi from 'sentry/utils/useApi';
interface AutofixBreadcrumbSnippetProps {
breadcrumb: BreadcrumbContext;
@@ -113,10 +125,14 @@ const animationProps: AnimationProps = {
};
interface AutofixInsightCardProps {
+ groupId: string;
hasCardAbove: boolean;
hasCardBelow: boolean;
+ index: number;
insight: AutofixInsight;
repos: AutofixRepository[];
+ runId: string;
+ stepIndex: number;
}
function AutofixInsightCard({
@@ -124,6 +140,10 @@ function AutofixInsightCard({
hasCardBelow,
hasCardAbove,
repos,
+ index,
+ stepIndex,
+ groupId,
+ runId,
}: AutofixInsightCardProps) {
const isUserMessage = insight.justification === 'USER';
@@ -132,9 +152,12 @@ function AutofixInsightCard({
{hasCardAbove && (
-
-
-
+
)}
{!isUserMessage && (
@@ -245,9 +268,12 @@ function AutofixInsightCard({
)}
{hasCardBelow && (
-
-
-
+
)}
@@ -256,10 +282,13 @@ function AutofixInsightCard({
}
interface AutofixInsightCardsProps {
+ groupId: string;
hasStepAbove: boolean;
hasStepBelow: boolean;
insights: AutofixInsight[];
repos: AutofixRepository[];
+ runId: string;
+ stepIndex: number;
}
function AutofixInsightCards({
@@ -267,15 +296,21 @@ function AutofixInsightCards({
repos,
hasStepBelow,
hasStepAbove,
+ stepIndex,
+ groupId,
+ runId,
}: AutofixInsightCardsProps) {
return (
{!hasStepAbove && (
Insights
-
-
-
+
)}
{insights.length > 0 ? (
@@ -287,6 +322,10 @@ function AutofixInsightCards({
hasCardBelow={index < insights.length - 1 || hasStepBelow}
hasCardAbove={hasStepAbove && index === 0}
repos={repos}
+ index={index}
+ stepIndex={stepIndex}
+ groupId={groupId}
+ runId={runId}
/>
)
)
@@ -305,6 +344,120 @@ function AutofixInsightCards({
);
}
+function useUpdateInsightCard({groupId, runId}: {groupId: string; runId: string}) {
+ const api = useApi({persistInFlight: true});
+
+ return useMutation({
+ mutationFn: (params: {
+ message: string;
+ retain_insight_card_index: number | null;
+ step_index: number;
+ }) => {
+ return api.requestPromise(`/issues/${groupId}/autofix/update/`, {
+ method: 'POST',
+ data: {
+ run_id: runId,
+ payload: {
+ type: 'restart_from_point_with_feedback',
+ message: params.message,
+ step_index: params.step_index,
+ retain_insight_card_index: params.retain_insight_card_index,
+ },
+ },
+ });
+ },
+ onSuccess: _ => {
+ addSuccessMessage(t("Thanks, I'll rethink this..."));
+ },
+ onError: () => {
+ addErrorMessage(t('Something went wrong when sending Autofix your message.'));
+ },
+ });
+}
+
+function ChainLink({
+ groupId,
+ runId,
+ stepIndex,
+ insightCardAboveIndex,
+}: {
+ groupId: string;
+ insightCardAboveIndex: number | null;
+ runId: string;
+ stepIndex: number;
+}) {
+ const [showOverlay, setShowOverlay] = useState(false);
+ const overlayRef = useRef(null);
+ const [comment, setComment] = useState('');
+ const {mutate: send} = useUpdateInsightCard({groupId, runId});
+
+ const handleClickOutside = event => {
+ if (overlayRef.current && !overlayRef.current.contains(event.target)) {
+ setShowOverlay(false);
+ }
+ };
+
+ useEffect(() => {
+ if (showOverlay) {
+ document.addEventListener('mousedown', handleClickOutside);
+ } else {
+ document.removeEventListener('mousedown', handleClickOutside);
+ }
+ return () => {
+ document.removeEventListener('mousedown', handleClickOutside);
+ };
+ }, [showOverlay]);
+
+ return (
+
+
+ }
+ size="zero"
+ className="hover-button"
+ onClick={() => setShowOverlay(true)}
+ >
+ Rethink from here
+
+
+ {showOverlay && (
+
+
+
+ )}
+
+ );
+}
+
const UserMessageContainer = styled('div')`
color: ${p => p.theme.subText};
display: flex;
@@ -350,11 +503,54 @@ const InsightContainer = styled(motion.div)`
box-shadow: ${p => p.theme.dropShadowMedium};
`;
-const IconContainer = styled('div')`
- padding: ${space(1)};
- display: flex;
- justify-content: center;
+const ArrowContainer = styled('div')`
+ display: grid;
+ grid-template-columns: 1fr auto 1fr;
+ color: ${p => p.theme.subText};
+ align-items: center;
+ position: relative;
+ z-index: 0;
+
+ .arrow-icon {
+ margin-top: ${space(1)};
+ grid-column: 2 / 3;
+ justify-self: center;
+ }
+
+ .hover-button {
+ opacity: 0;
+ grid-column: 3 / 4;
+ justify-self: end;
+ transition: opacity 0.1s ease-in-out;
+ }
+
+ &:hover .hover-button {
+ opacity: 1;
+ }
+`;
+
+const RethinkButton = styled(Button)`
+ font-weight: normal;
+ font-size: small;
+ border: none;
color: ${p => p.theme.subText};
+ margin-top: ${space(1)};
+`;
+
+const RethinkInput = styled('div')`
+ position: absolute;
+ box-shadow: ${p => p.theme.dropShadowHeavy};
+ border: 1px solid ${p => p.theme.border};
+ width: 95%;
+ background: ${p => p.theme.backgroundElevated};
+ padding: ${space(0.5)};
+ border-radius: ${p => p.theme.borderRadius};
+ margin: 0 ${space(2)} 0 ${space(2)};
+
+ .row-form {
+ display: flex;
+ gap: ${space(1)};
+ }
`;
const BreadcrumbItem = styled(Timeline.Item)`
diff --git a/static/app/components/events/autofix/autofixMessageBox.tsx b/static/app/components/events/autofix/autofixMessageBox.tsx
index de8b3c2e9a4014..6c3fe6e0abf25d 100644
--- a/static/app/components/events/autofix/autofixMessageBox.tsx
+++ b/static/app/components/events/autofix/autofixMessageBox.tsx
@@ -114,6 +114,11 @@ function AutofixMessageBox({
const [message, setMessage] = useState('');
const {mutate: send} = useSendMessage({groupId, runId});
+ isDisabled =
+ isDisabled ||
+ step?.status === 'ERROR' ||
+ (step?.type === AutofixStepType.ROOT_CAUSE_ANALYSIS && step.causes?.length === 0);
+
const handleSend = (e: FormEvent) => {
e.preventDefault();
if (message.trim() !== '' || allowEmptyMessage) {
diff --git a/static/app/components/events/autofix/autofixRootCause.tsx b/static/app/components/events/autofix/autofixRootCause.tsx
index b9f68d62238770..bc23c20f00cd7a 100644
--- a/static/app/components/events/autofix/autofixRootCause.tsx
+++ b/static/app/components/events/autofix/autofixRootCause.tsx
@@ -513,7 +513,7 @@ const CausesContainer = styled('div')`
border: 1px solid ${p => p.theme.innerBorder};
border-radius: ${p => p.theme.borderRadius};
overflow: hidden;
- box-shadow: ${p => p.theme.dropShadowHeavy};
+ box-shadow: ${p => p.theme.dropShadowMedium};
`;
const PotentialCausesContainer = styled(CausesContainer)`
diff --git a/static/app/components/events/autofix/autofixSteps.tsx b/static/app/components/events/autofix/autofixSteps.tsx
index 8dacbf92e60e1d..38e40c6be05102 100644
--- a/static/app/components/events/autofix/autofixSteps.tsx
+++ b/static/app/components/events/autofix/autofixSteps.tsx
@@ -80,6 +80,9 @@ export function Step({
repos={repos}
hasStepBelow={hasStepBelow}
hasStepAbove={hasStepAbove}
+ stepIndex={step.index}
+ groupId={groupId}
+ runId={runId}
/>
)}
{step.type === AutofixStepType.ROOT_CAUSE_ANALYSIS && (
@@ -175,12 +178,6 @@ export function AutofixSteps({data, groupId, runId, onRetry}: AutofixStepsProps)
lastStep.type === AutofixStepType.CHANGES && lastStep.status === 'COMPLETED';
const disabled = areCodeChangesShowing ? true : false;
- const previousStep = steps.length > 2 ? steps[steps.length - 2] : null;
- const previousStepErrored =
- previousStep !== null &&
- previousStep?.type === lastStep.type &&
- previousStep.status === 'ERROR';
-
const scrollToMatchingStep = () => {
const matchingStepIndex = steps.findIndex(step => step.type === lastStep.type);
if (matchingStepIndex !== -1 && stepsRef.current[matchingStepIndex]) {
@@ -191,20 +188,27 @@ export function AutofixSteps({data, groupId, runId, onRetry}: AutofixStepsProps)
return (
- {steps.map((step, index) => (
- (stepsRef.current[index] = el)} key={step.id}>
- 0}
- groupId={groupId}
- runId={runId}
- onRetry={onRetry}
- repos={repos}
- hasErroredStepBefore={previousStepErrored}
- />
-
- ))}
+ {steps.map((step, index) => {
+ const previousStep = index > 0 ? steps[index - 1] : null;
+ const previousStepErrored =
+ previousStep !== null &&
+ previousStep?.type === step.type &&
+ previousStep.status === 'ERROR';
+ return (
+ (stepsRef.current[index] = el)} key={step.id}>
+ 0}
+ groupId={groupId}
+ runId={runId}
+ onRetry={onRetry}
+ repos={repos}
+ hasErroredStepBefore={previousStepErrored}
+ />
+
+ );
+ })}
{
return data;
};
-const isPolling = (autofixData?: AutofixData | null) =>
- autofixData?.status === 'PROCESSING' ||
- autofixData?.status === 'PENDING' ||
- autofixData?.status === 'NEED_MORE_INFORMATION';
+const isPolling = (autofixData?: AutofixData | null) => autofixData?.status !== 'PENDING';
export const useAutofixData = ({groupId}: {groupId: string}) => {
const {data} = useApiQuery(makeAutofixQueryKey(groupId), {
From c6dbdaba37381a3bfba1a1517f07e2a30c1f1b64 Mon Sep 17 00:00:00 2001
From: William Mak
Date: Fri, 4 Oct 2024 14:42:14 -0400
Subject: [PATCH 139/139] chore(spans): Update proto version (#78613)
- Updates to the latest proto version
---
requirements-base.txt | 2 +-
requirements-dev-frozen.txt | 2 +-
requirements-frozen.txt | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/requirements-base.txt b/requirements-base.txt
index cd6233b8fc4e21..a65c93a4fd5b01 100644
--- a/requirements-base.txt
+++ b/requirements-base.txt
@@ -67,7 +67,7 @@ rfc3986-validator>=0.1.1
sentry-arroyo>=2.16.5
sentry-kafka-schemas>=0.1.111
sentry-ophio==1.0.0
-sentry-protos>=0.1.21
+sentry-protos>=0.1.23
sentry-redis-tools>=0.1.7
sentry-relay>=0.9.2
sentry-sdk>=2.15.0
diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt
index 319af852deffec..fb982f65397072 100644
--- a/requirements-dev-frozen.txt
+++ b/requirements-dev-frozen.txt
@@ -184,7 +184,7 @@ sentry-forked-django-stubs==5.1.0.post1
sentry-forked-djangorestframework-stubs==3.15.1.post1
sentry-kafka-schemas==0.1.111
sentry-ophio==1.0.0
-sentry-protos==0.1.21
+sentry-protos==0.1.23
sentry-redis-tools==0.1.7
sentry-relay==0.9.2
sentry-sdk==2.15.0
diff --git a/requirements-frozen.txt b/requirements-frozen.txt
index 30ed0334156519..ddc542ef967a68 100644
--- a/requirements-frozen.txt
+++ b/requirements-frozen.txt
@@ -125,7 +125,7 @@ s3transfer==0.10.0
sentry-arroyo==2.16.5
sentry-kafka-schemas==0.1.111
sentry-ophio==1.0.0
-sentry-protos==0.1.21
+sentry-protos==0.1.23
sentry-redis-tools==0.1.7
sentry-relay==0.9.2
sentry-sdk==2.15.0