diff --git a/fixtures/backup/model_dependencies/detailed.json b/fixtures/backup/model_dependencies/detailed.json index 87a66b8cadc792..68d4d162241c28 100644 --- a/fixtures/backup/model_dependencies/detailed.json +++ b/fixtures/backup/model_dependencies/detailed.json @@ -737,30 +737,6 @@ "table_name": "sentry_artifactbundle", "uniques": [] }, - "sentry.artifactbundleflatfileindex": { - "dangling": false, - "foreign_keys": { - "project_id": { - "kind": "ImplicitForeignKey", - "model": "sentry.project", - "nullable": false - } - }, - "model": "sentry.artifactbundleflatfileindex", - "relocation_dependencies": [], - "relocation_scope": "Excluded", - "silos": [ - "Region" - ], - "table_name": "sentry_artifactbundleflatfileindex", - "uniques": [ - [ - "dist_name", - "project_id", - "release_name" - ] - ] - }, "sentry.artifactbundleindex": { "dangling": false, "foreign_keys": { @@ -2119,34 +2095,6 @@ ] ] }, - "sentry.flatfileindexstate": { - "dangling": false, - "foreign_keys": { - "artifact_bundle": { - "kind": "FlexibleForeignKey", - "model": "sentry.artifactbundle", - "nullable": false - }, - "flat_file_index": { - "kind": "FlexibleForeignKey", - "model": "sentry.artifactbundleflatfileindex", - "nullable": false - } - }, - "model": "sentry.flatfileindexstate", - "relocation_dependencies": [], - "relocation_scope": "Excluded", - "silos": [ - "Region" - ], - "table_name": "sentry_flatfileindexstate", - "uniques": [ - [ - "artifact_bundle", - "flat_file_index" - ] - ] - }, "sentry.group": { "dangling": false, "foreign_keys": { @@ -3289,6 +3237,24 @@ ] ] }, + "sentry.monitorenvbrokendetection": { + "dangling": false, + "foreign_keys": { + "monitor_incident": { + "kind": "FlexibleForeignKey", + "model": "sentry.monitorincident", + "nullable": false + } + }, + "model": "sentry.monitorenvbrokendetection", + "relocation_dependencies": [], + "relocation_scope": "Excluded", + "silos": [ + "Region" + ], + "table_name": "sentry_monitorenvbrokendetection", + "uniques": [] + }, 
"sentry.monitorenvironment": { "dangling": false, "foreign_keys": { diff --git a/fixtures/backup/model_dependencies/flat.json b/fixtures/backup/model_dependencies/flat.json index d3b183546af522..de92ea42c392b3 100644 --- a/fixtures/backup/model_dependencies/flat.json +++ b/fixtures/backup/model_dependencies/flat.json @@ -106,9 +106,6 @@ "sentry.file", "sentry.organization" ], - "sentry.artifactbundleflatfileindex": [ - "sentry.project" - ], "sentry.artifactbundleindex": [ "sentry.artifactbundle", "sentry.organization" @@ -291,10 +288,6 @@ "sentry.fileblob", "sentry.organization" ], - "sentry.flatfileindexstate": [ - "sentry.artifactbundle", - "sentry.artifactbundleflatfileindex" - ], "sentry.group": [ "sentry.project", "sentry.release" @@ -456,6 +449,9 @@ "sentry.monitorlocation", "sentry.project" ], + "sentry.monitorenvbrokendetection": [ + "sentry.monitorincident" + ], "sentry.monitorenvironment": [ "sentry.environment", "sentry.monitor" diff --git a/fixtures/backup/model_dependencies/sorted.json b/fixtures/backup/model_dependencies/sorted.json index 6f268249602ca4..f8b0e4f4049cd3 100644 --- a/fixtures/backup/model_dependencies/sorted.json +++ b/fixtures/backup/model_dependencies/sorted.json @@ -109,7 +109,6 @@ "sentry.authidentity", "sentry.authenticator", "sentry.assistantactivity", - "sentry.artifactbundleflatfileindex", "sentry.artifactbundle", "sentry.appconnectbuild", "sentry.apikey", @@ -175,7 +174,6 @@ "sentry.groupcommitresolution", "sentry.groupbookmark", "sentry.groupassignee", - "sentry.flatfileindexstate", "sentry.fileblobindex", "sentry.exporteddatablob", "sentry.environmentproject", @@ -208,6 +206,7 @@ "sentry.organizationmemberteamreplica", "sentry.notificationactionproject", "sentry.monitorincident", + "sentry.monitorenvbrokendetection", "sentry.incident", "sentry.dashboardwidgetquery", "sentry.alertruletrigger", diff --git a/fixtures/backup/model_dependencies/truncate.json b/fixtures/backup/model_dependencies/truncate.json index 
ea15145708227c..f1bf22ceed0965 100644 --- a/fixtures/backup/model_dependencies/truncate.json +++ b/fixtures/backup/model_dependencies/truncate.json @@ -109,7 +109,6 @@ "sentry_authidentity", "auth_authenticator", "sentry_assistant_activity", - "sentry_artifactbundleflatfileindex", "sentry_artifactbundle", "sentry_appconnectbuild", "sentry_apikey", @@ -175,7 +174,6 @@ "sentry_groupcommitresolution", "sentry_groupbookmark", "sentry_groupasignee", - "sentry_flatfileindexstate", "sentry_fileblobindex", "sentry_exporteddatablob", "sentry_environmentproject", @@ -208,6 +206,7 @@ "sentry_organizationmember_teamsreplica", "sentry_notificationactionproject", "sentry_monitorincident", + "sentry_monitorenvbrokendetection", "sentry_incident", "sentry_dashboardwidgetquery", "sentry_alertruletrigger", diff --git a/fixtures/js-stubs/group.ts b/fixtures/js-stubs/group.ts index 6da2c6da9c79b0..27662363a478ee 100644 --- a/fixtures/js-stubs/group.ts +++ b/fixtures/js-stubs/group.ts @@ -39,6 +39,7 @@ export function GroupFixture(params: Partial = {}): Group { pluginContexts: [], pluginIssues: [], priority: PriorityLevel.MEDIUM, + priorityLockedAt: null, project: ProjectFixture({ platform: 'javascript', }), diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index 09f771acb7fa5a..872543c08d7e1d 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -9,5 +9,5 @@ feedback: 0004_index_together hybridcloud: 0013_add_orgauthtokenreplica_token_index nodestore: 0002_nodestore_no_dictfield replays: 0004_index_together -sentry: 0660_fix_cron_monitor_invalid_orgs +sentry: 0666_monitor_incident_default_grouphash social_auth: 0002_default_auto_field diff --git a/pyproject.toml b/pyproject.toml index ce2b18186d3fea..13e699b27c5321 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -287,7 +287,6 @@ module = [ "sentry.incidents.endpoints.organization_incident_comment_details", "sentry.incidents.endpoints.organization_incident_index", "sentry.incidents.logic", - 
"sentry.incidents.models", "sentry.incidents.subscription_processor", "sentry.incidents.tasks", "sentry.integrations.aws_lambda.integration", @@ -371,9 +370,7 @@ module = [ "sentry.issues.endpoints.group_events", "sentry.issues.endpoints.organization_group_index", "sentry.issues.endpoints.source_map_debug", - "sentry.issues.occurrence_consumer", "sentry.issues.search", - "sentry.issues.status_change", "sentry.middleware.access_log", "sentry.middleware.auth", "sentry.middleware.ratelimit", @@ -627,6 +624,7 @@ disable_error_code = [ module = [ "sentry.buffer.base", "sentry.buffer.redis", + "sentry.eventstore.reprocessing.redis", "sentry.utils.redis", "sentry.utils.redis_metrics", "sentry.utils.locking.backends.redis", diff --git a/requirements-base.txt b/requirements-base.txt index fa6f922654d677..518dda18cf1ce4 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -65,7 +65,7 @@ sentry-arroyo>=2.16.2 sentry-kafka-schemas>=0.1.58 sentry-ophio==0.1.5 sentry-redis-tools>=0.1.7 -sentry-relay>=0.8.45 +sentry-relay>=0.8.48 sentry-sdk>=1.39.2 snuba-sdk>=2.0.29 simplejson>=3.17.6 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index bde98736cf313d..ae4c46b61f9e18 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -180,7 +180,7 @@ sentry-forked-djangorestframework-stubs==3.14.5.post1 sentry-kafka-schemas==0.1.58 sentry-ophio==0.1.5 sentry-redis-tools==0.1.7 -sentry-relay==0.8.45 +sentry-relay==0.8.48 sentry-sdk==1.39.2 sentry-usage-accountant==0.0.10 simplejson==3.17.6 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index b0ef79fddf89a1..004b1d338a4f08 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -122,7 +122,7 @@ sentry-arroyo==2.16.2 sentry-kafka-schemas==0.1.58 sentry-ophio==0.1.5 sentry-redis-tools==0.1.7 -sentry-relay==0.8.45 +sentry-relay==0.8.48 sentry-sdk==1.39.2 sentry-usage-accountant==0.0.10 simplejson==3.17.6 diff --git a/src/sentry/api/bases/incident.py 
b/src/sentry/api/bases/incident.py index a13b8fbdf855c4..aa18754573d9b3 100644 --- a/src/sentry/api/bases/incident.py +++ b/src/sentry/api/bases/incident.py @@ -4,7 +4,7 @@ from sentry import features from sentry.api.bases.organization import OrganizationEndpoint, OrganizationPermission from sentry.api.exceptions import ResourceDoesNotExist -from sentry.incidents.models import Incident +from sentry.incidents.models.incident import Incident class IncidentPermission(OrganizationPermission): diff --git a/src/sentry/api/bases/sentryapps.py b/src/sentry/api/bases/sentryapps.py index 8ba7c8b923c3f7..b058f931cbdf00 100644 --- a/src/sentry/api/bases/sentryapps.py +++ b/src/sentry/api/bases/sentryapps.py @@ -364,7 +364,10 @@ def has_object_permission(self, request: Request, view, installation): # TODO(hybrid-cloud): Replace this RPC with an org member lookup when that exists? org_context = organization_service.get_organization_by_id( - id=installation.organization_id, user_id=request.user.id + id=installation.organization_id, + user_id=request.user.id, + include_teams=False, + include_projects=False, ) if ( org_context.member is None diff --git a/src/sentry/api/endpoints/auth_index.py b/src/sentry/api/endpoints/auth_index.py index 6944fc86f22b96..dfd764a6e30eff 100644 --- a/src/sentry/api/endpoints/auth_index.py +++ b/src/sentry/api/endpoints/auth_index.py @@ -1,6 +1,5 @@ import logging -from django.conf import settings from django.contrib.auth import logout from django.contrib.auth.models import AnonymousUser from django.utils.http import url_has_allowed_host_and_scheme @@ -33,10 +32,6 @@ PREFILLED_SU_MODAL_KEY = "prefilled_su_modal" -DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL = getattr( - settings, "DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL", False -) - @control_silo_endpoint class BaseAuthIndexEndpoint(Endpoint): @@ -66,7 +61,9 @@ def _reauthenticate_with_sso(request: Request, org_id: int) -> None: if not url_has_allowed_host_and_scheme(redirect, allowed_hosts=(request.get_host(),)): 
redirect = None initiate_login(request, redirect) - organization_context = organization_service.get_organization_by_id(id=org_id) + organization_context = organization_service.get_organization_by_id( + id=org_id, include_teams=False, include_projects=False + ) assert organization_context, "Failed to fetch organization in _reauthenticate_with_sso" raise SsoRequired( organization=organization_context.organization, @@ -153,6 +150,15 @@ def _validate_superuser( SSO and if they do not, we redirect them back to the SSO login. """ + logger.info( + "auth-index.validate_superuser", + extra={ + "validator": validator, + "user": request.user.id, + "raise_exception": not DISABLE_SSO_CHECK_FOR_LOCAL_DEV, + "verify_authenticator": verify_authenticator, + }, + ) # Disable exception for missing password or u2f code if we're running locally validator.is_valid(raise_exception=not DISABLE_SSO_CHECK_FOR_LOCAL_DEV) @@ -242,17 +248,17 @@ def put(self, request: Request) -> Response: if not DISABLE_SSO_CHECK_FOR_LOCAL_DEV and not is_self_hosted(): if Superuser.org_id: - superuser_org = organization_service.get_organization_by_id(id=Superuser.org_id) + superuser_org = organization_service.get_organization_by_id( + id=Superuser.org_id, include_teams=False, include_projects=False + ) - verify_authenticator = ( - False - if superuser_org is None - else features.has( + if superuser_org is not None: + has_u2f_flag = features.has( "organizations:u2f-superuser-form", superuser_org.organization, actor=request.user, ) - ) + verify_authenticator = has_u2f_flag if verify_authenticator: if not Authenticator.objects.filter( @@ -261,6 +267,15 @@ def put(self, request: Request) -> Response: return Response( {"detail": {"code": "no_u2f"}}, status=status.HTTP_403_FORBIDDEN ) + logger.info( + "auth-index.put", + extra={ + "organization": superuser_org, + "u2f_flag": has_u2f_flag, + "user": request.user.id, + "verify_authenticator": verify_authenticator, + }, + ) try: authenticated = 
self._validate_superuser(validator, request, verify_authenticator) except ValidationError: diff --git a/src/sentry/api/endpoints/chunk.py b/src/sentry/api/endpoints/chunk.py index fefc9541b01495..13e7d645d47536 100644 --- a/src/sentry/api/endpoints/chunk.py +++ b/src/sentry/api/endpoints/chunk.py @@ -14,6 +14,7 @@ from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases.organization import OrganizationEndpoint, OrganizationReleasePermission +from sentry.api.utils import generate_region_url from sentry.models.files.fileblob import FileBlob from sentry.ratelimits.config import RateLimitConfig from sentry.utils.files import get_max_file_size @@ -81,7 +82,12 @@ def get(self, request: Request, organization) -> Response: url = relative_url.lstrip(API_PREFIX) # Otherwise, if we do not support them, return an absolute, versioned endpoint with a default, system-wide prefix else: - url = absolute_uri(relative_url) + # We need to generate region specific upload URLs when possible to avoid hitting the API proxy + # which tends to cause timeouts and performance issues for uploads. 
+ base_url = None + if options.get("hybrid_cloud.use_region_specific_upload_url"): + base_url = generate_region_url() + url = absolute_uri(relative_url, base_url) else: # If user overridden upload url prefix, we want an absolute, versioned endpoint, with user-configured prefix url = absolute_uri(relative_url, endpoint) diff --git a/src/sentry/api/endpoints/group_ai_autofix.py b/src/sentry/api/endpoints/group_ai_autofix.py index e53edea874e4b5..a9db0ac70e500f 100644 --- a/src/sentry/api/endpoints/group_ai_autofix.py +++ b/src/sentry/api/endpoints/group_ai_autofix.py @@ -112,6 +112,7 @@ def _call_autofix( repos: list[dict], event_entries: list[dict], additional_context: str, + timeout_secs: int, ): response = requests.post( f"{settings.SEER_AUTOFIX_URL}/v0/automation/autofix", @@ -123,10 +124,12 @@ def _call_autofix( "issue": { "id": group.id, "title": group.title, - "short_id": group.short_id, + "short_id": group.qualified_short_id, "events": [{"entries": event_entries}], }, "additional_context": additional_context, + "timeout_secs": timeout_secs, + "last_updated": datetime.now().isoformat(), "invoking_user": ( { "id": user.id, @@ -192,7 +195,12 @@ def post(self, request: Request, group: Group) -> Response: try: self._call_autofix( - request.user, group, repos, event_entries, data.get("additional_context", "") + request.user, + group, + repos, + event_entries, + data.get("additional_context", ""), + TIMEOUT_SECONDS, ) # Mark the task as completed after TIMEOUT_SECONDS diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/details.py b/src/sentry/api/endpoints/integrations/sentry_apps/details.py index 1248e6c6fa601b..78d52975bdef9a 100644 --- a/src/sentry/api/endpoints/integrations/sentry_apps/details.py +++ b/src/sentry/api/endpoints/integrations/sentry_apps/details.py @@ -58,7 +58,7 @@ def put(self, request: Request, sentry_app) -> Response: status=403, ) owner_context = organization_service.get_organization_by_id( - id=sentry_app.owner_id, user_id=None + 
id=sentry_app.owner_id, user_id=None, include_projects=False, include_teams=False ) if ( owner_context diff --git a/src/sentry/api/endpoints/organization_ddm.py b/src/sentry/api/endpoints/organization_ddm.py index 8e1d01e9db3e41..fec9593b918e04 100644 --- a/src/sentry/api/endpoints/organization_ddm.py +++ b/src/sentry/api/endpoints/organization_ddm.py @@ -17,10 +17,7 @@ from sentry.exceptions import InvalidParams from sentry.models.organization import Organization from sentry.models.project import Project -from sentry.sentry_metrics.querying.errors import ( - LatestReleaseNotFoundError, - TooManyCodeLocationsRequestedError, -) +from sentry.sentry_metrics.querying.errors import LatestReleaseNotFoundError from sentry.sentry_metrics.querying.metadata import ( MetricCodeLocations, MetricCorrelations, @@ -126,8 +123,6 @@ def get(self, request: Request, organization) -> Response: ) except LatestReleaseNotFoundError as e: return Response(status=404, data={"detail": str(e)}) - except TooManyCodeLocationsRequestedError as e: - return Response(status=400, data={"detail": str(e)}) response[meta_type.value] = serialize( data, request.user, METRIC_META_TYPE_SERIALIZER[meta_type.value] diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py index 03f6d5361bdef7..be83db23f3dbbe 100644 --- a/src/sentry/api/endpoints/organization_events.py +++ b/src/sentry/api/endpoints/organization_events.py @@ -26,7 +26,7 @@ from sentry.snuba.metrics.extraction import MetricSpecType from sentry.snuba.referrer import Referrer from sentry.types.ratelimit import RateLimit, RateLimitCategory -from sentry.utils.snuba import SnubaError, SnubaTSResult +from sentry.utils.snuba import SnubaError logger = logging.getLogger(__name__) @@ -324,8 +324,13 @@ def fn(offset, limit) -> dict[str, Any]: try: widget = DashboardWidget.objects.get(id=dashboard_widget_id) does_widget_have_split = widget.discover_widget_split is not None + has_override_feature = 
features.has( + "organizations:performance-discover-widget-split-override-save", + organization, + actor=request.user, + ) - if does_widget_have_split: + if does_widget_have_split and not has_override_feature: # This is essentially cached behaviour and we skip the check split_query = scoped_query if widget.discover_widget_split == DashboardWidgetTypes.ERROR_EVENTS: @@ -347,12 +352,15 @@ def fn(offset, limit) -> dict[str, Any]: has_errors = len(error_results["data"]) > 0 except SnubaError: has_errors = False + error_results = None original_results = _data_fn(scopedDataset, offset, limit, scoped_query) - if isinstance(original_results, SnubaTSResult): - dataset_meta = original_results.data.get("meta", {}) + if original_results.get("data"): + dataset_meta = original_results.get("data").get("meta", {}) else: - dataset_meta = list(original_results.values())[0].data.get("meta", {}) + dataset_meta = ( + list(original_results.values())[0].get("data").get("meta", {}) + ) using_metrics = dataset_meta.get("isMetricsData", False) or dataset_meta.get( "isMetricsExtractedData", False ) diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index 357980de4e12d4..7f237696d793e5 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -16,7 +16,6 @@ from sentry.models.organization import Organization from sentry.snuba import ( discover, - errors, functions, metrics_enhanced_performance, metrics_performance, @@ -214,7 +213,6 @@ def get(self, request: Request, organization: Organization) -> Response: if dataset in [ discover, - errors, functions, metrics_performance, metrics_enhanced_performance, @@ -323,8 +321,13 @@ def fn( try: widget = DashboardWidget.objects.get(id=dashboard_widget_id) does_widget_have_split = widget.discover_widget_split is not None + has_override_feature = features.has( + 
"organizations:performance-discover-widget-split-override-save", + organization, + actor=request.user, + ) - if does_widget_have_split: + if does_widget_have_split and not has_override_feature: # This is essentially cached behaviour and we skip the check split_query = query if widget.discover_widget_split == DashboardWidgetTypes.ERROR_EVENTS: diff --git a/src/sentry/api/endpoints/organization_metrics.py b/src/sentry/api/endpoints/organization_metrics.py index f8a0f9c0a203f3..39038ffe4c82ea 100644 --- a/src/sentry/api/endpoints/organization_metrics.py +++ b/src/sentry/api/endpoints/organization_metrics.py @@ -35,7 +35,6 @@ InvalidMetricsQueryError, LatestReleaseNotFoundError, MetricsQueryExecutionError, - TooManyCodeLocationsRequestedError, ) from sentry.sentry_metrics.querying.metadata import MetricCodeLocations, get_metric_code_locations from sentry.sentry_metrics.querying.samples_list import get_sample_list_executor_cls @@ -67,6 +66,13 @@ class MetricMetaType(Enum): MetricMetaType.CODE_LOCATIONS.value: MetricCodeLocationsSerializer(), } +DEFAULT_USE_CASE_IDS = [ + UseCaseID.TRANSACTIONS, + UseCaseID.SESSIONS, + UseCaseID.SPANS, + UseCaseID.CUSTOM, +] + def get_use_case_id(request: Request) -> UseCaseID: """ @@ -83,6 +89,22 @@ def get_use_case_id(request: Request) -> UseCaseID: ) +def get_use_case_ids(request: Request) -> Sequence[UseCaseID]: + """ + Gets use case ids from the query params and validates them against the `UseCaseID` enum type. + + If an empty list is supplied, the use case ids in `DEFAULT_USE_CASE_IDS` will be used. + """ + + try: + use_case_params = request.GET.getlist("useCase", DEFAULT_USE_CASE_IDS) + return [string_to_use_case_id(use_case_param) for use_case_param in use_case_params] + except ValueError: + raise ParseError( + detail=f"Invalid useCase parameter. 
Please use one of: {[uc.value for uc in UseCaseID]}" + ) + + @region_silo_endpoint class OrganizationMetricsDetailsEndpoint(OrganizationEndpoint): publish_status = { @@ -101,7 +123,7 @@ def get(self, request: Request, organization) -> Response: start, end = get_date_range_from_params(request.GET) metrics = get_metrics_meta( - projects=projects, use_case_id=get_use_case_id(request), start=start, end=end + projects=projects, use_case_ids=get_use_case_ids(request), start=start, end=end ) return Response(metrics, status=200) @@ -452,6 +474,7 @@ class MetricsSamplesSerializer(serializers.Serializer): field = serializers.ListField(required=True, allow_empty=False, child=serializers.CharField()) max = serializers.FloatField(required=False) min = serializers.FloatField(required=False) + operation = serializers.CharField(required=False) query = serializers.CharField(required=False) referrer = serializers.CharField(required=False) sort = serializers.CharField(required=False) @@ -510,6 +533,7 @@ def get(self, request: Request, organization: Organization) -> Response: params, snuba_params, serialized["field"], + serialized.get("operation"), serialized.get("query", ""), serialized.get("min"), serialized.get("max"), @@ -585,8 +609,6 @@ def get(self, request: Request, organization) -> Response: ) except LatestReleaseNotFoundError as e: return Response(status=404, data={"detail": str(e)}) - except TooManyCodeLocationsRequestedError as e: - return Response(status=400, data={"detail": str(e)}) response[meta_type.value] = serialize( data, request.user, METRIC_META_TYPE_SERIALIZER[meta_type.value] diff --git a/src/sentry/api/endpoints/organization_profiling_functions.py b/src/sentry/api/endpoints/organization_profiling_functions.py index 4ca349f751defb..bb8f4aa2084426 100644 --- a/src/sentry/api/endpoints/organization_profiling_functions.py +++ b/src/sentry/api/endpoints/organization_profiling_functions.py @@ -91,24 +91,25 @@ def get(self, request: Request, organization: Organization) 
-> Response: return Response(serializer.errors, status=400) data = serializer.validated_data - top_functions = functions.query( - selected_columns=[ - "project.id", - "fingerprint", - "package", - "function", - "count()", - "examples()", - ], - query=data.get("query"), - params=params, - orderby=["-count()"], - limit=TOP_FUNCTIONS_LIMIT, - referrer=Referrer.API_PROFILING_FUNCTION_TRENDS_TOP_EVENTS.value, - auto_aggregations=True, - use_aggregate_conditions=True, - transform_alias_to_input_format=True, - ) + with handle_query_errors(): + top_functions = functions.query( + selected_columns=[ + "project.id", + "fingerprint", + "package", + "function", + "count()", + "examples()", + ], + query=data.get("query"), + params=params, + orderby=["-count()"], + limit=TOP_FUNCTIONS_LIMIT, + referrer=Referrer.API_PROFILING_FUNCTION_TRENDS_TOP_EVENTS.value, + auto_aggregations=True, + use_aggregate_conditions=True, + transform_alias_to_input_format=True, + ) def get_event_stats(_columns, query, params, _rollup, zerofill_results, _comparison_delta): rollup = get_rollup_from_range(params["end"] - params["start"]) diff --git a/src/sentry/api/endpoints/project_details.py b/src/sentry/api/endpoints/project_details.py index d6ca3460395363..a5f85689a7394f 100644 --- a/src/sentry/api/endpoints/project_details.py +++ b/src/sentry/api/endpoints/project_details.py @@ -817,10 +817,10 @@ def put(self, request: Request, project) -> Response: "sentry:replay_rage_click_issues", bool(options["sentry:replay_rage_click_issues"]), ) - if "sentry:feedback_user_report_notification" in options: + if "sentry:feedback_user_report_notifications" in options: project.update_option( - "sentry:feedback_user_report_notification", - bool(options["sentry:feedback_user_report_notification"]), + "sentry:feedback_user_report_notifications", + bool(options["sentry:feedback_user_report_notifications"]), ) if "sentry:feedback_ai_spam_detection" in options: project.update_option( diff --git 
a/src/sentry/api/endpoints/rule_snooze.py b/src/sentry/api/endpoints/rule_snooze.py index e1cf268de7205d..98eb0eb9423a9c 100644 --- a/src/sentry/api/endpoints/rule_snooze.py +++ b/src/sentry/api/endpoints/rule_snooze.py @@ -12,7 +12,7 @@ from sentry.api.bases.project import ProjectAlertRulePermission, ProjectEndpoint from sentry.api.serializers import Serializer, register, serialize from sentry.api.serializers.rest_framework.base import CamelSnakeSerializer -from sentry.incidents.models import AlertRule +from sentry.incidents.models.alert_rule import AlertRule from sentry.models.organization import Organization from sentry.models.organizationmember import OrganizationMember from sentry.models.rule import Rule diff --git a/src/sentry/api/endpoints/seer_rpc.py b/src/sentry/api/endpoints/seer_rpc.py index c2c2537721b15b..cde7f37b58a68e 100644 --- a/src/sentry/api/endpoints/seer_rpc.py +++ b/src/sentry/api/endpoints/seer_rpc.py @@ -5,6 +5,7 @@ from django.conf import settings from django.contrib.auth.models import AnonymousUser +from django.core.exceptions import ObjectDoesNotExist from rest_framework.exceptions import ( AuthenticationFailed, NotFound, @@ -25,6 +26,7 @@ from sentry.services.hybrid_cloud.sig import SerializableFunctionValueException from sentry.silo.base import SiloMode from sentry.utils import json +from sentry.utils.env import in_test_environment def compare_signature(url: str, body: bytes, signature: str) -> bool: @@ -131,8 +133,13 @@ def post(self, request: Request, method_name: str) -> Response: except SerializableFunctionValueException as e: capture_exception() raise ParseError from e + except ObjectDoesNotExist as e: + # Let this fall through, this is normal. 
+ capture_exception() + raise NotFound from e except Exception as e: - # Produce more detailed log + if in_test_environment(): + raise if settings.DEBUG: raise Exception(f"Problem processing seer rpc endpoint {method_name}") from e capture_exception() @@ -174,7 +181,36 @@ def on_autofix_complete(*, issue_id: int, status: str, steps: list[dict], fix: d group.save() +def get_autofix_state(*, issue_id: int) -> dict: + group: Group = Group.objects.get(id=issue_id) + + metadata = group.data.get("metadata", {}) + autofix_data = metadata.get("autofix", {}) + + return autofix_data + + seer_method_registry = { "on_autofix_step_update": on_autofix_step_update, "on_autofix_complete": on_autofix_complete, + "get_autofix_state": get_autofix_state, } + + +def generate_request_signature(url_path: str, body: bytes) -> str: + """ + Generate a signature for the request body + with the first shared secret. If there are other + shared secrets in the list they are only to be used + for verification during key rotation. 
+ """ + if not settings.SEER_RPC_SHARED_SECRET: + raise RpcAuthenticationSetupException("Cannot sign RPC requests without RPC_SHARED_SECRET") + + signature_input = b"%s:%s" % ( + url_path.encode("utf8"), + body, + ) + secret = settings.SEER_RPC_SHARED_SECRET[0] + signature = hmac.new(secret.encode("utf-8"), signature_input, hashlib.sha256).hexdigest() + return f"rpc0:{signature}" diff --git a/src/sentry/api/endpoints/team_alerts_triggered.py b/src/sentry/api/endpoints/team_alerts_triggered.py index 60ab51d1fd351a..7ba02e36b837ab 100644 --- a/src/sentry/api/endpoints/team_alerts_triggered.py +++ b/src/sentry/api/endpoints/team_alerts_triggered.py @@ -14,8 +14,8 @@ from sentry.api.serializers import serialize from sentry.api.serializers.models.alert_rule import AlertRuleSerializer from sentry.api.utils import get_date_range_from_params -from sentry.incidents.models import ( - AlertRule, +from sentry.incidents.models.alert_rule import AlertRule +from sentry.incidents.models.incident import ( IncidentActivity, IncidentActivityType, IncidentProject, diff --git a/src/sentry/api/endpoints/team_projects.py b/src/sentry/api/endpoints/team_projects.py index 50604514966f8e..108215210be5d2 100644 --- a/src/sentry/api/endpoints/team_projects.py +++ b/src/sentry/api/endpoints/team_projects.py @@ -4,7 +4,7 @@ from rest_framework.request import Request from rest_framework.response import Response -from sentry import audit_log, features +from sentry import audit_log from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import EnvironmentMixin, region_silo_endpoint @@ -182,11 +182,7 @@ def post(self, request: Request, team) -> Response: # XXX: create sample event? 
# Turns on some inbound filters by default for new Javascript platform projects - if ( - features.has("organizations:default-inbound-filters", team.organization) - and project.platform - and project.platform.startswith("javascript") - ): + if project.platform and project.platform.startswith("javascript"): set_default_inbound_filters(project, team.organization) self.create_audit_entry( diff --git a/src/sentry/api/helpers/group_index/update.py b/src/sentry/api/helpers/group_index/update.py index 8f627049fd9ff1..a5b82494774ee4 100644 --- a/src/sentry/api/helpers/group_index/update.py +++ b/src/sentry/api/helpers/group_index/update.py @@ -597,7 +597,6 @@ def update_groups( acting_user=acting_user, status_details=result.get("statusDetails", {}), sender=update_groups, - activity_type=activity_type, ) # XXX (ahmed): hack to get the activities to work properly on issues page. Not sure of diff --git a/src/sentry/api/permissions.py b/src/sentry/api/permissions.py index afba82240fecaf..333fb579833cc6 100644 --- a/src/sentry/api/permissions.py +++ b/src/sentry/api/permissions.py @@ -78,16 +78,36 @@ class (that is not StaffPermission) require this mixin because staff does not gi staff_allowed_methods = {"GET", "POST", "PUT", "DELETE"} def has_permission(self, request, *args, **kwargs) -> bool: - # Check for staff before calling super to avoid catching exceptions from super - if request.method in self.staff_allowed_methods and is_active_staff(request): + """ + Calls the parent class's has_permission method. If it returns False or + raises an exception and the method is allowed by the mixin, we then check + if the request is from an active staff. Raised exceptions are not caught + if the request is not allowed by the mixin or from an active staff. 
+ """ + try: + if super().has_permission(request, *args, **kwargs): + return True + except Exception: + if not (request.method in self.staff_allowed_methods and is_active_staff(request)): + raise return True - return super().has_permission(request, *args, **kwargs) + return request.method in self.staff_allowed_methods and is_active_staff(request) def has_object_permission(self, request, *args, **kwargs) -> bool: - # Check for staff before calling super to avoid catching exceptions from super - if request.method in self.staff_allowed_methods and is_active_staff(request): + """ + Calls the parent class's has_object_permission method. If it returns False or + raises an exception and the method is allowed by the mixin, we then check + if the request is from an active staff. Raised exceptions are not caught + if the request is not allowed by the mixin or from an active staff. + """ + try: + if super().has_object_permission(request, *args, **kwargs): + return True + except Exception: + if not (request.method in self.staff_allowed_methods and is_active_staff(request)): + raise return True - return super().has_object_permission(request, *args, **kwargs) + return request.method in self.staff_allowed_methods and is_active_staff(request) def is_not_2fa_compliant(self, request, *args, **kwargs) -> bool: return super().is_not_2fa_compliant(request, *args, **kwargs) and not is_active_staff( diff --git a/src/sentry/api/serializers/models/alert_rule.py b/src/sentry/api/serializers/models/alert_rule.py index 063560be0bf40e..ae3bf16b8fcedd 100644 --- a/src/sentry/api/serializers/models/alert_rule.py +++ b/src/sentry/api/serializers/models/alert_rule.py @@ -11,7 +11,7 @@ from sentry import features from sentry.api.serializers import Serializer, register, serialize from sentry.api.serializers.models.rule import RuleSerializer -from sentry.incidents.models import ( +from sentry.incidents.models.alert_rule import ( AlertRule, AlertRuleActivity, AlertRuleActivityType, @@ -19,8 +19,8 @@ 
AlertRuleMonitorType, AlertRuleTrigger, AlertRuleTriggerAction, - Incident, ) +from sentry.incidents.models.incident import Incident from sentry.models.actor import ACTOR_TYPES, Actor, actor_type_to_string from sentry.models.rule import Rule from sentry.models.rulesnooze import RuleSnooze diff --git a/src/sentry/api/serializers/models/alert_rule_trigger.py b/src/sentry/api/serializers/models/alert_rule_trigger.py index da668a14342490..c7bb264383077f 100644 --- a/src/sentry/api/serializers/models/alert_rule_trigger.py +++ b/src/sentry/api/serializers/models/alert_rule_trigger.py @@ -5,7 +5,7 @@ from sentry.api.serializers import Serializer, register, serialize from sentry.incidents.endpoints.utils import translate_threshold -from sentry.incidents.models import ( +from sentry.incidents.models.alert_rule import ( AlertRuleTrigger, AlertRuleTriggerAction, AlertRuleTriggerExclusion, diff --git a/src/sentry/api/serializers/models/alert_rule_trigger_action.py b/src/sentry/api/serializers/models/alert_rule_trigger_action.py index 060d6f4d0615a6..7e552f9d6c2e23 100644 --- a/src/sentry/api/serializers/models/alert_rule_trigger_action.py +++ b/src/sentry/api/serializers/models/alert_rule_trigger_action.py @@ -1,7 +1,7 @@ import logging from sentry.api.serializers import Serializer, register -from sentry.incidents.models import AlertRuleTriggerAction +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction logger = logging.getLogger(__name__) diff --git a/src/sentry/api/serializers/models/incident.py b/src/sentry/api/serializers/models/incident.py index 29e3c48a89c779..2ec3f96f7dd536 100644 --- a/src/sentry/api/serializers/models/incident.py +++ b/src/sentry/api/serializers/models/incident.py @@ -4,7 +4,7 @@ from sentry.api.serializers import Serializer, register, serialize from sentry.api.serializers.models.alert_rule import AlertRuleSerializer -from sentry.incidents.models import ( +from sentry.incidents.models.incident import ( Incident, IncidentActivity, 
IncidentProject, diff --git a/src/sentry/api/serializers/models/incidentactivity.py b/src/sentry/api/serializers/models/incidentactivity.py index 36c35db2077d0b..598e5db1ebe86a 100644 --- a/src/sentry/api/serializers/models/incidentactivity.py +++ b/src/sentry/api/serializers/models/incidentactivity.py @@ -1,7 +1,7 @@ from django.db.models import prefetch_related_objects from sentry.api.serializers import Serializer, register -from sentry.incidents.models import IncidentActivity +from sentry.incidents.models.incident import IncidentActivity from sentry.services.hybrid_cloud.user.serial import serialize_generic_user from sentry.services.hybrid_cloud.user.service import user_service diff --git a/src/sentry/api/serializers/models/incidentseen.py b/src/sentry/api/serializers/models/incidentseen.py index d9e92469c12640..ec96c57523529f 100644 --- a/src/sentry/api/serializers/models/incidentseen.py +++ b/src/sentry/api/serializers/models/incidentseen.py @@ -1,5 +1,5 @@ from sentry.api.serializers import Serializer, register -from sentry.incidents.models import IncidentSeen +from sentry.incidents.models.incident import IncidentSeen from sentry.services.hybrid_cloud.user.serial import serialize_generic_user from sentry.services.hybrid_cloud.user.service import user_service diff --git a/src/sentry/api/serializers/models/project.py b/src/sentry/api/serializers/models/project.py index 4475c5146e411d..59c2519e113013 100644 --- a/src/sentry/api/serializers/models/project.py +++ b/src/sentry/api/serializers/models/project.py @@ -233,8 +233,8 @@ def format_options(attrs: dict[str, Any]) -> dict[str, Any]: options.get(f"sentry:{FilterTypes.ERROR_MESSAGES}", []) ), "feedback:branding": options.get("feedback:branding", "1") == "1", - "sentry:feedback_user_report_notification": bool( - options.get("sentry:feedback_user_report_notification") + "sentry:feedback_user_report_notifications": bool( + options.get("sentry:feedback_user_report_notifications") ), 
"sentry:feedback_ai_spam_detection": bool(options.get("sentry:feedback_ai_spam_detection")), "sentry:replay_rage_click_issues": options.get("sentry:replay_rage_click_issues"), diff --git a/src/sentry/api/utils.py b/src/sentry/api/utils.py index ace64749aca00d..56d9d903b3d72c 100644 --- a/src/sentry/api/utils.py +++ b/src/sentry/api/utils.py @@ -20,6 +20,7 @@ from rest_framework.exceptions import APIException, ParseError from rest_framework.request import Request from sentry_sdk import Scope +from urllib3.exceptions import MaxRetryError, ReadTimeoutError from sentry import options from sentry.auth.staff import is_active_staff @@ -424,6 +425,7 @@ def handle_query_errors() -> Generator[None, None, None]: error, ( RateLimitExceeded, + ReadTimeoutError, QueryMemoryLimitExceeded, QueryExecutionTimeMaximum, QueryTooManySimultaneous, @@ -447,6 +449,12 @@ def handle_query_errors() -> Generator[None, None, None]: ): sentry_sdk.capture_exception(error) message = "Internal error. Your query failed to run." + elif isinstance( + error, + (MaxRetryError), + ): + sentry_sdk.capture_message(str(error), level="warning") + message = "Internal error. Your query failed to run." 
else: sentry_sdk.capture_exception(error) raise APIException(detail=message) diff --git a/src/sentry/auth/superuser.py b/src/sentry/auth/superuser.py index f2f15518f24048..5747c7ea55c556 100644 --- a/src/sentry/auth/superuser.py +++ b/src/sentry/auth/superuser.py @@ -179,7 +179,15 @@ def __init__(self, request, allowed_ips=UNSET, org_id=UNSET, current_datetime=No @staticmethod def _needs_validation(): - if is_self_hosted() or DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL: + self_hosted = is_self_hosted() + logger.info( + "superuser.needs-validation", + extra={ + "DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL": DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL, + "self_hosted": self_hosted, + }, + ) + if self_hosted or DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL: return False return settings.VALIDATE_SUPERUSER_ACCESS_CATEGORY_AND_REASON diff --git a/src/sentry/backup/imports.py b/src/sentry/backup/imports.py index c65ee6ca0d8624..85b1a65fe55a18 100644 --- a/src/sentry/backup/imports.py +++ b/src/sentry/backup/imports.py @@ -47,6 +47,20 @@ "import_in_global_scope", ) +# We have to be careful when removing fields from our model schemas, since exports created using +# the old-but-still-in-the-support-window versions could have those fields set in the data they +# provide. This dict serves as a map of all fields that have been deleted on HEAD but are still +# valid in at least one of the versions we support. For example, since our current version +# support window is two minor versions back, if we delete a field at version 24.5.N, we must +# include an entry in this map for that field until that version is out of the support window +# (in this case, we can remove shim once version 24.7.0 is released). +# +# NOTE TO FUTURE EDITORS: please keep the `DELETED_FIELDS` dict, and the subsequent `if` clause, +# around even if the dict is empty, to ensure that there is a ready place to pop shims into. 
For +# each entry in this dict, please leave a TODO comment pointed to a github issue for removing +# the shim, noting in the comment which self-hosted release will trigger the removal. +DELETED_FIELDS: dict[str, set[str]] = {} + class ImportingError(Exception): def __init__(self, context: RpcImportError) -> None: @@ -135,6 +149,21 @@ def _import( if decryptor is not None else src.read().decode("utf-8") ) + + if len(DELETED_FIELDS) > 0: + # Parse the content JSON and remove and fields that we have marked for deletion in the + # function. + shimmed_models = set(DELETED_FIELDS.keys()) + content_as_json = json.loads(content) # type: ignore + for json_model in content_as_json: + if json_model["model"] in shimmed_models: + fields_to_remove = DELETED_FIELDS[json_model["model"]] + for field in fields_to_remove: + json_model["fields"].pop(field, None) + + # Return the content to byte form, as that is what the Django deserializer expects. + content = json.dumps(content_as_json) + filters = [] if filter_by is not None: filters.append(filter_by) diff --git a/src/sentry/conf/api_pagination_allowlist_do_not_modify.py b/src/sentry/conf/api_pagination_allowlist_do_not_modify.py new file mode 100644 index 00000000000000..445db3cad1a5ce --- /dev/null +++ b/src/sentry/conf/api_pagination_allowlist_do_not_modify.py @@ -0,0 +1,101 @@ +""" + This list is tracking old api endpoints that don't correctly implement pagination. + The goal is to eventually add pagination for all and shrink this list. 
+ DO NOT ADD ANY NEW APIS +""" +SENTRY_API_PAGINATION_ALLOWLIST_DO_NOT_MODIFY = { + "GroupTagsEndpoint", + "GroupIntegrationsEndpoint", + "ProjectServiceHookStatsEndpoint", + "OrganizationProcessingIssuesEndpoint", + "OrganizationEnvironmentsEndpoint", + "OrganizationMetricsTagDetailsEndpoint", + "OrganizationProjectsEndpoint", + "SentryInternalAppTokensEndpoint", + "ProjectPlatformsEndpoint", + "UserRolesEndpoint", + "AuthenticatorIndexEndpoint", + "OrganizationUserTeamsEndpoint", + "GroupParticipantsEndpoint", + "AssistantEndpoint", + "ApiTokensEndpoint", + "UserPermissionsConfigEndpoint", + "ProjectFiltersEndpoint", + "InternalQueueTasksEndpoint", + "TeamStatsEndpoint", + "OrganizationPluginsConfigsEndpoint", + "ProjectIssuesResolvedInReleaseEndpoint", + "OrganizationUsersEndpoint", + "ProjectEnvironmentsEndpoint", + "ProjectUserStatsEndpoint", + "ProjectUsersEndpoint", + "OrganizationEventsRootCauseAnalysisEndpoint", + "UserAuthenticatorIndexEndpoint", + "OrganizationDeriveCodeMappingsEndpoint", + "UserUserRolesEndpoint", + "UserSocialIdentitiesIndexEndpoint", + "OrganizationEventsNewTrendsStatsEndpoint", + "OrganizationMetricsTagsEndpoint", + "OrganizationIntegrationServerlessFunctionsEndpoint", + "OrganizationRepositoriesEndpoint", + "OrganizationSentryFunctionEndpoint", + "GroupSimilarIssuesEmbeddingsEndpoint", + "OrganizationMissingMembersEndpoint", + "OrganizationSdkUpdatesEndpoint", + "OrganizationUserReportsEndpoint", + "OrganizationReleasesEndpoint", + "ProjectGroupingConfigsEndpoint", + "OrganizationAccessRequestDetailsEndpoint", + "OrganizationIndexEndpoint", + "ProjectPluginsEndpoint", + "TeamGroupsOldEndpoint", + "UserPermissionsEndpoint", + "ReleaseThresholdEndpoint", + "UserIdentityConfigEndpoint", + "UserSubscriptionsEndpoint", + "BuiltinSymbolSourcesEndpoint", + "OrganizationRelayUsage", + "OrganizationApiKeyIndexEndpoint", + "OrganizationMetricsDetailsEndpoint", + "GroupStatsEndpoint", + "ProjectMemberIndexEndpoint", + 
"ProjectReleaseSetupCompletionEndpoint", + "OrganizationPluginsEndpoint", + "SentryAppRequestsEndpoint", + "ProjectSymbolSourcesEndpoint", + "SentryAppsStatsEndpoint", + "OrganizationStatsEndpoint", + "ProjectArtifactLookupEndpoint", + "UserNotificationSettingsProvidersEndpoint", + "IntegrationFeaturesEndpoint", + "OrganizationAuthProvidersEndpoint", + "UserEmailsEndpoint", + "ProjectCodeOwnersEndpoint", + "OrganizationUserDetailsEndpoint", + "OrgAuthTokensEndpoint", + "OrganizationProfilingFiltersEndpoint", + "GroupingConfigsEndpoint", + "ProjectStatsEndpoint", + "OrganizationRecentSearchesEndpoint", + "UserNotificationSettingsOptionsEndpoint", + "ProjectAgnosticRuleConditionsEndpoint", + "JiraServerSearchEndpointTest", + "GroupEventsEndpoint", + "PluginGroupEndpoint", + "KeyTransactionEndpoint", + "DiscoverSavedQueriesEndpoint", + "OrganizationEventsFacetsEndpoint", + "OrganizationEventsRelatedIssuesEndpoint", + "OrganizationEventsSpansHistogramEndpoint", + "OrganizationEventsVitalsEndpoint", + "OrganizationGroupIndexStatsEndpoint", + "OrganizationIssuesResolvedInReleaseEndpoint", + "OrganizationTagsEndpoint", + "ProjectGroupIndexEndpoint", + "ProjectTagsEndpoint", + "OrganizationSearchesEndpoint", + "OrganizationAlertRuleAvailableActionIndexEndpoint", + "JiraSearchEndpoint", + "GithubSharedSearchEndpoint", + "ProjectMonitorStatsEndpoint", +} diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 951c76fcf570a7..11d4deeb1a1574 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -17,11 +17,13 @@ from urllib.parse import urlparse import sentry +from sentry.conf.api_pagination_allowlist_do_not_modify import ( + SENTRY_API_PAGINATION_ALLOWLIST_DO_NOT_MODIFY, +) from sentry.conf.types.kafka_definition import ConsumerDefinition from sentry.conf.types.logging_config import LoggingConfig from sentry.conf.types.role_dict import RoleDict from sentry.conf.types.sdk_config import ServerSdkConfig -from sentry.conf.types.topic_definition 
import TopicDefinition from sentry.utils import json # NOQA (used in getsentry config) from sentry.utils.celery import crontab_with_minute_jitter from sentry.utils.types import Type, type_from_value @@ -1455,6 +1457,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "auth:enterprise-superuser-read-write": False, # Enables user registration. "auth:register": True, + # Enables activated alert rules + "organizations:activated-alert-rules": False, # Enable advanced search features, like negation and wildcard matching. "organizations:advanced-search": True, # Enables alert creation on indexed events in UI (use for PoC/testing only) @@ -1480,8 +1484,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:codecov-commit-sha-from-git-blame": False, # The overall flag for codecov integration, gated by plans. "organizations:codecov-integration": False, - # Enable the Commit Context feature - "organizations:commit-context": True, # Enable alerting based on crash free sessions/users "organizations:crash-rate-alerts": True, # Enable creating organizations within sentry @@ -1517,8 +1519,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:dashboards-rh-widget": False, # Enables experimental WIP ddm related features "organizations:ddm-experimental": False, - # Enables ddm formula features - "organizations:ddm-formulas": False, # Delightful Developer Metrics (DDM): # Enable sidebar menu item and all UI (requires custom-metrics flag as well) "organizations:ddm-ui": False, @@ -1528,10 +1528,10 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:ddm-dashboard-import": False, # Enable the default alert at project creation to be the high priority alert "organizations:default-high-priority-alerts": False, - # Enable inbound filters to be turned on by default for new Javascript Projects - "organizations:default-inbound-filters": False, # Enables automatically deriving of code mappings 
"organizations:derive-code-mappings": True, + # Enables automatically deriving of PHP code mappings + "organizations:derive-code-mappings-php": False, # Enable device.class as a selectable column "organizations:device-classification": False, # Enables synthesis of device.class in ingest @@ -1721,6 +1721,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:performance-database-view-percentiles": False, # Enable UI sending a discover split for widget "organizations:performance-discover-widget-split-ui": False, + # Enable backend overriding and always making a fresh split decision + "organizations:performance-discover-widget-split-override-save": False, # Enables updated all events tab in a performance issue "organizations:performance-issues-all-events-tab": False, # Enable compressed assets performance issue type @@ -1847,10 +1849,10 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:session-replay-enable-canvas": False, # Enable canvas replaying "organizations:session-replay-enable-canvas-replayer": False, - # Enable replay event linking in event processing - "organizations:session-replay-event-linking": False, # Enable linking from 'new issue' email notifs to the issue replay list "organizations:session-replay-issue-emails": False, + # Enable mobile replay player + "organizations:session-replay-mobile-player": False, # Enable the new event linking columns to be queried "organizations:session-replay-new-event-counts": False, # Enable Rage Click Issue Creation In Recording Consumer @@ -2405,6 +2407,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: # Secret key for OpenAI OPENAI_API_KEY: str | None = None +SENTRY_API_PAGINATION_ALLOWLIST = SENTRY_API_PAGINATION_ALLOWLIST_DO_NOT_MODIFY + SENTRY_SCOPES = { "org:read", "org:write", @@ -3459,6 +3463,7 @@ def build_cdc_postgres_init_db_volume(settings: Any) -> dict[str, dict[str, str] KAFKA_SESSIONS_SUBSCRIPTIONS_RESULTS = 
"sessions-subscription-results" KAFKA_METRICS_SUBSCRIPTIONS_RESULTS = "metrics-subscription-results" KAFKA_INGEST_EVENTS = "ingest-events" +KAFKA_INGEST_FEEDBACK_EVENTS = "ingest-feedback-events" KAFKA_INGEST_EVENTS_DLQ = "ingest-events-dlq" KAFKA_INGEST_ATTACHMENTS = "ingest-attachments" KAFKA_INGEST_TRANSACTIONS = "ingest-transactions" @@ -3511,6 +3516,7 @@ def build_cdc_postgres_init_db_volume(settings: Any) -> dict[str, dict[str, str] "sessions-subscription-results": "default", "metrics-subscription-results": "default", "ingest-events": "default", + "ingest-feedback-events": "default", "ingest-attachments": "default", "ingest-transactions": "default", "ingest-metrics": "default", @@ -3531,9 +3537,16 @@ def build_cdc_postgres_init_db_volume(settings: Any) -> dict[str, dict[str, str] "shared-resources-usage": "default", } +from typing import TypedDict + + +class LegacyTopicDefinition(TypedDict): + cluster: str + + # Cluster configuration for each Kafka topic by name. # DEPRECATED -KAFKA_TOPICS: Mapping[str, TopicDefinition] = { +KAFKA_TOPICS: Mapping[str, LegacyTopicDefinition] = { KAFKA_EVENTS: {"cluster": "default"}, KAFKA_EVENTS_COMMIT_LOG: {"cluster": "default"}, KAFKA_TRANSACTIONS: {"cluster": "default"}, @@ -3661,6 +3674,10 @@ def build_cdc_postgres_init_db_volume(settings: Any) -> dict[str, dict[str, str] SENTRY_USE_UWSGI = True +# Configure service wrapper for reprocessing2 state +SENTRY_REPROCESSING_STORE = "sentry.eventstore.reprocessing.redis.RedisReprocessingStore" +SENTRY_REPROCESSING_STORE_OPTIONS = {"cluster": "default"} + # When copying attachments for to-be-reprocessed events into processing store, # how large is an individual file chunk? Each chunk is stored as Redis key. 
SENTRY_REPROCESSING_ATTACHMENT_CHUNK_SIZE = 2**20 @@ -3834,7 +3851,7 @@ def build_cdc_postgres_init_db_volume(settings: Any) -> dict[str, dict[str, str] ENABLE_ANALYTICS = False MAX_SLOW_CONDITION_ISSUE_ALERTS = 100 -MAX_MORE_SLOW_CONDITION_ISSUE_ALERTS = 200 +MAX_MORE_SLOW_CONDITION_ISSUE_ALERTS = 300 MAX_FAST_CONDITION_ISSUE_ALERTS = 500 MAX_QUERY_SUBSCRIPTIONS_PER_ORG = 1000 diff --git a/src/sentry/conf/types/kafka_definition.py b/src/sentry/conf/types/kafka_definition.py index 61820572647de8..2bdf08bad8684f 100644 --- a/src/sentry/conf/types/kafka_definition.py +++ b/src/sentry/conf/types/kafka_definition.py @@ -1,6 +1,6 @@ from __future__ import annotations -from collections.abc import Callable, Mapping, Sequence +from collections.abc import Mapping, Sequence from enum import Enum from typing import Any, Required, TypedDict @@ -26,6 +26,7 @@ class Topic(Enum): METRICS_SUBSCRIPTIONS_RESULTS = "metrics-subscription-results" INGEST_EVENTS = "ingest-events" INGEST_EVENTS_DLQ = "ingest-events-dlq" + INGEST_FEEDBACK_EVENTS = "ingest-feedback-events" INGEST_ATTACHMENTS = "ingest-attachments" INGEST_TRANSACTIONS = "ingest-transactions" INGEST_METRICS = "ingest-metrics" @@ -48,11 +49,8 @@ class Topic(Enum): class ConsumerDefinition(TypedDict, total=False): - # XXX: Eventually only Topic will be accepted here. 
- # For backward compatibility with getsentry, we must also - # support the physical override topic name (str, Callable[str], str) - # while the migration is taking place - topic: Required[Topic | str | Callable[[], str]] + # Default topic + topic: Required[Topic] # Schema validation will be run if true validate_schema: bool | None @@ -70,7 +68,7 @@ class ConsumerDefinition(TypedDict, total=False): synchronize_commit_group_default: str synchronize_commit_log_topic_default: str - dlq_topic: str + dlq_topic: Topic dlq_max_invalid_ratio: float | None dlq_max_consecutive_count: int | None diff --git a/src/sentry/conf/types/topic_definition.py b/src/sentry/conf/types/topic_definition.py index 41992b74d9ad78..bc5aaa44ddef80 100644 --- a/src/sentry/conf/types/topic_definition.py +++ b/src/sentry/conf/types/topic_definition.py @@ -5,3 +5,5 @@ class TopicDefinition(TypedDict): cluster: str + # The topic name may be overridden from the default via KAFKA_TOPIC_OVERRIDES + real_topic_name: str diff --git a/src/sentry/consumers/__init__.py b/src/sentry/consumers/__init__.py index 1f4788fe70b247..e7e9b5a21987d2 100644 --- a/src/sentry/consumers/__init__.py +++ b/src/sentry/consumers/__init__.py @@ -186,48 +186,41 @@ def ingest_events_options() -> list[click.Option]: ), ] - -_INGEST_SPANS_OPTIONS = multiprocessing_options(default_max_batch_size=100) + [ - click.Option(["--output-topic", "output_topic"], type=str, default="snuba-spans"), -] - # consumer name -> consumer definition -# TODO: `topic` should gradually be migrated to the logical topic rather than the overridden -# string. We support both currently for backward compatibility. 
KAFKA_CONSUMERS: Mapping[str, ConsumerDefinition] = { "ingest-profiles": { - "topic": settings.KAFKA_PROFILES, + "topic": Topic.PROFILES, "strategy_factory": "sentry.profiles.consumers.process.factory.ProcessProfileStrategyFactory", }, "ingest-replay-recordings": { - "topic": settings.KAFKA_INGEST_REPLAYS_RECORDINGS, + "topic": Topic.INGEST_REPLAYS_RECORDINGS, "strategy_factory": "sentry.replays.consumers.recording.ProcessReplayRecordingStrategyFactory", "click_options": ingest_replay_recordings_options(), }, "ingest-replay-recordings-buffered": { - "topic": settings.KAFKA_INGEST_REPLAYS_RECORDINGS, + "topic": Topic.INGEST_REPLAYS_RECORDINGS, "strategy_factory": "sentry.replays.consumers.recording_buffered.RecordingBufferedStrategyFactory", "click_options": ingest_replay_recordings_buffered_options(), }, "ingest-monitors": { - "topic": settings.KAFKA_INGEST_MONITORS, + "topic": Topic.INGEST_MONITORS, "strategy_factory": "sentry.monitors.consumers.monitor_consumer.StoreMonitorCheckInStrategyFactory", "click_options": ingest_monitors_options(), }, "billing-metrics-consumer": { - "topic": settings.KAFKA_SNUBA_GENERIC_METRICS, + "topic": Topic.SNUBA_GENERIC_METRICS, "strategy_factory": "sentry.ingest.billing_metrics_consumer.BillingMetricsConsumerStrategyFactory", }, # Known differences to 'sentry run occurrences-ingest-consumer': # - ingest_consumer_types metric tag is missing. 
Use the kafka_topic and # group_id tags provided by run_basic_consumer instead "ingest-occurrences": { - "topic": settings.KAFKA_INGEST_OCCURRENCES, + "topic": Topic.INGEST_OCCURRENCES, "strategy_factory": "sentry.issues.run.OccurrenceStrategyFactory", "click_options": multiprocessing_options(default_max_batch_size=20), }, "events-subscription-results": { - "topic": settings.KAFKA_EVENTS_SUBSCRIPTIONS_RESULTS, + "topic": Topic.EVENTS_SUBSCRIPTIONS_RESULTS, "strategy_factory": "sentry.snuba.query_subscriptions.run.QuerySubscriptionStrategyFactory", "click_options": multiprocessing_options(default_max_batch_size=100), "static_args": { @@ -235,7 +228,7 @@ def ingest_events_options() -> list[click.Option]: }, }, "transactions-subscription-results": { - "topic": settings.KAFKA_TRANSACTIONS_SUBSCRIPTIONS_RESULTS, + "topic": Topic.TRANSACTIONS_SUBSCRIPTIONS_RESULTS, "strategy_factory": "sentry.snuba.query_subscriptions.run.QuerySubscriptionStrategyFactory", "click_options": multiprocessing_options(default_max_batch_size=100), "static_args": { @@ -252,7 +245,7 @@ def ingest_events_options() -> list[click.Option]: }, }, "sessions-subscription-results": { - "topic": settings.KAFKA_SESSIONS_SUBSCRIPTIONS_RESULTS, + "topic": Topic.SESSIONS_SUBSCRIPTIONS_RESULTS, "strategy_factory": "sentry.snuba.query_subscriptions.run.QuerySubscriptionStrategyFactory", "click_options": multiprocessing_options(), "static_args": { @@ -260,7 +253,7 @@ def ingest_events_options() -> list[click.Option]: }, }, "metrics-subscription-results": { - "topic": settings.KAFKA_METRICS_SUBSCRIPTIONS_RESULTS, + "topic": Topic.METRICS_SUBSCRIPTIONS_RESULTS, "strategy_factory": "sentry.snuba.query_subscriptions.run.QuerySubscriptionStrategyFactory", "click_options": multiprocessing_options(default_max_batch_size=100), "static_args": { @@ -268,7 +261,15 @@ def ingest_events_options() -> list[click.Option]: }, }, "ingest-events": { - "topic": settings.KAFKA_INGEST_EVENTS, + "topic": Topic.INGEST_EVENTS, + 
"strategy_factory": "sentry.ingest.consumer.factory.IngestStrategyFactory", + "click_options": ingest_events_options(), + "static_args": { + "consumer_type": "events", + }, + }, + "ingest-feedback-events": { + "topic": settings.KAFKA_INGEST_FEEDBACK_EVENTS, "strategy_factory": "sentry.ingest.consumer.factory.IngestStrategyFactory", "click_options": ingest_events_options(), "static_args": { @@ -276,7 +277,7 @@ def ingest_events_options() -> list[click.Option]: }, }, "ingest-attachments": { - "topic": settings.KAFKA_INGEST_ATTACHMENTS, + "topic": Topic.INGEST_ATTACHMENTS, "strategy_factory": "sentry.ingest.consumer.factory.IngestStrategyFactory", "click_options": ingest_events_options(), "static_args": { @@ -284,7 +285,7 @@ def ingest_events_options() -> list[click.Option]: }, }, "ingest-transactions": { - "topic": settings.KAFKA_INGEST_TRANSACTIONS, + "topic": Topic.INGEST_TRANSACTIONS, "strategy_factory": "sentry.ingest.consumer.factory.IngestStrategyFactory", "click_options": ingest_events_options(), "static_args": { @@ -292,29 +293,29 @@ def ingest_events_options() -> list[click.Option]: }, }, "ingest-metrics": { - "topic": settings.KAFKA_INGEST_METRICS, + "topic": Topic.INGEST_METRICS, "strategy_factory": "sentry.sentry_metrics.consumers.indexer.parallel.MetricsConsumerStrategyFactory", "click_options": _METRICS_INDEXER_OPTIONS, "static_args": { "ingest_profile": "release-health", }, - "dlq_topic": settings.KAFKA_INGEST_METRICS_DLQ, + "dlq_topic": Topic.INGEST_METRICS_DLQ, "dlq_max_invalid_ratio": 0.01, "dlq_max_consecutive_count": 1000, }, "ingest-generic-metrics": { - "topic": settings.KAFKA_INGEST_PERFORMANCE_METRICS, + "topic": Topic.INGEST_PERFORMANCE_METRICS, "strategy_factory": "sentry.sentry_metrics.consumers.indexer.parallel.MetricsConsumerStrategyFactory", "click_options": _METRICS_INDEXER_OPTIONS, "static_args": { "ingest_profile": "performance", }, - "dlq_topic": settings.KAFKA_INGEST_GENERIC_METRICS_DLQ, + "dlq_topic": 
Topic.INGEST_GENERIC_METRICS_DLQ, "dlq_max_invalid_ratio": 0.01, "dlq_max_consecutive_count": 1000, }, "generic-metrics-last-seen-updater": { - "topic": settings.KAFKA_SNUBA_GENERIC_METRICS, + "topic": Topic.SNUBA_GENERIC_METRICS, "strategy_factory": "sentry.sentry_metrics.consumers.last_seen_updater.LastSeenUpdaterStrategyFactory", "click_options": _METRICS_LAST_SEEN_UPDATER_OPTIONS, "static_args": { @@ -322,7 +323,7 @@ def ingest_events_options() -> list[click.Option]: }, }, "metrics-last-seen-updater": { - "topic": settings.KAFKA_SNUBA_METRICS, + "topic": Topic.SNUBA_METRICS, "strategy_factory": "sentry.sentry_metrics.consumers.last_seen_updater.LastSeenUpdaterStrategyFactory", "click_options": _METRICS_LAST_SEEN_UPDATER_OPTIONS, "static_args": { @@ -330,28 +331,28 @@ def ingest_events_options() -> list[click.Option]: }, }, "post-process-forwarder-issue-platform": { - "topic": settings.KAFKA_EVENTSTREAM_GENERIC, + "topic": Topic.EVENTSTREAM_GENERIC, "strategy_factory": "sentry.eventstream.kafka.dispatch.EventPostProcessForwarderStrategyFactory", "synchronize_commit_log_topic_default": "snuba-generic-events-commit-log", "synchronize_commit_group_default": "generic_events_group", "click_options": _POST_PROCESS_FORWARDER_OPTIONS, }, "post-process-forwarder-transactions": { - "topic": settings.KAFKA_TRANSACTIONS, + "topic": Topic.TRANSACTIONS, "strategy_factory": "sentry.eventstream.kafka.dispatch.EventPostProcessForwarderStrategyFactory", "synchronize_commit_log_topic_default": "snuba-transactions-commit-log", "synchronize_commit_group_default": "transactions_group", "click_options": _POST_PROCESS_FORWARDER_OPTIONS, }, "post-process-forwarder-errors": { - "topic": settings.KAFKA_EVENTS, + "topic": Topic.EVENTS, "strategy_factory": "sentry.eventstream.kafka.dispatch.EventPostProcessForwarderStrategyFactory", "synchronize_commit_log_topic_default": "snuba-commit-log", "synchronize_commit_group_default": "snuba-consumers", "click_options": 
_POST_PROCESS_FORWARDER_OPTIONS, }, "process-spans": { - "topic": settings.KAFKA_SNUBA_SPANS, + "topic": Topic.SNUBA_SPANS, "strategy_factory": "sentry.spans.consumers.process.factory.ProcessSpansStrategyFactory", }, **settings.SENTRY_KAFKA_CONSUMERS, @@ -405,15 +406,8 @@ def get_stream_processor( strategy_factory_cls = import_string(consumer_definition["strategy_factory"]) consumer_topic = consumer_definition["topic"] - if isinstance(consumer_topic, Topic): - default_topic = consumer_topic.value - real_topic = settings.KAFKA_TOPIC_OVERRIDES.get(default_topic, default_topic) - else: - # TODO: Deprecated, remove once this way is no longer used - if not isinstance(consumer_topic, str): - real_topic = consumer_topic() - else: - real_topic = consumer_topic + default_topic = consumer_topic.value + real_topic = settings.KAFKA_TOPIC_OVERRIDES.get(default_topic, default_topic) if topic is None: topic = real_topic @@ -496,10 +490,6 @@ def build_consumer_config(group_id: str): validate_schema = consumer_definition.get("validate_schema") or False if validate_schema: - # TODO: Remove this later but for now we can only validate if `topic_def` is - # the logical topic and not the legacy override topic - assert isinstance(consumer_topic, Topic) - strategy_factory = ValidateSchemaStrategyFactoryWrapper( consumer_topic.value, validate_schema, strategy_factory ) @@ -517,7 +507,8 @@ def build_consumer_config(group_id: str): f"Cannot enable DLQ for consumer: {consumer_name}, no DLQ topic has been defined for it" ) from e try: - cluster_setting = get_topic_definition(dlq_topic)["cluster"] + dlq_topic_defn = get_topic_definition(dlq_topic) + cluster_setting = dlq_topic_defn["cluster"] except ValueError as e: raise click.BadParameter( f"Cannot enable DLQ for consumer: {consumer_name}, DLQ topic {dlq_topic} is not configured in this environment" @@ -527,7 +518,7 @@ def build_consumer_config(group_id: str): dlq_producer = KafkaProducer(producer_config) dlq_policy = DlqPolicy( - 
KafkaDlqProducer(dlq_producer, ArroyoTopic(dlq_topic)), + KafkaDlqProducer(dlq_producer, ArroyoTopic(dlq_topic_defn["real_topic_name"])), DlqLimit( max_invalid_ratio=consumer_definition["dlq_max_invalid_ratio"], max_consecutive_count=consumer_definition["dlq_max_consecutive_count"], diff --git a/src/sentry/deletions/__init__.py b/src/sentry/deletions/__init__.py index 81ba3a368a4625..9148230eeeee16 100644 --- a/src/sentry/deletions/__init__.py +++ b/src/sentry/deletions/__init__.py @@ -78,7 +78,6 @@ descendants, such as Event, so it can more efficiently bulk delete rows. """ - from .base import BulkModelDeletionTask, ModelDeletionTask, ModelRelation # NOQA from .defaults.artifactbundle import ArtifactBundleDeletionTask from .manager import DeletionTaskManager @@ -89,7 +88,11 @@ def load_defaults(): from sentry import models from sentry.discover.models import DiscoverSavedQuery - from sentry.incidents.models import AlertRule, AlertRuleTrigger, AlertRuleTriggerAction + from sentry.incidents.models.alert_rule import ( + AlertRule, + AlertRuleTrigger, + AlertRuleTriggerAction, + ) from sentry.models.commitfilechange import CommitFileChange from sentry.monitors import models as monitor_models diff --git a/src/sentry/deletions/defaults/alert_rule_trigger.py b/src/sentry/deletions/defaults/alert_rule_trigger.py index fe36d050fc8c5e..9aa1469390a79f 100644 --- a/src/sentry/deletions/defaults/alert_rule_trigger.py +++ b/src/sentry/deletions/defaults/alert_rule_trigger.py @@ -3,7 +3,7 @@ class AlertRuleTriggerDeletionTask(ModelDeletionTask): def get_child_relations(self, instance): - from sentry.incidents.models import AlertRuleTriggerAction + from sentry.incidents.models.alert_rule import AlertRuleTriggerAction return [ ModelRelation(AlertRuleTriggerAction, {"alert_rule_trigger_id": instance.id}), diff --git a/src/sentry/deletions/defaults/alertrule.py b/src/sentry/deletions/defaults/alertrule.py index 39340100891b77..2911d3fbe620fa 100644 --- 
a/src/sentry/deletions/defaults/alertrule.py +++ b/src/sentry/deletions/defaults/alertrule.py @@ -7,7 +7,7 @@ class AlertRuleDeletionTask(ModelDeletionTask): manager_name = "objects_with_snapshots" def get_child_relations(self, instance): - from sentry.incidents.models import AlertRuleTrigger + from sentry.incidents.models.alert_rule import AlertRuleTrigger return [ ModelRelation(AlertRuleTrigger, {"alert_rule_id": instance.id}), diff --git a/src/sentry/deletions/defaults/organization.py b/src/sentry/deletions/defaults/organization.py index 43cda85e52a54e..659dc77fc3e766 100644 --- a/src/sentry/deletions/defaults/organization.py +++ b/src/sentry/deletions/defaults/organization.py @@ -19,7 +19,8 @@ def should_proceed(self, instance): def get_child_relations(self, instance): from sentry.deletions.defaults.discoversavedquery import DiscoverSavedQueryDeletionTask from sentry.discover.models import DiscoverSavedQuery, TeamKeyTransaction - from sentry.incidents.models import AlertRule, Incident + from sentry.incidents.models.alert_rule import AlertRule + from sentry.incidents.models.incident import Incident from sentry.models.artifactbundle import ArtifactBundle from sentry.models.commitauthor import CommitAuthor from sentry.models.dashboard import Dashboard diff --git a/src/sentry/deletions/defaults/project.py b/src/sentry/deletions/defaults/project.py index cca0783a987410..37aa34cdd98c54 100644 --- a/src/sentry/deletions/defaults/project.py +++ b/src/sentry/deletions/defaults/project.py @@ -7,7 +7,8 @@ class ProjectDeletionTask(ModelDeletionTask): def get_child_relations(self, instance): from sentry import models from sentry.discover.models import DiscoverSavedQueryProject - from sentry.incidents.models import AlertRule, IncidentProject + from sentry.incidents.models.alert_rule import AlertRule + from sentry.incidents.models.incident import IncidentProject from sentry.models.projectteam import ProjectTeam from sentry.monitors.models import Monitor from 
sentry.replays.models import ReplayRecordingSegment diff --git a/src/sentry/deletions/defaults/team.py b/src/sentry/deletions/defaults/team.py index f799550c155120..5b3847af48e71d 100644 --- a/src/sentry/deletions/defaults/team.py +++ b/src/sentry/deletions/defaults/team.py @@ -17,7 +17,7 @@ def mark_deletion_in_progress(self, instance_list): instance.update(status=TeamStatus.DELETION_IN_PROGRESS) def delete_instance(self, instance): - from sentry.incidents.models import AlertRule + from sentry.incidents.models.alert_rule import AlertRule from sentry.models.rule import Rule AlertRule.objects.filter(owner_id=instance.actor_id).update(owner=None) diff --git a/src/sentry/dynamic_sampling/tasks/utils.py b/src/sentry/dynamic_sampling/tasks/utils.py index 13e333744ca662..c44acf0526c27c 100644 --- a/src/sentry/dynamic_sampling/tasks/utils.py +++ b/src/sentry/dynamic_sampling/tasks/utils.py @@ -1,7 +1,5 @@ from functools import wraps -import sentry_sdk - from sentry import features from sentry.dynamic_sampling.tasks.common import TimeoutException from sentry.dynamic_sampling.tasks.logging import log_task_execution, log_task_timeout @@ -38,12 +36,9 @@ def _wrapper(): try: func(context=context) except TimeoutException: - sentry_sdk.set_extra("context-data", context.to_dict()) log_task_timeout(context) raise else: - sentry_sdk.set_extra("context-data", context.to_dict()) - sentry_sdk.capture_message(f"timing for {task_name}") log_task_execution(context) return _wrapper diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index fcac517bf48539..cbf999469d39ab 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -1492,12 +1492,10 @@ def _save_aggregate( ): raise HashDiscarded("Load shedding group creation", reason="load_shed") - with sentry_sdk.start_span( - op="event_manager.create_group_transaction" - ) as span, metrics.timer( - "event_manager.create_group_transaction" - ) as metric_tags, transaction.atomic( - 
router.db_for_write(GroupHash) + with ( + sentry_sdk.start_span(op="event_manager.create_group_transaction") as span, + metrics.timer("event_manager.create_group_transaction") as metric_tags, + transaction.atomic(router.db_for_write(GroupHash)), ): span.set_tag("create_group_transaction.outcome", "no_group") metric_tags["create_group_transaction.outcome"] = "no_group" @@ -2696,10 +2694,12 @@ def _calculate_span_grouping(jobs: Sequence[Job], projects: ProjectsMapping) -> @metrics.wraps("save_event.detect_performance_problems") -def _detect_performance_problems(jobs: Sequence[Job], projects: ProjectsMapping) -> None: +def _detect_performance_problems( + jobs: Sequence[Job], projects: ProjectsMapping, is_standalone_spans: bool = False +) -> None: for job in jobs: job["performance_problems"] = detect_performance_problems( - job["data"], projects[job["project_id"]] + job["data"], projects[job["project_id"]], is_standalone_spans=is_standalone_spans ) diff --git a/src/sentry/eventstore/reprocessing/__init__.py b/src/sentry/eventstore/reprocessing/__init__.py new file mode 100644 index 00000000000000..a0262cf36a4370 --- /dev/null +++ b/src/sentry/eventstore/reprocessing/__init__.py @@ -0,0 +1,13 @@ +from django.conf import settings + +from sentry.eventstore.reprocessing.base import ReprocessingStore +from sentry.utils.services import LazyServiceWrapper + +reprocessing_store = LazyServiceWrapper( + ReprocessingStore, + settings.SENTRY_REPROCESSING_STORE, + settings.SENTRY_REPROCESSING_STORE_OPTIONS, +) + + +__all__ = ["reprocessing_store"] diff --git a/src/sentry/eventstore/reprocessing/base.py b/src/sentry/eventstore/reprocessing/base.py new file mode 100644 index 00000000000000..59feb6f6625c35 --- /dev/null +++ b/src/sentry/eventstore/reprocessing/base.py @@ -0,0 +1,72 @@ +from collections.abc import Sequence +from datetime import datetime +from typing import Any + +from sentry.utils.services import Service + + +class ReprocessingStore(Service): + __all__ = ( + 
"event_count_for_hashes", + "pop_batched_events", + "get_old_primary_hashes", + "expire_hash", + "add_hash", + "get_remaining_event_count", + "rename_key", + "mark_event_reprocessed", + "start_reprocessing", + "get_pending", + "get_progress", + ) + + def __init__(self, **options: Any) -> None: + pass + + def event_count_for_hashes( + self, project_id: int, group_id: int, old_primary_hashes: Sequence[str] + ) -> int: + raise NotImplementedError() + + def pop_batched_events( + self, project_id: int, group_id: int, primary_hash: str + ) -> tuple[list[str], datetime | None, datetime | None]: + raise NotImplementedError() + + def get_old_primary_hashes(self, project_id: int, group_id: int) -> set[Any]: + raise NotImplementedError() + + def expire_hash( + self, + project_id: int, + group_id: int, + event_id: str, + date_val: datetime, + old_primary_hash: str, + ) -> None: + raise NotImplementedError() + + def add_hash(self, project_id: int, group_id: int, hash: str) -> None: + raise NotImplementedError() + + def get_remaining_event_count( + self, project_id: int, old_group_id: int, datetime_to_event: list[tuple[datetime, str]] + ) -> int: + raise NotImplementedError() + + def rename_key(self, project_id: int, old_group_id: int) -> str | None: + raise NotImplementedError() + + def mark_event_reprocessed(self, group_id: int, num_events: int) -> bool: + raise NotImplementedError() + + def start_reprocessing( + self, group_id: int, date_created: Any, sync_count: int, event_count: int + ) -> None: + raise NotImplementedError() + + def get_pending(self, group_id: int) -> Any: + raise NotImplementedError() + + def get_progress(self, group_id: int) -> dict[str, Any] | None: + raise NotImplementedError() diff --git a/src/sentry/eventstore/reprocessing/redis.py b/src/sentry/eventstore/reprocessing/redis.py new file mode 100644 index 00000000000000..046fb709cfaebd --- /dev/null +++ b/src/sentry/eventstore/reprocessing/redis.py @@ -0,0 +1,172 @@ +import uuid +from collections.abc 
import Sequence +from datetime import datetime +from typing import Any + +import redis +from django.conf import settings + +from sentry.utils import json +from sentry.utils.dates import to_datetime, to_timestamp +from sentry.utils.redis import redis_clusters + +from .base import ReprocessingStore + + +def _get_sync_counter_key(group_id: int) -> str: + return f"re2:count:{group_id}" + + +def _get_info_reprocessed_key(group_id: int) -> str: + return f"re2:info:{group_id}" + + +def _get_old_primary_hash_subset_key(project_id: int, group_id: int, primary_hash: str) -> str: + return f"re2:tombstones:{{{project_id}:{group_id}:{primary_hash}}}" + + +def _get_remaining_key(project_id: int, group_id: int) -> str: + return f"re2:remaining:{{{project_id}:{group_id}}}" + + +class RedisReprocessingStore(ReprocessingStore): + def __init__(self, **options: dict[str, Any]) -> None: + cluster = options.pop("cluster", "default") + assert isinstance(cluster, str), "cluster option must be a string" + self.redis = redis_clusters.get(cluster) + + def event_count_for_hashes( + self, project_id: int, group_id: int, old_primary_hashes: Sequence[str] + ) -> int: + # Events for a group are split and bucketed by their primary hashes. If flushing is to be + # performed on a per-group basis, the event count needs to be summed up across all buckets + # belonging to a single group. + event_count = 0 + for primary_hash in old_primary_hashes: + key = _get_old_primary_hash_subset_key(project_id, group_id, primary_hash) + event_count += self.redis.llen(key) + return event_count + + def pop_batched_events( + self, project_id: int, group_id: int, primary_hash: str + ) -> tuple[list[str], datetime | None, datetime | None]: + """ + For redis key pointing to a list of buffered events structured like + `event id;datetime of event`, returns a list of event IDs, the + earliest datetime, and the latest datetime. 
+ """ + event_ids_batch = [] + min_datetime: datetime | None = None + max_datetime: datetime | None = None + key = _get_old_primary_hash_subset_key(project_id, group_id, primary_hash) + + for row in self.redis.lrange(key, 0, -1): + datetime_raw, event_id = row.split(";") + parsed_datetime = to_datetime(float(datetime_raw)) + + assert parsed_datetime is not None + + if min_datetime is None or parsed_datetime < min_datetime: + min_datetime = parsed_datetime + if max_datetime is None or parsed_datetime > max_datetime: + max_datetime = parsed_datetime + + event_ids_batch.append(event_id) + + self.redis.delete(key) + + return event_ids_batch, min_datetime, max_datetime + + def get_old_primary_hashes(self, project_id: int, group_id: int) -> set[Any]: + # This is a meta key that contains old primary hashes. These hashes are then + # combined with other values to construct a key that points to a list of + # tombstonable events. + primary_hash_set_key = f"re2:tombstone-primary-hashes:{project_id}:{group_id}" + + return self.redis.smembers(primary_hash_set_key) + + def expire_hash( + self, + project_id: int, + group_id: int, + event_id: str, + date_val: datetime, + old_primary_hash: str, + ) -> None: + event_key = _get_old_primary_hash_subset_key(project_id, group_id, old_primary_hash) + self.redis.lpush(event_key, f"{to_timestamp(date_val)};{event_id}") + self.redis.expire(event_key, settings.SENTRY_REPROCESSING_TOMBSTONES_TTL) + + def add_hash(self, project_id: int, group_id: int, hash: str) -> None: + primary_hash_set_key = f"re2:tombstone-primary-hashes:{project_id}:{group_id}" + + self.redis.sadd(primary_hash_set_key, hash) + self.redis.expire(primary_hash_set_key, settings.SENTRY_REPROCESSING_TOMBSTONES_TTL) + + def get_remaining_event_count( + self, project_id: int, old_group_id: int, datetime_to_event: list[tuple[datetime, str]] + ) -> int: + # We explicitly cluster by only project_id and group_id here such that our + # RENAME command later succeeds. 
+ key = _get_remaining_key(project_id, old_group_id) + + if datetime_to_event: + llen = self.redis.lpush( + key, + *( + f"{to_timestamp(datetime)};{event_id}" + for datetime, event_id in datetime_to_event + ), + ) + self.redis.expire(key, settings.SENTRY_REPROCESSING_SYNC_TTL) + else: + llen = self.redis.llen(key) + return llen + + def rename_key(self, project_id: int, old_group_id: int) -> str | None: + key = _get_remaining_key(project_id, old_group_id) + new_key = f"{key}:{uuid.uuid4().hex}" + try: + # Rename `key` to a new temp key that is passed to celery task. We + # use `renamenx` instead of `rename` only to detect UUID collisions. + assert self.redis.renamenx(key, new_key), "UUID collision for new_key?" + + return new_key + except redis.exceptions.ResponseError: + # `key` does not exist in Redis. `ResponseError` is a bit too broad + # but it seems we'd have to do string matching on error message + # otherwise. + return None + + def mark_event_reprocessed(self, group_id: int, num_events: int) -> bool: + # refresh the TTL of the metadata: + self.redis.expire( + _get_info_reprocessed_key(group_id), settings.SENTRY_REPROCESSING_SYNC_TTL + ) + key = _get_sync_counter_key(group_id) + self.redis.expire(key, settings.SENTRY_REPROCESSING_SYNC_TTL) + return self.redis.decrby(key, num_events) == 0 + + def start_reprocessing( + self, group_id: int, date_created: Any, sync_count: int, event_count: int + ) -> None: + self.redis.setex( + _get_sync_counter_key(group_id), settings.SENTRY_REPROCESSING_SYNC_TTL, sync_count + ) + self.redis.setex( + _get_info_reprocessed_key(group_id), + settings.SENTRY_REPROCESSING_SYNC_TTL, + json.dumps( + {"dateCreated": date_created, "syncCount": sync_count, "totalEvents": event_count} + ), + ) + + def get_pending(self, group_id: int) -> tuple[int | None, int]: + pending_key = _get_sync_counter_key(group_id) + pending = self.redis.get(pending_key) + ttl = self.redis.ttl(pending_key) + return pending, ttl + + def get_progress(self, group_id: 
int) -> dict[str, Any] | None: + info = self.redis.get(_get_info_reprocessed_key(group_id)) + return info diff --git a/src/sentry/eventstream/kafka/backend.py b/src/sentry/eventstream/kafka/backend.py index 4ec2aa728aa37f..97fe9b98de8435 100644 --- a/src/sentry/eventstream/kafka/backend.py +++ b/src/sentry/eventstream/kafka/backend.py @@ -7,9 +7,9 @@ from confluent_kafka import KafkaError from confluent_kafka import Message as KafkaMessage from confluent_kafka import Producer -from django.conf import settings from sentry import options +from sentry.conf.types.kafka_definition import Topic from sentry.eventstream.base import EventStreamEventType, GroupStates from sentry.eventstream.snuba import KW_SKIP_SEMANTIC_PARTITIONING, SnubaProtocolEventStream from sentry.killswitches import killswitch_matches_context @@ -24,15 +24,15 @@ class KafkaEventStream(SnubaProtocolEventStream): def __init__(self, **options: Any) -> None: - self.topic = settings.KAFKA_EVENTS - self.transactions_topic = settings.KAFKA_TRANSACTIONS - self.issue_platform_topic = settings.KAFKA_EVENTSTREAM_GENERIC - self.__producers: MutableMapping[str, Producer] = {} + self.topic = Topic.EVENTS + self.transactions_topic = Topic.TRANSACTIONS + self.issue_platform_topic = Topic.EVENTSTREAM_GENERIC + self.__producers: MutableMapping[Topic, Producer] = {} - def get_transactions_topic(self, project_id: int) -> str: + def get_transactions_topic(self, project_id: int) -> Topic: return self.transactions_topic - def get_producer(self, topic: str) -> Producer: + def get_producer(self, topic: Topic) -> Producer: if topic not in self.__producers: cluster_name = get_topic_definition(topic)["cluster"] cluster_options = get_kafka_producer_cluster_options(cluster_name) @@ -202,9 +202,11 @@ def _send( assert isinstance(extra_data, tuple) + real_topic = get_topic_definition(topic)["real_topic_name"] + try: producer.produce( - topic=topic, + topic=real_topic, key=str(project_id).encode("utf-8") if not 
skip_semantic_partitioning else None, value=json.dumps((self.EVENT_PROTOCOL_VERSION, _type) + extra_data), on_delivery=self.delivery_callback, diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index fb45a3a496a149..779b40f18cec8d 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -70,6 +70,7 @@ default_manager.add("relocation:enabled", SystemFeature, FeatureHandlerStrategy.INTERNAL) # Organization scoped features that are in development or in customer trials. +default_manager.add("organizations:activated-alert-rules", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:alert-allow-indexed", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:alert-crash-free-metrics", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:alert-filters", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) @@ -88,14 +89,13 @@ default_manager.add("organizations:dashboards-import", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:dashboards-mep", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:dashboards-rh-widget", OrganizationFeature, FeatureHandlerStrategy.REMOTE) -default_manager.add("organizations:default-inbound-filters", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:ddm-experimental", OrganizationFeature, FeatureHandlerStrategy.REMOTE) -default_manager.add("organizations:ddm-formulas", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:ddm-dashboard-import", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:ddm-ui", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:ddm-metrics-api-unit-normalization", OrganizationFeature, FeatureHandlerStrategy.REMOTE) 
default_manager.add("organizations:default-high-priority-alerts", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:derive-code-mappings", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) +default_manager.add("organizations:derive-code-mappings-php", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:device-class-synthesis", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:device-classification", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:discover-events-rate-limit", OrganizationFeature, FeatureHandlerStrategy.REMOTE) @@ -176,6 +176,7 @@ default_manager.add("organizations:performance-database-view", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:performance-db-main-thread-detector", OrganizationFeature) default_manager.add("organizations:performance-discover-widget-split-ui", OrganizationFeature, FeatureHandlerStrategy.REMOTE) +default_manager.add("organizations:performance-discover-widget-split-override-save", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:performance-file-io-main-thread-detector", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:performance-issues-all-events-tab", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:performance-issues-compressed-assets-detector", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) @@ -240,8 +241,8 @@ default_manager.add("organizations:session-replay-count-query-optimize", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-enable-canvas-replayer", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-enable-canvas", OrganizationFeature, FeatureHandlerStrategy.REMOTE) 
-default_manager.add("organizations:session-replay-event-linking", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:session-replay-issue-emails", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) +default_manager.add("organizations:session-replay-mobile-player", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-new-event-counts", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-recording-scrubbing", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:session-replay-rage-click-issue-creation", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) diff --git a/src/sentry/features/permanent.py b/src/sentry/features/permanent.py index 38c3e415b35633..3c22b5cecdb565 100644 --- a/src/sentry/features/permanent.py +++ b/src/sentry/features/permanent.py @@ -16,7 +16,6 @@ def register_permanent_features(manager: FeatureManager): "organizations:advanced-search", "organizations:app-store-connect-multiple", "organizations:change-alerts", - "organizations:commit-context", "organizations:codecov-integration", "organizations:crash-rate-alerts", "organizations:custom-symbol-sources", diff --git a/src/sentry/grouping/enhancer/__init__.py b/src/sentry/grouping/enhancer/__init__.py index 35df2d586804b5..dbb17c9bf44d9a 100644 --- a/src/sentry/grouping/enhancer/__init__.py +++ b/src/sentry/grouping/enhancer/__init__.py @@ -3,7 +3,6 @@ import base64 import logging import os -import random import zlib from collections.abc import Sequence from hashlib import md5 @@ -19,8 +18,7 @@ from sentry_ophio.enhancers import Cache as RustCache from sentry_ophio.enhancers import Enhancements as RustEnhancements -from sentry import options, projectoptions -from sentry.features.rollout import in_random_rollout +from sentry import projectoptions from sentry.grouping.component import GroupingComponent from 
sentry.stacktraces.functions import set_in_app from sentry.utils import metrics @@ -150,37 +148,25 @@ def merge_rust_enhancements( def parse_rust_enhancements( - source: Literal["config_structure", "config_string"], input: str | bytes, force_parsing=False + source: Literal["config_structure", "config_string"], input: str | bytes ) -> RustEnhancements | None: """ Parses ``RustEnhancements`` from either a msgpack-encoded `config_structure`, or from the text representation called `config_string`. - - Parsing itself is controlled via an option, but can be forced via `force_parsing`. """ rust_enhancements = None - parse_rust_enhancements = force_parsing - if not force_parsing: - try: - parse_rust_enhancements = random.random() < options.get( - "grouping.rust_enhancers.parse_rate" - ) - except Exception: - parse_rust_enhancements = False - - if parse_rust_enhancements: - try: - if source == "config_structure": - assert isinstance(input, bytes) - rust_enhancements = RustEnhancements.from_config_structure(input, RUST_CACHE) - else: - assert isinstance(input, str) - rust_enhancements = RustEnhancements.parse(input, RUST_CACHE) + try: + if source == "config_structure": + assert isinstance(input, bytes) + rust_enhancements = RustEnhancements.from_config_structure(input, RUST_CACHE) + else: + assert isinstance(input, str) + rust_enhancements = RustEnhancements.parse(input, RUST_CACHE) - metrics.incr("rust_enhancements.parsing_performed", tags={"source": source}) - except Exception: - logger.exception("failed parsing Rust Enhancements from `%s`", source) + metrics.incr("rust_enhancements.parsing_performed", tags={"source": source}) + except Exception: + logger.exception("failed parsing Rust Enhancements from `%s`", source) return rust_enhancements @@ -201,13 +187,6 @@ def apply_rust_enhancements( if not rust_enhancements: return None - try: - use_rust_enhancements = in_random_rollout("grouping.rust_enhancers.modify_frames_rate") - except Exception: - use_rust_enhancements = 
False - if not use_rust_enhancements: - return None - try: e = exception_data or {} e = { @@ -249,13 +228,6 @@ def compare_rust_enhancers( sentry_sdk.capture_message("Rust Enhancements mismatch") -def prefer_rust_enhancers(): - try: - return in_random_rollout("grouping.rust_enhancers.prefer_rust_result") - except Exception: - return False - - class Enhancements: # NOTE: You must add a version to ``VERSIONS`` any time attributes are added # to this class, s.t. no enhancements lacking these attributes are loaded @@ -299,13 +271,15 @@ def apply_modifications_to_frame( self.rust_enhancements, match_frames, exception_data ) - if rust_enhanced_frames and prefer_rust_enhancers(): + if rust_enhanced_frames: for frame, (category, in_app) in zip(frames, rust_enhanced_frames): if in_app is not None: set_in_app(frame, in_app) if category is not None: set_path(frame, "data", "category", value=category) return + else: + logger.error("Rust enhancements were not applied successfully") in_memory_cache: dict[str, str] = {} @@ -476,8 +450,8 @@ def loads(cls, data) -> Enhancements: @classmethod @sentry_sdk.tracing.trace - def from_config_string(self, s, bases=None, id=None, force_rust_parsing=False) -> Enhancements: - rust_enhancements = parse_rust_enhancements("config_string", s, force_rust_parsing) + def from_config_string(self, s, bases=None, id=None) -> Enhancements: + rust_enhancements = parse_rust_enhancements("config_string", s) try: tree = enhancements_grammar.parse(s) @@ -815,9 +789,7 @@ def _load_configs() -> dict[str, Enhancements]: fn = fn.replace("@", ":") # NOTE: we want to force parsing the `RustEnhancements` here, as the base rules # are required for inheritance, and because they are well tested. 
- enhancements = Enhancements.from_config_string( - f.read(), id=fn[:-4], force_rust_parsing=True - ) + enhancements = Enhancements.from_config_string(f.read(), id=fn[:-4]) rv[fn[:-4]] = enhancements return rv diff --git a/src/sentry/hybridcloud/apigateway/proxy.py b/src/sentry/hybridcloud/apigateway/proxy.py index 0ec1e15729a1e8..c6b354aa9aa371 100644 --- a/src/sentry/hybridcloud/apigateway/proxy.py +++ b/src/sentry/hybridcloud/apigateway/proxy.py @@ -1,6 +1,7 @@ """ Utilities related to proxying a request to a region silo """ + from __future__ import annotations import logging @@ -166,6 +167,10 @@ def proxy_region_request( data=_body_with_length(request), stream=True, timeout=timeout, + # By default, external_request will resolve any redirects for any verb except for HEAD. + # We explicitly disable this behavior to avoid misrepresenting the original sentry.io request with the + # body response of the redirect. + allow_redirects=False, ) except Timeout: # remote silo timeout. Use DRF timeout instead diff --git a/src/sentry/incidents/action_handlers.py b/src/sentry/incidents/action_handlers.py index b70db725c2cdb3..5e67e57bb17398 100644 --- a/src/sentry/incidents/action_handlers.py +++ b/src/sentry/incidents/action_handlers.py @@ -13,13 +13,8 @@ from sentry.charts.types import ChartSize from sentry.constants import CRASH_RATE_ALERT_AGGREGATE_ALIAS from sentry.incidents.charts import build_metric_alert_chart -from sentry.incidents.models import ( - INCIDENT_STATUS, - AlertRuleThresholdType, - AlertRuleTriggerAction, - IncidentStatus, - TriggerStatus, -) +from sentry.incidents.models.alert_rule import AlertRuleThresholdType, AlertRuleTriggerAction +from sentry.incidents.models.incident import INCIDENT_STATUS, IncidentStatus, TriggerStatus from sentry.models.rulesnooze import RuleSnooze from sentry.models.user import User from sentry.notifications.types import NotificationSettingEnum diff --git a/src/sentry/incidents/charts.py b/src/sentry/incidents/charts.py index 
226e47b5fb647c..292af8a180d585 100644 --- a/src/sentry/incidents/charts.py +++ b/src/sentry/incidents/charts.py @@ -15,7 +15,8 @@ from sentry.charts import backend as charts from sentry.charts.types import ChartSize, ChartType from sentry.incidents.logic import translate_aggregate_field -from sentry.incidents.models import AlertRule, Incident +from sentry.incidents.models.alert_rule import AlertRule +from sentry.incidents.models.incident import Incident from sentry.models.apikey import ApiKey from sentry.models.organization import Organization from sentry.models.user import User diff --git a/src/sentry/incidents/endpoints/bases.py b/src/sentry/incidents/endpoints/bases.py index 8ea857498c9af4..efb7349500ba6d 100644 --- a/src/sentry/incidents/endpoints/bases.py +++ b/src/sentry/incidents/endpoints/bases.py @@ -6,7 +6,7 @@ from sentry.api.bases.organization import OrganizationAlertRulePermission, OrganizationEndpoint from sentry.api.bases.project import ProjectAlertRulePermission, ProjectEndpoint from sentry.api.exceptions import ResourceDoesNotExist -from sentry.incidents.models import AlertRule, AlertRuleTrigger, AlertRuleTriggerAction +from sentry.incidents.models.alert_rule import AlertRule, AlertRuleTrigger, AlertRuleTriggerAction class ProjectAlertRuleEndpoint(ProjectEndpoint): diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_available_action_index.py b/src/sentry/incidents/endpoints/organization_alert_rule_available_action_index.py index 751a80c647786e..265f8cf7428598 100644 --- a/src/sentry/incidents/endpoints/organization_alert_rule_available_action_index.py +++ b/src/sentry/incidents/endpoints/organization_alert_rule_available_action_index.py @@ -19,7 +19,7 @@ get_opsgenie_teams, get_pagerduty_services, ) -from sentry.incidents.models import AlertRuleTriggerAction +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction from sentry.incidents.serializers import ACTION_TARGET_TYPE_TO_STRING from sentry.models.organization 
import Organization from sentry.services.hybrid_cloud.app import RpcSentryAppInstallation, app_service diff --git a/src/sentry/incidents/endpoints/organization_alert_rule_index.py b/src/sentry/incidents/endpoints/organization_alert_rule_index.py index 585e55efbca61c..3dcb2b12627e7d 100644 --- a/src/sentry/incidents/endpoints/organization_alert_rule_index.py +++ b/src/sentry/incidents/endpoints/organization_alert_rule_index.py @@ -34,7 +34,8 @@ from sentry.constants import ObjectStatus from sentry.exceptions import InvalidParams from sentry.incidents.logic import get_slack_actions_with_async_lookups -from sentry.incidents.models import AlertRule, Incident +from sentry.incidents.models.alert_rule import AlertRule +from sentry.incidents.models.incident import Incident from sentry.incidents.serializers import AlertRuleSerializer as DrfAlertRuleSerializer from sentry.incidents.utils.sentry_apps import trigger_sentry_app_action_creators_for_incidents from sentry.integrations.slack.utils import RedisRuleStatus diff --git a/src/sentry/incidents/endpoints/organization_incident_comment_details.py b/src/sentry/incidents/endpoints/organization_incident_comment_details.py index aedd0123f4307f..e715a4ab2dbb17 100644 --- a/src/sentry/incidents/endpoints/organization_incident_comment_details.py +++ b/src/sentry/incidents/endpoints/organization_incident_comment_details.py @@ -10,7 +10,7 @@ from sentry.api.exceptions import ResourceDoesNotExist from sentry.api.serializers import serialize from sentry.incidents.logic import delete_comment, update_comment -from sentry.incidents.models import IncidentActivity, IncidentActivityType +from sentry.incidents.models.incident import IncidentActivity, IncidentActivityType class CommentSerializer(serializers.Serializer): diff --git a/src/sentry/incidents/endpoints/organization_incident_comment_index.py b/src/sentry/incidents/endpoints/organization_incident_comment_index.py index 89343fd6cdd7cf..dfe9db275e6839 100644 --- 
a/src/sentry/incidents/endpoints/organization_incident_comment_index.py +++ b/src/sentry/incidents/endpoints/organization_incident_comment_index.py @@ -13,7 +13,7 @@ extract_user_ids_from_mentions, ) from sentry.incidents.logic import create_incident_activity -from sentry.incidents.models import IncidentActivityType +from sentry.incidents.models.incident import IncidentActivityType class CommentSerializer(serializers.Serializer, MentionsMixin): diff --git a/src/sentry/incidents/endpoints/organization_incident_details.py b/src/sentry/incidents/endpoints/organization_incident_details.py index e618aacfed5a8d..ab8d04e75b5602 100644 --- a/src/sentry/incidents/endpoints/organization_incident_details.py +++ b/src/sentry/incidents/endpoints/organization_incident_details.py @@ -9,7 +9,7 @@ from sentry.api.serializers import serialize from sentry.api.serializers.models.incident import DetailedIncidentSerializer from sentry.incidents.logic import update_incident_status -from sentry.incidents.models import IncidentStatus, IncidentStatusMethod +from sentry.incidents.models.incident import IncidentStatus, IncidentStatusMethod class IncidentSerializer(serializers.Serializer): diff --git a/src/sentry/incidents/endpoints/organization_incident_index.py b/src/sentry/incidents/endpoints/organization_incident_index.py index 5f4d5a3cc6bf42..88279d697295d1 100644 --- a/src/sentry/incidents/endpoints/organization_incident_index.py +++ b/src/sentry/incidents/endpoints/organization_incident_index.py @@ -15,12 +15,8 @@ from sentry.api.serializers import serialize from sentry.api.serializers.models.incident import IncidentSerializer from sentry.exceptions import InvalidParams -from sentry.incidents.models import ( - AlertRuleActivity, - AlertRuleActivityType, - Incident, - IncidentStatus, -) +from sentry.incidents.models.alert_rule import AlertRuleActivity, AlertRuleActivityType +from sentry.incidents.models.incident import Incident, IncidentStatus from sentry.snuba.dataset import Dataset 
from sentry.utils.dates import ensure_aware diff --git a/src/sentry/incidents/endpoints/project_alert_rule_task_details.py b/src/sentry/incidents/endpoints/project_alert_rule_task_details.py index f502c841732084..60849ecb63bb80 100644 --- a/src/sentry/incidents/endpoints/project_alert_rule_task_details.py +++ b/src/sentry/incidents/endpoints/project_alert_rule_task_details.py @@ -7,7 +7,7 @@ from sentry.api.base import region_silo_endpoint from sentry.api.bases.project import ProjectEndpoint, ProjectSettingPermission from sentry.api.serializers import serialize -from sentry.incidents.models import AlertRule +from sentry.incidents.models.alert_rule import AlertRule from sentry.integrations.slack.utils import RedisRuleStatus diff --git a/src/sentry/incidents/endpoints/utils.py b/src/sentry/incidents/endpoints/utils.py index 2b258cdd5aac09..2007e4f8312d53 100644 --- a/src/sentry/incidents/endpoints/utils.py +++ b/src/sentry/incidents/endpoints/utils.py @@ -1,5 +1,5 @@ from sentry.api.helpers.teams import get_teams -from sentry.incidents.models import AlertRule, AlertRuleThresholdType +from sentry.incidents.models.alert_rule import AlertRule, AlertRuleThresholdType def parse_team_params(request, organization, teams): diff --git a/src/sentry/incidents/logic.py b/src/sentry/incidents/logic.py index aa5e5e5f134ad0..3ad3022b77c91b 100644 --- a/src/sentry/incidents/logic.py +++ b/src/sentry/incidents/logic.py @@ -18,7 +18,7 @@ from sentry.auth.access import SystemAccess from sentry.constants import CRASH_RATE_ALERT_AGGREGATE_ALIAS, ObjectStatus from sentry.incidents import tasks -from sentry.incidents.models import ( +from sentry.incidents.models.alert_rule import ( AlertRule, AlertRuleActivationCondition, AlertRuleActivity, @@ -30,6 +30,8 @@ AlertRuleTrigger, AlertRuleTriggerAction, AlertRuleTriggerExclusion, +) +from sentry.incidents.models.incident import ( Incident, IncidentActivity, IncidentActivityType, diff --git a/src/sentry/incidents/models/__init__.py 
b/src/sentry/incidents/models/__init__.py new file mode 100644 index 00000000000000..8f6adf234bba98 --- /dev/null +++ b/src/sentry/incidents/models/__init__.py @@ -0,0 +1,10 @@ +from .alert_rule import AlertRule, AlertRuleStatus, AlertRuleThresholdType, AlertRuleTriggerAction +from .incident import Incident + +__all__ = ( + "AlertRule", + "AlertRuleStatus", + "AlertRuleThresholdType", + "AlertRuleTriggerAction", + "Incident", +) diff --git a/src/sentry/incidents/models.py b/src/sentry/incidents/models/alert_rule.py similarity index 63% rename from src/sentry/incidents/models.py rename to src/sentry/incidents/models/alert_rule.py index 434734d74f8470..3ca802ac017794 100644 --- a/src/sentry/incidents/models.py +++ b/src/sentry/incidents/models/alert_rule.py @@ -6,44 +6,39 @@ from datetime import timedelta from enum import Enum from typing import Any, ClassVar, Self -from uuid import uuid4 from django.conf import settings from django.core.cache import cache -from django.db import IntegrityError, models, router, transaction +from django.db import models from django.db.models import QuerySet from django.db.models.signals import post_delete, post_save from django.utils import timezone -from sentry.backup.dependencies import PrimaryKeyMap, get_model_name +from sentry.backup.dependencies import PrimaryKeyMap from sentry.backup.helpers import ImportFlags from sentry.backup.scopes import ImportScope, RelocationScope from sentry.constants import ObjectStatus from sentry.db.models import ( - ArrayField, BoundedPositiveIntegerField, FlexibleForeignKey, JSONField, Model, - OneToOneCascadeDeletes, - UUIDField, region_silo_only_model, sane_repr, ) from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey from sentry.db.models.manager import BaseManager +from sentry.incidents.models.incident import IncidentTrigger from sentry.incidents.utils.constants import INCIDENTS_SNUBA_SUBSCRIPTION_TYPE from sentry.incidents.utils.types import 
AlertRuleActivationConditionType from sentry.models.actor import Actor from sentry.models.notificationaction import AbstractNotificationAction, ActionService, ActionTarget -from sentry.models.organization import Organization from sentry.models.project import Project from sentry.models.team import Team from sentry.services.hybrid_cloud.user.service import user_service from sentry.snuba.models import QuerySubscription from sentry.snuba.subscriptions import bulk_create_snuba_subscriptions, delete_snuba_subscription from sentry.utils import metrics -from sentry.utils.retries import TimedRetryPolicy alert_subscription_callback_registry: dict[ AlertRuleMonitorType, Callable[[QuerySubscription], bool] @@ -74,306 +69,6 @@ def invoke_alert_subscription_callback( logger = logging.getLogger(__name__) -@region_silo_only_model -class IncidentProject(Model): - __relocation_scope__ = RelocationScope.Excluded - - project = FlexibleForeignKey("sentry.Project", db_index=False, db_constraint=False) - incident = FlexibleForeignKey("sentry.Incident") - - class Meta: - app_label = "sentry" - db_table = "sentry_incidentproject" - unique_together = (("project", "incident"),) - - -@region_silo_only_model -class IncidentSeen(Model): - __relocation_scope__ = RelocationScope.Excluded - - incident = FlexibleForeignKey("sentry.Incident") - user_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, on_delete="CASCADE", db_index=False) - last_seen = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_incidentseen" - unique_together = (("user_id", "incident"),) - - -class IncidentManager(BaseManager["Incident"]): - CACHE_KEY = "incidents:active:%s:%s" - - def fetch_for_organization(self, organization, projects): - return self.filter(organization=organization, projects__in=projects).distinct() - - @classmethod - def _build_active_incident_cache_key(cls, alert_rule_id, project_id): - return cls.CACHE_KEY % (alert_rule_id, project_id) - - def 
get_active_incident(self, alert_rule, project): - cache_key = self._build_active_incident_cache_key(alert_rule.id, project.id) - incident = cache.get(cache_key) - if incident is None: - try: - incident = ( - Incident.objects.filter( - type=IncidentType.ALERT_TRIGGERED.value, - alert_rule=alert_rule, - projects=project, - ) - .exclude(status=IncidentStatus.CLOSED.value) - .order_by("-date_added")[0] - ) - except IndexError: - # Set this to False so that we can have a negative cache as well. - incident = False - cache.set(cache_key, incident) - if incident is False: - incident = None - elif not incident: - # If we had a falsey not None value in the cache, then we stored that there - # are no current active incidents. Just set to None - incident = None - - return incident - - @classmethod - def clear_active_incident_cache(cls, instance, **kwargs): - for project in instance.projects.all(): - cache.delete(cls._build_active_incident_cache_key(instance.alert_rule_id, project.id)) - assert ( - cache.get(cls._build_active_incident_cache_key(instance.alert_rule_id, project.id)) - is None - ) - - @classmethod - def clear_active_incident_project_cache(cls, instance, **kwargs): - cache.delete( - cls._build_active_incident_cache_key( - instance.incident.alert_rule_id, instance.project_id - ) - ) - assert ( - cache.get( - cls._build_active_incident_cache_key( - instance.incident.alert_rule_id, instance.project_id - ) - ) - is None - ) - - @TimedRetryPolicy.wrap(timeout=5, exceptions=(IntegrityError,)) - def create(self, organization, **kwargs): - """ - Creates an Incident. Fetches the maximum identifier value for the org - and increments it by one. If two incidents are created for the - Organization at the same time then an integrity error will be thrown, - and we'll retry again several times. I prefer to lock optimistically - here since if we're creating multiple Incidents a second for an - Organization then we're likely failing at making Incidents useful. 
- """ - with transaction.atomic(router.db_for_write(Organization)): - result = self.filter(organization=organization).aggregate(models.Max("identifier")) - identifier = result["identifier__max"] - if identifier is None: - identifier = 1 - else: - identifier += 1 - - return super().create(organization=organization, identifier=identifier, **kwargs) - - -class IncidentType(Enum): - DETECTED = 0 - ALERT_TRIGGERED = 2 - - -class IncidentStatus(Enum): - OPEN = 1 - CLOSED = 2 - WARNING = 10 - CRITICAL = 20 - - -class IncidentStatusMethod(Enum): - MANUAL = 1 - RULE_UPDATED = 2 - RULE_TRIGGERED = 3 - - -INCIDENT_STATUS = { - IncidentStatus.OPEN: "Open", - IncidentStatus.CLOSED: "Resolved", - IncidentStatus.CRITICAL: "Critical", - IncidentStatus.WARNING: "Warning", -} - - -@region_silo_only_model -class Incident(Model): - __relocation_scope__ = RelocationScope.Organization - - objects: ClassVar[IncidentManager] = IncidentManager() - - organization = FlexibleForeignKey("sentry.Organization") - projects = models.ManyToManyField( - "sentry.Project", related_name="incidents", through=IncidentProject - ) - alert_rule = FlexibleForeignKey("sentry.AlertRule", on_delete=models.PROTECT) - # Incrementing id that is specific to the org. 
- identifier = models.IntegerField() - # Identifier used to match incoming events from the detection algorithm - detection_uuid = UUIDField(null=True, db_index=True) - status = models.PositiveSmallIntegerField(default=IncidentStatus.OPEN.value) - status_method = models.PositiveSmallIntegerField( - default=IncidentStatusMethod.RULE_TRIGGERED.value - ) - type = models.PositiveSmallIntegerField() - title = models.TextField() - # When we suspect the incident actually started - date_started = models.DateTimeField(default=timezone.now) - # When we actually detected the incident - date_detected = models.DateTimeField(default=timezone.now) - date_added = models.DateTimeField(default=timezone.now) - date_closed = models.DateTimeField(null=True) - - class Meta: - app_label = "sentry" - db_table = "sentry_incident" - unique_together = (("organization", "identifier"),) - indexes = (models.Index(fields=("alert_rule", "type", "status")),) - - @property - def current_end_date(self): - """ - Returns the current end of the incident. Either the date it was closed, - or the current time if it's still open. - """ - return self.date_closed if self.date_closed else timezone.now() - - @property - def duration(self): - return self.current_end_date - self.date_started - - def normalize_before_relocation_import( - self, pk_map: PrimaryKeyMap, scope: ImportScope, flags: ImportFlags - ) -> int | None: - old_pk = super().normalize_before_relocation_import(pk_map, scope, flags) - if old_pk is None: - return None - - # Generate a new UUID, if one exists. 
- if self.detection_uuid: - self.detection_uuid = uuid4() - return old_pk - - -@region_silo_only_model -class PendingIncidentSnapshot(Model): - __relocation_scope__ = RelocationScope.Organization - - incident = OneToOneCascadeDeletes("sentry.Incident", db_constraint=False) - target_run_date = models.DateTimeField(db_index=True, default=timezone.now) - date_added = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_pendingincidentsnapshot" - - -@region_silo_only_model -class IncidentSnapshot(Model): - __relocation_scope__ = RelocationScope.Organization - - incident = OneToOneCascadeDeletes("sentry.Incident", db_constraint=False) - event_stats_snapshot = FlexibleForeignKey("sentry.TimeSeriesSnapshot", db_constraint=False) - unique_users = models.IntegerField() - total_events = models.IntegerField() - date_added = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_incidentsnapshot" - - -@region_silo_only_model -class TimeSeriesSnapshot(Model): - __relocation_scope__ = RelocationScope.Organization - __relocation_dependencies__ = {"sentry.Incident"} - - start = models.DateTimeField() - end = models.DateTimeField() - values = ArrayField(of=ArrayField(models.FloatField())) - period = models.IntegerField() - date_added = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_timeseriessnapshot" - - @classmethod - def query_for_relocation_export(cls, q: models.Q, pk_map: PrimaryKeyMap) -> models.Q: - pks = IncidentSnapshot.objects.filter( - incident__in=pk_map.get_pks(get_model_name(Incident)) - ).values_list("event_stats_snapshot_id", flat=True) - - return q & models.Q(pk__in=pks) - - -class IncidentActivityType(Enum): - CREATED = 1 - STATUS_CHANGE = 2 - COMMENT = 3 - DETECTED = 4 - - -@region_silo_only_model -class IncidentActivity(Model): - __relocation_scope__ = RelocationScope.Organization - - incident = 
FlexibleForeignKey("sentry.Incident") - user_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, on_delete="CASCADE", null=True) - type: models.Field[int | IncidentActivityType, int] = models.IntegerField() - value = models.TextField(null=True) - previous_value = models.TextField(null=True) - comment = models.TextField(null=True) - date_added = models.DateTimeField(default=timezone.now) - notification_uuid = models.UUIDField("notification_uuid", null=True) - - class Meta: - app_label = "sentry" - db_table = "sentry_incidentactivity" - - def normalize_before_relocation_import( - self, pk_map: PrimaryKeyMap, scope: ImportScope, flags: ImportFlags - ) -> int | None: - old_pk = super().normalize_before_relocation_import(pk_map, scope, flags) - if old_pk is None: - return None - - # Generate a new UUID, if one exists. - if self.notification_uuid: - self.notification_uuid = uuid4() - return old_pk - - -@region_silo_only_model -class IncidentSubscription(Model): - __relocation_scope__ = RelocationScope.Organization - - incident = FlexibleForeignKey("sentry.Incident", db_index=False) - user_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, on_delete="CASCADE") - date_added = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_incidentsubscription" - unique_together = (("incident", "user_id"),) - - __repr__ = sane_repr("incident_id", "user_id") - - class AlertRuleStatus(Enum): PENDING = 0 SNAPSHOT = 4 @@ -578,9 +273,9 @@ def _validate_actor(self): if self.owner_id is not None and self.team_id is None and self.user_id is None: raise ValueError("AlertRule with owner requires either team_id or user_id") - def save(self, **kwargs: Any) -> None: + def save(self, *args, **kwargs: Any) -> None: self._validate_actor() - return super().save(**kwargs) + return super().save(*args, **kwargs) @property def created_by_id(self): @@ -652,61 +347,6 @@ def subscribe_projects( return [] -class TriggerStatus(Enum): - ACTIVE = 0 - RESOLVED 
= 1 - - -class IncidentTriggerManager(BaseManager["IncidentTrigger"]): - CACHE_KEY = "incident:triggers:%s" - - @classmethod - def _build_cache_key(cls, incident_id): - return cls.CACHE_KEY % incident_id - - def get_for_incident(self, incident): - """ - Fetches the IncidentTriggers associated with an Incident. Attempts to fetch from - cache then hits the database. - """ - cache_key = self._build_cache_key(incident.id) - triggers = cache.get(cache_key) - if triggers is None: - triggers = list(IncidentTrigger.objects.filter(incident=incident)) - cache.set(cache_key, triggers, 3600) - - return triggers - - @classmethod - def clear_incident_cache(cls, instance, **kwargs): - cache.delete(cls._build_cache_key(instance.id)) - assert cache.get(cls._build_cache_key(instance.id)) is None - - @classmethod - def clear_incident_trigger_cache(cls, instance, **kwargs): - cache.delete(cls._build_cache_key(instance.incident_id)) - assert cache.get(cls._build_cache_key(instance.incident_id)) is None - - -@region_silo_only_model -class IncidentTrigger(Model): - __relocation_scope__ = RelocationScope.Organization - - objects: ClassVar[IncidentTriggerManager] = IncidentTriggerManager() - - incident = FlexibleForeignKey("sentry.Incident", db_index=False) - alert_rule_trigger = FlexibleForeignKey("sentry.AlertRuleTrigger") - status = models.SmallIntegerField() - date_modified = models.DateTimeField(default=timezone.now, null=False) - date_added = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_incidenttrigger" - unique_together = (("incident", "alert_rule_trigger"),) - indexes = (models.Index(fields=("alert_rule_trigger", "incident_id")),) - - class AlertRuleTriggerManager(BaseManager["AlertRuleTrigger"]): CACHE_KEY = "alert_rule_triggers:alert_rule:%s" @@ -834,7 +474,7 @@ class AlertRuleTriggerAction(AbstractNotificationAction): Type = ActionService TargetType = ActionTarget - _type_registrations = {} + _type_registrations: 
dict[ActionService, TypeRegistration] = {} INTEGRATION_TYPES = frozenset( ( @@ -989,11 +629,3 @@ def clean_expired_alerts(subscription: QuerySubscription) -> bool: post_save.connect(AlertRuleTriggerManager.clear_alert_rule_trigger_cache, sender=AlertRule) post_save.connect(AlertRuleTriggerManager.clear_trigger_cache, sender=AlertRuleTrigger) post_delete.connect(AlertRuleTriggerManager.clear_trigger_cache, sender=AlertRuleTrigger) - -post_save.connect(IncidentManager.clear_active_incident_cache, sender=Incident) -post_save.connect(IncidentManager.clear_active_incident_project_cache, sender=IncidentProject) -post_delete.connect(IncidentManager.clear_active_incident_project_cache, sender=IncidentProject) - -post_delete.connect(IncidentTriggerManager.clear_incident_cache, sender=Incident) -post_save.connect(IncidentTriggerManager.clear_incident_trigger_cache, sender=IncidentTrigger) -post_delete.connect(IncidentTriggerManager.clear_incident_trigger_cache, sender=IncidentTrigger) diff --git a/src/sentry/incidents/models/incident.py b/src/sentry/incidents/models/incident.py new file mode 100644 index 00000000000000..fff82e641fd0c5 --- /dev/null +++ b/src/sentry/incidents/models/incident.py @@ -0,0 +1,395 @@ +from __future__ import annotations + +import logging +from enum import Enum +from typing import ClassVar +from uuid import uuid4 + +from django.conf import settings +from django.core.cache import cache +from django.db import IntegrityError, models, router, transaction +from django.db.models.signals import post_delete, post_save +from django.utils import timezone + +from sentry.backup.dependencies import PrimaryKeyMap, get_model_name +from sentry.backup.helpers import ImportFlags +from sentry.backup.scopes import ImportScope, RelocationScope +from sentry.db.models import ( + ArrayField, + FlexibleForeignKey, + Model, + OneToOneCascadeDeletes, + UUIDField, + region_silo_only_model, + sane_repr, +) +from sentry.db.models.fields.hybrid_cloud_foreign_key import 
HybridCloudForeignKey +from sentry.db.models.manager import BaseManager +from sentry.models.organization import Organization +from sentry.utils.retries import TimedRetryPolicy + +logger = logging.getLogger(__name__) + + +@region_silo_only_model +class IncidentProject(Model): + __relocation_scope__ = RelocationScope.Excluded + + project = FlexibleForeignKey("sentry.Project", db_index=False, db_constraint=False) + incident = FlexibleForeignKey("sentry.Incident") + + class Meta: + app_label = "sentry" + db_table = "sentry_incidentproject" + unique_together = (("project", "incident"),) + + +@region_silo_only_model +class IncidentSeen(Model): + __relocation_scope__ = RelocationScope.Excluded + + incident = FlexibleForeignKey("sentry.Incident") + user_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, on_delete="CASCADE", db_index=False) + last_seen = models.DateTimeField(default=timezone.now) + + class Meta: + app_label = "sentry" + db_table = "sentry_incidentseen" + unique_together = (("user_id", "incident"),) + + +class IncidentManager(BaseManager["Incident"]): + CACHE_KEY = "incidents:active:%s:%s" + + def fetch_for_organization(self, organization, projects): + return self.filter(organization=organization, projects__in=projects).distinct() + + @classmethod + def _build_active_incident_cache_key(cls, alert_rule_id, project_id): + return cls.CACHE_KEY % (alert_rule_id, project_id) + + def get_active_incident(self, alert_rule, project): + cache_key = self._build_active_incident_cache_key(alert_rule.id, project.id) + incident = cache.get(cache_key) + if incident is None: + try: + incident = ( + Incident.objects.filter( + type=IncidentType.ALERT_TRIGGERED.value, + alert_rule=alert_rule, + projects=project, + ) + .exclude(status=IncidentStatus.CLOSED.value) + .order_by("-date_added")[0] + ) + except IndexError: + # Set this to False so that we can have a negative cache as well. 
+ incident = False + cache.set(cache_key, incident) + if incident is False: + incident = None + elif not incident: + # If we had a falsey not None value in the cache, then we stored that there + # are no current active incidents. Just set to None + incident = None + + return incident + + @classmethod + def clear_active_incident_cache(cls, instance, **kwargs): + for project in instance.projects.all(): + cache.delete(cls._build_active_incident_cache_key(instance.alert_rule_id, project.id)) + assert ( + cache.get(cls._build_active_incident_cache_key(instance.alert_rule_id, project.id)) + is None + ) + + @classmethod + def clear_active_incident_project_cache(cls, instance, **kwargs): + cache.delete( + cls._build_active_incident_cache_key( + instance.incident.alert_rule_id, instance.project_id + ) + ) + assert ( + cache.get( + cls._build_active_incident_cache_key( + instance.incident.alert_rule_id, instance.project_id + ) + ) + is None + ) + + @TimedRetryPolicy.wrap(timeout=5, exceptions=(IntegrityError,)) + def create(self, organization, **kwargs): + """ + Creates an Incident. Fetches the maximum identifier value for the org + and increments it by one. If two incidents are created for the + Organization at the same time then an integrity error will be thrown, + and we'll retry again several times. I prefer to lock optimistically + here since if we're creating multiple Incidents a second for an + Organization then we're likely failing at making Incidents useful. 
+ """ + with transaction.atomic(router.db_for_write(Organization)): + result = self.filter(organization=organization).aggregate(models.Max("identifier")) + identifier = result["identifier__max"] + if identifier is None: + identifier = 1 + else: + identifier += 1 + + return super().create(organization=organization, identifier=identifier, **kwargs) + + +class IncidentType(Enum): + DETECTED = 0 + ALERT_TRIGGERED = 2 + + +class IncidentStatus(Enum): + OPEN = 1 + CLOSED = 2 + WARNING = 10 + CRITICAL = 20 + + +class IncidentStatusMethod(Enum): + MANUAL = 1 + RULE_UPDATED = 2 + RULE_TRIGGERED = 3 + + +INCIDENT_STATUS = { + IncidentStatus.OPEN: "Open", + IncidentStatus.CLOSED: "Resolved", + IncidentStatus.CRITICAL: "Critical", + IncidentStatus.WARNING: "Warning", +} + + +@region_silo_only_model +class Incident(Model): + __relocation_scope__ = RelocationScope.Organization + + objects: ClassVar[IncidentManager] = IncidentManager() + + organization = FlexibleForeignKey("sentry.Organization") + projects = models.ManyToManyField( + "sentry.Project", related_name="incidents", through=IncidentProject + ) + alert_rule = FlexibleForeignKey("sentry.AlertRule", on_delete=models.PROTECT) + # Incrementing id that is specific to the org. 
+ identifier = models.IntegerField() + # Identifier used to match incoming events from the detection algorithm + detection_uuid = UUIDField(null=True, db_index=True) + status = models.PositiveSmallIntegerField(default=IncidentStatus.OPEN.value) + status_method = models.PositiveSmallIntegerField( + default=IncidentStatusMethod.RULE_TRIGGERED.value + ) + type = models.PositiveSmallIntegerField() + title = models.TextField() + # When we suspect the incident actually started + date_started = models.DateTimeField(default=timezone.now) + # When we actually detected the incident + date_detected = models.DateTimeField(default=timezone.now) + date_added = models.DateTimeField(default=timezone.now) + date_closed = models.DateTimeField(null=True) + + class Meta: + app_label = "sentry" + db_table = "sentry_incident" + unique_together = (("organization", "identifier"),) + indexes = (models.Index(fields=("alert_rule", "type", "status")),) + + @property + def current_end_date(self): + """ + Returns the current end of the incident. Either the date it was closed, + or the current time if it's still open. + """ + return self.date_closed if self.date_closed else timezone.now() + + @property + def duration(self): + return self.current_end_date - self.date_started + + def normalize_before_relocation_import( + self, pk_map: PrimaryKeyMap, scope: ImportScope, flags: ImportFlags + ) -> int | None: + old_pk = super().normalize_before_relocation_import(pk_map, scope, flags) + if old_pk is None: + return None + + # Generate a new UUID, if one exists. 
+ if self.detection_uuid: + self.detection_uuid = uuid4() + return old_pk + + +@region_silo_only_model +class PendingIncidentSnapshot(Model): + __relocation_scope__ = RelocationScope.Organization + + incident = OneToOneCascadeDeletes("sentry.Incident", db_constraint=False) + target_run_date = models.DateTimeField(db_index=True, default=timezone.now) + date_added = models.DateTimeField(default=timezone.now) + + class Meta: + app_label = "sentry" + db_table = "sentry_pendingincidentsnapshot" + + +@region_silo_only_model +class IncidentSnapshot(Model): + __relocation_scope__ = RelocationScope.Organization + + incident = OneToOneCascadeDeletes("sentry.Incident", db_constraint=False) + event_stats_snapshot = FlexibleForeignKey("sentry.TimeSeriesSnapshot", db_constraint=False) + unique_users = models.IntegerField() + total_events = models.IntegerField() + date_added = models.DateTimeField(default=timezone.now) + + class Meta: + app_label = "sentry" + db_table = "sentry_incidentsnapshot" + + +@region_silo_only_model +class TimeSeriesSnapshot(Model): + __relocation_scope__ = RelocationScope.Organization + __relocation_dependencies__ = {"sentry.Incident"} + + start = models.DateTimeField() + end = models.DateTimeField() + values = ArrayField(of=ArrayField(models.FloatField())) + period = models.IntegerField() + date_added = models.DateTimeField(default=timezone.now) + + class Meta: + app_label = "sentry" + db_table = "sentry_timeseriessnapshot" + + @classmethod + def query_for_relocation_export(cls, q: models.Q, pk_map: PrimaryKeyMap) -> models.Q: + pks = IncidentSnapshot.objects.filter( + incident__in=pk_map.get_pks(get_model_name(Incident)) + ).values_list("event_stats_snapshot_id", flat=True) + + return q & models.Q(pk__in=pks) + + +class IncidentActivityType(Enum): + CREATED = 1 + STATUS_CHANGE = 2 + COMMENT = 3 + DETECTED = 4 + + +@region_silo_only_model +class IncidentActivity(Model): + __relocation_scope__ = RelocationScope.Organization + + incident = 
FlexibleForeignKey("sentry.Incident") + user_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, on_delete="CASCADE", null=True) + type: models.Field = models.IntegerField() + value = models.TextField(null=True) + previous_value = models.TextField(null=True) + comment = models.TextField(null=True) + date_added = models.DateTimeField(default=timezone.now) + notification_uuid = models.UUIDField("notification_uuid", null=True) + + class Meta: + app_label = "sentry" + db_table = "sentry_incidentactivity" + + def normalize_before_relocation_import( + self, pk_map: PrimaryKeyMap, scope: ImportScope, flags: ImportFlags + ) -> int | None: + old_pk = super().normalize_before_relocation_import(pk_map, scope, flags) + if old_pk is None: + return None + + # Generate a new UUID, if one exists. + if self.notification_uuid: + self.notification_uuid = uuid4() + return old_pk + + +@region_silo_only_model +class IncidentSubscription(Model): + __relocation_scope__ = RelocationScope.Organization + + incident = FlexibleForeignKey("sentry.Incident", db_index=False) + user_id = HybridCloudForeignKey(settings.AUTH_USER_MODEL, on_delete="CASCADE") + date_added = models.DateTimeField(default=timezone.now) + + class Meta: + app_label = "sentry" + db_table = "sentry_incidentsubscription" + unique_together = (("incident", "user_id"),) + + __repr__ = sane_repr("incident_id", "user_id") + + +class TriggerStatus(Enum): + ACTIVE = 0 + RESOLVED = 1 + + +class IncidentTriggerManager(BaseManager["IncidentTrigger"]): + CACHE_KEY = "incident:triggers:%s" + + @classmethod + def _build_cache_key(cls, incident_id): + return cls.CACHE_KEY % incident_id + + def get_for_incident(self, incident): + """ + Fetches the IncidentTriggers associated with an Incident. Attempts to fetch from + cache then hits the database. 
+ """ + cache_key = self._build_cache_key(incident.id) + triggers = cache.get(cache_key) + if triggers is None: + triggers = list(IncidentTrigger.objects.filter(incident=incident)) + cache.set(cache_key, triggers, 3600) + + return triggers + + @classmethod + def clear_incident_cache(cls, instance, **kwargs): + cache.delete(cls._build_cache_key(instance.id)) + assert cache.get(cls._build_cache_key(instance.id)) is None + + @classmethod + def clear_incident_trigger_cache(cls, instance, **kwargs): + cache.delete(cls._build_cache_key(instance.incident_id)) + assert cache.get(cls._build_cache_key(instance.incident_id)) is None + + +@region_silo_only_model +class IncidentTrigger(Model): + __relocation_scope__ = RelocationScope.Organization + + objects: ClassVar[IncidentTriggerManager] = IncidentTriggerManager() + + incident = FlexibleForeignKey("sentry.Incident", db_index=False) + alert_rule_trigger = FlexibleForeignKey("sentry.AlertRuleTrigger") + status = models.SmallIntegerField() + date_modified = models.DateTimeField(default=timezone.now, null=False) + date_added = models.DateTimeField(default=timezone.now) + + class Meta: + app_label = "sentry" + db_table = "sentry_incidenttrigger" + unique_together = (("incident", "alert_rule_trigger"),) + indexes = (models.Index(fields=("alert_rule_trigger", "incident_id")),) + + +post_save.connect(IncidentManager.clear_active_incident_cache, sender=Incident) +post_save.connect(IncidentManager.clear_active_incident_project_cache, sender=IncidentProject) +post_delete.connect(IncidentManager.clear_active_incident_project_cache, sender=IncidentProject) + +post_delete.connect(IncidentTriggerManager.clear_incident_cache, sender=Incident) +post_save.connect(IncidentTriggerManager.clear_incident_trigger_cache, sender=IncidentTrigger) +post_delete.connect(IncidentTriggerManager.clear_incident_trigger_cache, sender=IncidentTrigger) diff --git a/src/sentry/incidents/receivers.py b/src/sentry/incidents/receivers.py index 
a26024e8afeea7..de8c5e03703a51 100644 --- a/src/sentry/incidents/receivers.py +++ b/src/sentry/incidents/receivers.py @@ -3,7 +3,8 @@ from django.db.models.signals import post_save, pre_save from django.dispatch import receiver -from sentry.incidents.models import AlertRule, IncidentTrigger +from sentry.incidents.models.alert_rule import AlertRule +from sentry.incidents.models.incident import IncidentTrigger from sentry.models.project import Project diff --git a/src/sentry/incidents/serializers/__init__.py b/src/sentry/incidents/serializers/__init__.py index d544785ec92df7..1f5aed9e2af469 100644 --- a/src/sentry/incidents/serializers/__init__.py +++ b/src/sentry/incidents/serializers/__init__.py @@ -1,4 +1,4 @@ -from sentry.incidents.models import AlertRuleTriggerAction +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction from sentry.snuba.dataset import Dataset from sentry.snuba.models import SnubaQuery, SnubaQueryEventType diff --git a/src/sentry/incidents/serializers/alert_rule.py b/src/sentry/incidents/serializers/alert_rule.py index 8d3d009fb87a4e..559cdd3269f23b 100644 --- a/src/sentry/incidents/serializers/alert_rule.py +++ b/src/sentry/incidents/serializers/alert_rule.py @@ -26,7 +26,12 @@ translate_aggregate_field, update_alert_rule, ) -from sentry.incidents.models import AlertRule, AlertRuleThresholdType, AlertRuleTrigger +from sentry.incidents.models.alert_rule import ( + AlertRule, + AlertRuleMonitorType, + AlertRuleThresholdType, + AlertRuleTrigger, +) from sentry.snuba.dataset import Dataset from sentry.snuba.entity_subscription import ( ENTITY_TIME_COLUMNS, @@ -196,6 +201,16 @@ def validate_threshold_type(self, threshold_type): % [item.value for item in AlertRuleThresholdType] ) + def validate_monitor_type(self, monitor_type): + if monitor_type > 0 and not features.has( + "organizations:activated-alert-rules", + self.context["organization"], + actor=self.context.get("user", None), + ): + raise serializers.ValidationError("Invalid 
monitor type") + + return AlertRuleMonitorType(monitor_type) + def validate(self, data): """ Performs validation on an alert rule's data. diff --git a/src/sentry/incidents/serializers/alert_rule_trigger.py b/src/sentry/incidents/serializers/alert_rule_trigger.py index c8820301190607..e6aa7d7fffad18 100644 --- a/src/sentry/incidents/serializers/alert_rule_trigger.py +++ b/src/sentry/incidents/serializers/alert_rule_trigger.py @@ -10,7 +10,7 @@ rewrite_trigger_action_fields, update_alert_rule_trigger, ) -from sentry.incidents.models import AlertRuleTrigger, AlertRuleTriggerAction +from sentry.incidents.models.alert_rule import AlertRuleTrigger, AlertRuleTriggerAction from .alert_rule_trigger_action import AlertRuleTriggerActionSerializer diff --git a/src/sentry/incidents/serializers/alert_rule_trigger_action.py b/src/sentry/incidents/serializers/alert_rule_trigger_action.py index 810852c293efaa..c326db9c97d47b 100644 --- a/src/sentry/incidents/serializers/alert_rule_trigger_action.py +++ b/src/sentry/incidents/serializers/alert_rule_trigger_action.py @@ -8,7 +8,7 @@ create_alert_rule_trigger_action, update_alert_rule_trigger_action, ) -from sentry.incidents.models import AlertRuleTriggerAction +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction from sentry.incidents.serializers import ( ACTION_TARGET_TYPE_TO_STRING, STRING_TO_ACTION_TARGET_TYPE, diff --git a/src/sentry/incidents/subscription_processor.py b/src/sentry/incidents/subscription_processor.py index 70b7fb531091d4..9024252512b0cd 100644 --- a/src/sentry/incidents/subscription_processor.py +++ b/src/sentry/incidents/subscription_processor.py @@ -22,11 +22,14 @@ deduplicate_trigger_actions, update_incident_status, ) -from sentry.incidents.models import ( +from sentry.incidents.models.alert_rule import ( AlertRule, AlertRuleMonitorType, AlertRuleThresholdType, AlertRuleTrigger, + invoke_alert_subscription_callback, +) +from sentry.incidents.models.incident import ( Incident, IncidentActivity, 
IncidentStatus, @@ -34,7 +37,6 @@ IncidentTrigger, IncidentType, TriggerStatus, - invoke_alert_subscription_callback, ) from sentry.incidents.tasks import handle_trigger_action from sentry.incidents.utils.types import QuerySubscriptionUpdate diff --git a/src/sentry/incidents/tasks.py b/src/sentry/incidents/tasks.py index 148ad317ddcb1c..6cfe422c3fc00e 100644 --- a/src/sentry/incidents/tasks.py +++ b/src/sentry/incidents/tasks.py @@ -7,10 +7,9 @@ from django.urls import reverse from sentry.auth.access import from_user -from sentry.incidents.models import ( +from sentry.incidents.models.alert_rule import AlertRuleStatus, AlertRuleTriggerAction +from sentry.incidents.models.incident import ( INCIDENT_STATUS, - AlertRuleStatus, - AlertRuleTriggerAction, Incident, IncidentActivity, IncidentActivityType, @@ -240,7 +239,7 @@ def handle_trigger_action( ) def auto_resolve_snapshot_incidents(alert_rule_id: int, **kwargs: Any) -> None: from sentry.incidents.logic import update_incident_status - from sentry.incidents.models import AlertRule + from sentry.incidents.models.alert_rule import AlertRule try: alert_rule = AlertRule.objects_with_snapshots.get(id=alert_rule_id) diff --git a/src/sentry/incidents/utils/sentry_apps.py b/src/sentry/incidents/utils/sentry_apps.py index 3eeefbb6093a56..d4b6cd175fcb3c 100644 --- a/src/sentry/incidents/utils/sentry_apps.py +++ b/src/sentry/incidents/utils/sentry_apps.py @@ -5,7 +5,7 @@ from sentry.auth.access import NoAccess from sentry.incidents.logic import get_filtered_actions -from sentry.incidents.models import AlertRuleTriggerAction +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction from sentry.incidents.serializers import AlertRuleTriggerActionSerializer from sentry.services.hybrid_cloud.app import app_service diff --git a/src/sentry/ingest/types.py b/src/sentry/ingest/types.py index f0dd0c6ad36bf5..0f07507dd8ef43 100644 --- a/src/sentry/ingest/types.py +++ b/src/sentry/ingest/types.py @@ -6,19 +6,3 @@ class 
ConsumerType: Events = "events" # consumes simple events ( from the Events topic) Attachments = "attachments" # consumes events with attachments ( from the Attachments topic) Transactions = "transactions" # consumes transaction events ( from the Transactions topic) - - @staticmethod - def all(): - return (ConsumerType.Events, ConsumerType.Attachments, ConsumerType.Transactions) - - @staticmethod - def get_topic_name(consumer_type): - from django.conf import settings - - if consumer_type == ConsumerType.Events: - return settings.KAFKA_INGEST_EVENTS - elif consumer_type == ConsumerType.Attachments: - return settings.KAFKA_INGEST_ATTACHMENTS - elif consumer_type == ConsumerType.Transactions: - return settings.KAFKA_INGEST_TRANSACTIONS - raise ValueError("Invalid consumer type", consumer_type) diff --git a/src/sentry/integrations/bitbucket/uninstalled.py b/src/sentry/integrations/bitbucket/uninstalled.py index 90ce7563e513c7..071e1faed76df0 100644 --- a/src/sentry/integrations/bitbucket/uninstalled.py +++ b/src/sentry/integrations/bitbucket/uninstalled.py @@ -8,9 +8,8 @@ from sentry.constants import ObjectStatus from sentry.integrations.utils import AtlassianConnectValidationError, get_integration_from_jwt from sentry.models.integrations.integration import Integration -from sentry.models.organization import Organization -from sentry.models.repository import Repository from sentry.services.hybrid_cloud.integration import integration_service +from sentry.services.hybrid_cloud.repository import repository_service @control_silo_endpoint @@ -44,15 +43,12 @@ def post(self, request: Request, *args, **kwargs) -> Response: org_integrations = integration_service.get_organization_integrations( integration_id=integration.id ) - organizations = Organization.objects.filter( - id__in=[oi.organization_id for oi in org_integrations] - ) - # TODO: Replace with repository_service; support status write - Repository.objects.filter( - organization_id__in=organizations.values_list("id", 
flat=True), - provider="integrations:bitbucket", - integration_id=integration.id, - ).update(status=ObjectStatus.DISABLED) + for oi in org_integrations: + repository_service.disable_repositories_for_integration( + organization_id=oi.organization_id, + integration_id=integration.id, + provider="integrations:bitbucket", + ) return self.respond() diff --git a/src/sentry/integrations/discord/actions/metric_alert.py b/src/sentry/integrations/discord/actions/metric_alert.py index 44d5c7816bb643..a4578a84c411cf 100644 --- a/src/sentry/integrations/discord/actions/metric_alert.py +++ b/src/sentry/integrations/discord/actions/metric_alert.py @@ -4,7 +4,8 @@ from sentry import features from sentry.incidents.charts import build_metric_alert_chart -from sentry.incidents.models import AlertRuleTriggerAction, Incident, IncidentStatus +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction +from sentry.incidents.models.incident import Incident, IncidentStatus from sentry.integrations.discord.client import DiscordClient from sentry.integrations.discord.message_builder.metric_alerts import ( DiscordMetricAlertMessageBuilder, diff --git a/src/sentry/integrations/discord/message_builder/metric_alerts.py b/src/sentry/integrations/discord/message_builder/metric_alerts.py index 653b036dfa31c7..dc43aeac8cbeaa 100644 --- a/src/sentry/integrations/discord/message_builder/metric_alerts.py +++ b/src/sentry/integrations/discord/message_builder/metric_alerts.py @@ -3,7 +3,8 @@ import time from datetime import datetime -from sentry.incidents.models import AlertRule, Incident, IncidentStatus +from sentry.incidents.models.alert_rule import AlertRule +from sentry.incidents.models.incident import Incident, IncidentStatus from sentry.integrations.discord.message_builder import INCIDENT_COLOR_MAPPING, LEVEL_TO_COLOR from sentry.integrations.discord.message_builder.base.base import DiscordMessageBuilder from sentry.integrations.discord.message_builder.base.embed.base import 
DiscordMessageEmbed diff --git a/src/sentry/integrations/metric_alerts.py b/src/sentry/integrations/metric_alerts.py index 618612d0f7fbed..cc628c6a042d8b 100644 --- a/src/sentry/integrations/metric_alerts.py +++ b/src/sentry/integrations/metric_alerts.py @@ -7,10 +7,9 @@ from sentry.constants import CRASH_RATE_ALERT_AGGREGATE_ALIAS from sentry.incidents.logic import get_incident_aggregates -from sentry.incidents.models import ( +from sentry.incidents.models.alert_rule import AlertRule, AlertRuleThresholdType +from sentry.incidents.models.incident import ( INCIDENT_STATUS, - AlertRule, - AlertRuleThresholdType, Incident, IncidentStatus, IncidentTrigger, diff --git a/src/sentry/integrations/msteams/card_builder/incident_attachment.py b/src/sentry/integrations/msteams/card_builder/incident_attachment.py index 9ab336eb2582d3..72e6d0c67c1bf8 100644 --- a/src/sentry/integrations/msteams/card_builder/incident_attachment.py +++ b/src/sentry/integrations/msteams/card_builder/incident_attachment.py @@ -2,7 +2,7 @@ from typing import Literal -from sentry.incidents.models import Incident, IncidentStatus +from sentry.incidents.models.incident import Incident, IncidentStatus from sentry.integrations.metric_alerts import incident_attachment_info from sentry.integrations.msteams.card_builder.block import ( AdaptiveCard, diff --git a/src/sentry/integrations/msteams/utils.py b/src/sentry/integrations/msteams/utils.py index c1a3f64ae05b84..b88de0a990b39c 100644 --- a/src/sentry/integrations/msteams/utils.py +++ b/src/sentry/integrations/msteams/utils.py @@ -3,7 +3,8 @@ import enum import logging -from sentry.incidents.models import AlertRuleTriggerAction, Incident, IncidentStatus +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction +from sentry.incidents.models.incident import Incident, IncidentStatus from sentry.models.integrations.integration import Integration from sentry.services.hybrid_cloud.integration import integration_service diff --git 
a/src/sentry/integrations/opsgenie/utils.py b/src/sentry/integrations/opsgenie/utils.py index 189ba1cf309fae..06c031c1dbd31f 100644 --- a/src/sentry/integrations/opsgenie/utils.py +++ b/src/sentry/integrations/opsgenie/utils.py @@ -4,7 +4,8 @@ from typing import Any, cast from sentry.constants import ObjectStatus -from sentry.incidents.models import AlertRuleTriggerAction, Incident, IncidentStatus +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction +from sentry.incidents.models.incident import Incident, IncidentStatus from sentry.integrations.metric_alerts import incident_attachment_info from sentry.services.hybrid_cloud.integration import integration_service from sentry.services.hybrid_cloud.integration.model import RpcOrganizationIntegration diff --git a/src/sentry/integrations/pagerduty/utils.py b/src/sentry/integrations/pagerduty/utils.py index 1b7a853d7fcd7c..91a403802a095e 100644 --- a/src/sentry/integrations/pagerduty/utils.py +++ b/src/sentry/integrations/pagerduty/utils.py @@ -6,7 +6,8 @@ from django.db import router, transaction from django.http import Http404 -from sentry.incidents.models import AlertRuleTriggerAction, Incident, IncidentStatus +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction +from sentry.incidents.models.incident import Incident, IncidentStatus from sentry.integrations.metric_alerts import incident_attachment_info from sentry.models.integrations.organization_integration import OrganizationIntegration from sentry.services.hybrid_cloud.integration import integration_service diff --git a/src/sentry/integrations/repository/metric_alert.py b/src/sentry/integrations/repository/metric_alert.py index 21bd2b7339680d..2c446687339a90 100644 --- a/src/sentry/integrations/repository/metric_alert.py +++ b/src/sentry/integrations/repository/metric_alert.py @@ -3,7 +3,8 @@ from dataclasses import dataclass from logging import Logger, getLogger -from sentry.incidents.models import AlertRuleTriggerAction, Incident 
+from sentry.incidents.models.alert_rule import AlertRuleTriggerAction +from sentry.incidents.models.incident import Incident from sentry.integrations.repository.base import BaseNewNotificationMessage, BaseNotificationMessage from sentry.models.notificationmessage import NotificationMessage @@ -22,9 +23,11 @@ def from_model(cls, instance: NotificationMessage) -> MetricAlertNotificationMes error_code=instance.error_code, error_details=instance.error_details, message_identifier=instance.message_identifier, - parent_notification_message_id=instance.parent_notification_message.id - if instance.parent_notification_message - else None, + parent_notification_message_id=( + instance.parent_notification_message.id + if instance.parent_notification_message + else None + ), incident=instance.incident, trigger_action=instance.trigger_action, date_added=instance.date_added, diff --git a/src/sentry/integrations/slack/message_builder/incidents.py b/src/sentry/integrations/slack/message_builder/incidents.py index bf59b5a0bd1018..e8e37ea400742e 100644 --- a/src/sentry/integrations/slack/message_builder/incidents.py +++ b/src/sentry/integrations/slack/message_builder/incidents.py @@ -1,6 +1,6 @@ from datetime import datetime -from sentry.incidents.models import Incident, IncidentStatus +from sentry.incidents.models.incident import Incident, IncidentStatus from sentry.integrations.metric_alerts import incident_attachment_info from sentry.integrations.slack.message_builder import ( INCIDENT_COLOR_MAPPING, diff --git a/src/sentry/integrations/slack/message_builder/metric_alerts.py b/src/sentry/integrations/slack/message_builder/metric_alerts.py index 6f4566ab4886fe..3ea351d89981a2 100644 --- a/src/sentry/integrations/slack/message_builder/metric_alerts.py +++ b/src/sentry/integrations/slack/message_builder/metric_alerts.py @@ -1,4 +1,5 @@ -from sentry.incidents.models import AlertRule, Incident, IncidentStatus +from sentry.incidents.models.alert_rule import AlertRule +from 
sentry.incidents.models.incident import Incident, IncidentStatus from sentry.integrations.metric_alerts import metric_alert_attachment_info from sentry.integrations.slack.message_builder import ( INCIDENT_COLOR_MAPPING, diff --git a/src/sentry/integrations/slack/unfurl/metric_alerts.py b/src/sentry/integrations/slack/unfurl/metric_alerts.py index 166c268421ef2f..460d0df36b13a7 100644 --- a/src/sentry/integrations/slack/unfurl/metric_alerts.py +++ b/src/sentry/integrations/slack/unfurl/metric_alerts.py @@ -12,7 +12,8 @@ from sentry import features from sentry.incidents.charts import build_metric_alert_chart -from sentry.incidents.models import AlertRule, Incident +from sentry.incidents.models.alert_rule import AlertRule +from sentry.incidents.models.incident import Incident from sentry.integrations.slack.message_builder.metric_alerts import SlackMetricAlertMessageBuilder from sentry.models.integrations.integration import Integration from sentry.models.organization import Organization diff --git a/src/sentry/integrations/slack/utils/notifications.py b/src/sentry/integrations/slack/utils/notifications.py index 62f624f68f35d4..dc87acf545f6eb 100644 --- a/src/sentry/integrations/slack/utils/notifications.py +++ b/src/sentry/integrations/slack/utils/notifications.py @@ -8,7 +8,8 @@ from sentry import features from sentry.constants import ObjectStatus from sentry.incidents.charts import build_metric_alert_chart -from sentry.incidents.models import AlertRuleTriggerAction, Incident, IncidentStatus +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction +from sentry.incidents.models.incident import Incident, IncidentStatus from sentry.integrations.repository import get_default_metric_alert_repository from sentry.integrations.repository.metric_alert import ( MetricAlertNotificationMessageRepository, diff --git a/src/sentry/integrations/slack/webhooks/action.py b/src/sentry/integrations/slack/webhooks/action.py index 8dffd76b55c058..b258439d3ad657 100644 --- 
a/src/sentry/integrations/slack/webhooks/action.py +++ b/src/sentry/integrations/slack/webhooks/action.py @@ -776,7 +776,7 @@ def post(self, request: Request) -> Response: use_block_kit = False if len(org_integrations): org_context = organization_service.get_organization_by_id( - id=org_integrations[0].organization_id + id=org_integrations[0].organization_id, include_projects=False, include_teams=False ) if org_context: use_block_kit = any( diff --git a/src/sentry/integrations/slack/webhooks/event.py b/src/sentry/integrations/slack/webhooks/event.py index 2d41db1f02fcf8..3bd70488286be6 100644 --- a/src/sentry/integrations/slack/webhooks/event.py +++ b/src/sentry/integrations/slack/webhooks/event.py @@ -133,7 +133,9 @@ def on_link_shared(self, request: Request, slack_request: SlackDMRequest) -> boo ) organization_id = ois[0].organization_id if len(ois) > 0 else None organization_context = ( - organization_service.get_organization_by_id(id=organization_id, user_id=None) + organization_service.get_organization_by_id( + id=organization_id, user_id=None, include_projects=False, include_teams=False + ) if organization_id else None ) diff --git a/src/sentry/integrations/utils/scope.py b/src/sentry/integrations/utils/scope.py index aa8a1d4dcfe9aa..54b54609d51d02 100644 --- a/src/sentry/integrations/utils/scope.py +++ b/src/sentry/integrations/utils/scope.py @@ -89,7 +89,9 @@ def bind_org_context_from_integration( check_tag_for_scope_bleed("integration_id", integration_id, add_to_scope=False) elif len(org_integrations) == 1: org_integration = org_integrations[0] - org = organization_service.get_organization_by_id(id=org_integration.organization_id) + org = organization_service.get_organization_by_id( + id=org_integration.organization_id, include_teams=False, include_projects=False + ) if org is not None: bind_organization_context(org.organization) else: diff --git a/src/sentry/issues/attributes.py b/src/sentry/issues/attributes.py index 5e0e699662f779..9b15f831558564 100644 
--- a/src/sentry/issues/attributes.py +++ b/src/sentry/issues/attributes.py @@ -6,7 +6,7 @@ import requests import urllib3 -from arroyo import Topic +from arroyo import Topic as ArroyoTopic from arroyo.backends.kafka import KafkaPayload, KafkaProducer, build_kafka_configuration from django.conf import settings from django.db.models import F, Window @@ -16,6 +16,7 @@ from sentry_kafka_schemas.schema_types.group_attributes_v1 import GroupAttributesSnapshot from sentry import options +from sentry.conf.types.kafka_definition import Topic from sentry.models.group import Group from sentry.models.groupassignee import GroupAssignee from sentry.models.groupowner import GroupOwner, GroupOwnerType @@ -44,7 +45,7 @@ class GroupValues: def _get_attribute_snapshot_producer() -> KafkaProducer: - cluster_name = get_topic_definition(settings.KAFKA_GROUP_ATTRIBUTES)["cluster"] + cluster_name = get_topic_definition(Topic.GROUP_ATTRIBUTES)["cluster"] producer_config = get_kafka_producer_cluster_options(cluster_name) producer_config.pop("compression.type", None) producer_config.pop("message.max.bytes", None) @@ -122,7 +123,7 @@ def produce_snapshot_to_kafka(snapshot: GroupAttributesSnapshot) -> None: raise snuba.SnubaError(err) else: payload = KafkaPayload(None, json.dumps(snapshot).encode("utf-8"), []) - _attribute_snapshot_producer.produce(Topic(settings.KAFKA_GROUP_ATTRIBUTES), payload) + _attribute_snapshot_producer.produce(ArroyoTopic(settings.KAFKA_GROUP_ATTRIBUTES), payload) def _retrieve_group_values(group_id: int) -> GroupValues: diff --git a/src/sentry/issues/occurrence_consumer.py b/src/sentry/issues/occurrence_consumer.py index e9699900c3bbe9..e11bbc35c7f81f 100644 --- a/src/sentry/issues/occurrence_consumer.py +++ b/src/sentry/issues/occurrence_consumer.py @@ -8,7 +8,7 @@ import jsonschema import sentry_sdk from django.utils import timezone -from sentry_sdk.tracing import NoOpSpan, Transaction +from sentry_sdk.tracing import NoOpSpan, Span, Transaction from sentry import 
nodestore from sentry.event_manager import GroupInfo @@ -52,7 +52,7 @@ def save_event_from_occurrence( def lookup_event(project_id: int, event_id: str) -> Event: - data = nodestore.get(Event.generate_node_id(project_id, event_id)) + data = nodestore.backend.get(Event.generate_node_id(project_id, event_id)) if data is None: raise EventLookupError(f"Failed to lookup event({event_id}) for project_id({project_id})") event = Event(event_id=event_id, project_id=project_id) @@ -214,8 +214,8 @@ def _get_kwargs(payload: Mapping[str, Any]) -> Mapping[str, Any]: def process_occurrence_message( - message: Mapping[str, Any], txn: Transaction | NoOpSpan -) -> tuple[IssueOccurrence, GroupInfo | None]: + message: Mapping[str, Any], txn: Transaction | NoOpSpan | Span +) -> tuple[IssueOccurrence, GroupInfo | None] | None: with metrics.timer("occurrence_consumer._process_message._get_kwargs"): kwargs = _get_kwargs(message) occurrence_data = kwargs["occurrence_data"] @@ -260,7 +260,9 @@ def process_occurrence_message( return lookup_event_and_process_issue_occurrence(kwargs["occurrence_data"]) -def _process_message(message: Mapping[str, Any]) -> tuple[IssueOccurrence, GroupInfo | None] | None: +def _process_message( + message: Mapping[str, Any] +) -> tuple[IssueOccurrence | None, GroupInfo | None] | None: """ :raises InvalidEventPayloadError: when the message is invalid :raises EventLookupError: when the provided event_id in the message couldn't be found. 
@@ -275,6 +277,9 @@ def _process_message(message: Mapping[str, Any]) -> tuple[IssueOccurrence, Group payload_type = message.get("payload_type", PayloadType.OCCURRENCE.value) if payload_type == PayloadType.STATUS_CHANGE.value: group = process_status_change_message(message, txn) + if not group: + return None + return None, GroupInfo(group=group, is_new=False, is_regression=False) elif payload_type == PayloadType.OCCURRENCE.value: return process_occurrence_message(message, txn) @@ -287,4 +292,4 @@ def _process_message(message: Mapping[str, Any]) -> tuple[IssueOccurrence, Group except (ValueError, KeyError) as e: txn.set_tag("result", "error") raise InvalidEventPayloadError(e) - return + return None diff --git a/src/sentry/issues/producer.py b/src/sentry/issues/producer.py index 5acfef85adcf18..3933af8cc19ae0 100644 --- a/src/sentry/issues/producer.py +++ b/src/sentry/issues/producer.py @@ -1,14 +1,17 @@ from __future__ import annotations import logging +import sys from collections.abc import MutableMapping from typing import Any, cast -from arroyo import Topic +from arroyo import Topic as ArroyoTopic from arroyo.backends.kafka import KafkaPayload, KafkaProducer, build_kafka_configuration from arroyo.types import Message, Value +from confluent_kafka import KafkaException from django.conf import settings +from sentry.conf.types.kafka_definition import Topic from sentry.issues.issue_occurrence import IssueOccurrence from sentry.issues.run import process_message from sentry.issues.status_change_message import StatusChangeMessage @@ -33,7 +36,7 @@ class PayloadType(ValueEqualityEnum): def _get_occurrence_producer() -> KafkaProducer: - cluster_name = get_topic_definition(settings.KAFKA_INGEST_OCCURRENCES)["cluster"] + cluster_name = get_topic_definition(Topic.INGEST_OCCURRENCES)["cluster"] producer_config = get_kafka_producer_cluster_options(cluster_name) producer_config.pop("compression.type", None) producer_config.pop("message.max.bytes", None) @@ -68,7 +71,17 @@ def 
produce_occurrence_to_kafka( process_message(Message(Value(payload=payload, committable={}))) return - _occurrence_producer.produce(Topic(settings.KAFKA_INGEST_OCCURRENCES), payload) + try: + _occurrence_producer.produce(ArroyoTopic(settings.KAFKA_INGEST_OCCURRENCES), payload) + except KafkaException: + logger.exception( + "Failed to send occurrence to issue platform", + extra={ + "total_payload_size": sys.getsizeof(payload), + "total_payload_data_size": sys.getsizeof(payload_data), + "payload_data_key_sizes": {k: sys.getsizeof(v) for k, v in payload_data.items()}, + }, + ) def _prepare_occurrence_message( diff --git a/src/sentry/issues/status_change.py b/src/sentry/issues/status_change.py index 3f2b669a01353d..20595ab88cd651 100644 --- a/src/sentry/issues/status_change.py +++ b/src/sentry/issues/status_change.py @@ -31,18 +31,21 @@ def handle_status_update( is_bulk: bool, status_details: dict[str, Any], acting_user: User | None, - activity_type: str | None, sender: Any, ) -> ActivityInfo: """ Update the status for a list of groups and create entries for Activity and GroupHistory. + This currently handles unresolving or ignoring groups. Returns a tuple of (activity_type, activity_data) for the activity that was created. 
""" activity_data = {} + activity_type = ( + ActivityType.SET_IGNORED.value + if new_status == GroupStatus.IGNORED + else ActivityType.SET_UNRESOLVED.value + ) if new_status == GroupStatus.UNRESOLVED: - activity_type = ActivityType.SET_UNRESOLVED.value - for group in group_list: if group.status == GroupStatus.IGNORED: issue_unignored.send_robust( @@ -64,7 +67,6 @@ def handle_status_update( ignore_duration = ( status_details.pop("ignoreDuration", None) or status_details.pop("snoozeDuration", None) ) or None - activity_type = ActivityType.SET_IGNORED.value activity_data = { "ignoreCount": status_details.get("ignoreCount", None), "ignoreDuration": ignore_duration, diff --git a/src/sentry/issues/status_change_consumer.py b/src/sentry/issues/status_change_consumer.py index f5606f4d4d998b..0b2230ca1f67ff 100644 --- a/src/sentry/issues/status_change_consumer.py +++ b/src/sentry/issues/status_change_consumer.py @@ -5,7 +5,7 @@ from collections.abc import Iterable, Mapping, Sequence from typing import Any -from sentry_sdk.tracing import NoOpSpan, Transaction +from sentry_sdk.tracing import NoOpSpan, Span, Transaction from sentry.issues.escalating import manage_issue_states from sentry.issues.status_change_message import StatusChangeMessageData @@ -174,7 +174,7 @@ def _get_status_change_kwargs(payload: Mapping[str, Any]) -> Mapping[str, Any]: def process_status_change_message( - message: Mapping[str, Any], txn: Transaction | NoOpSpan + message: Mapping[str, Any], txn: Transaction | NoOpSpan | Span ) -> Group | None: with metrics.timer("occurrence_consumer._process_message.status_change._get_kwargs"): kwargs = _get_status_change_kwargs(message) diff --git a/src/sentry/lang/native/symbolicator.py b/src/sentry/lang/native/symbolicator.py index 6915afb5867f52..cc668e112fcda5 100644 --- a/src/sentry/lang/native/symbolicator.py +++ b/src/sentry/lang/native/symbolicator.py @@ -16,6 +16,7 @@ from sentry import options from sentry.lang.native.sources import ( 
get_internal_artifact_lookup_source, + get_internal_source, get_scraping_config, sources_for_symbolication, ) @@ -150,7 +151,10 @@ def process_minidump(self, minidump): } res = self._process( - "process_minidump", "minidump", data=data, files={"upload_file_minidump": minidump} + "process_minidump", + "minidump", + data=data, + files={"upload_file_minidump": minidump}, ) return process_response(res) @@ -176,7 +180,10 @@ def process_payload(self, stacktraces, modules, signal=None, apply_source_contex scraping_config = get_scraping_config(self.project) json = { "sources": sources, - "options": {"dif_candidates": True, "apply_source_context": apply_source_context}, + "options": { + "dif_candidates": True, + "apply_source_context": apply_source_context, + }, "stacktraces": stacktraces, "modules": modules, "scraping": scraping_config, @@ -207,6 +214,39 @@ def process_js(self, stacktraces, modules, release, dist, apply_source_context=T return self._process("symbolicate_js_stacktraces", "symbolicate-js", json=json) + def process_jvm( + self, + exceptions, + stacktraces, + modules, + release_package, + apply_source_context=True, + ): + """ + Process a JVM event by remapping its frames and exceptions with + ProGuard. + + :param exceptions: The event's exceptions. These must contain a `type` and a `module`. + :param stacktraces: The event's stacktraces. Frames must contain a `function` and a `module`. + :param modules: ProGuard modules to use for deobfuscation. They must contain a `uuid`. + :param release_package: The name of the release's package. This is optional. + :param apply_source_context: Whether to add source context to frames. 
+ """ + source = get_internal_source(self.project) + + json = { + "sources": [source], + "exceptions": exceptions, + "stacktraces": stacktraces, + "modules": modules, + "options": {"apply_source_context": apply_source_context}, + } + + if release_package is not None: + json["release_package"] = release_package + + return self._process("symbolicate_jvm_stacktraces", "symbolicate-jvm", json=json) + class TaskIdNotFound(Exception): pass diff --git a/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py b/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py index 70454ff2713be0..f0f9eded8e110b 100644 --- a/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py +++ b/src/sentry/migrations/0001_squashed_0484_break_org_member_user_fk.py @@ -9158,7 +9158,9 @@ class Migration(CheckedMigration): ( "environment", sentry.db.models.fields.foreignkey.FlexibleForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="sentry.Environment" + on_delete=django.db.models.deletion.CASCADE, + to="sentry.Environment", + db_constraint=False, ), ), ( diff --git a/src/sentry/migrations/0361_monitor_environment.py b/src/sentry/migrations/0361_monitor_environment.py index c5bb4e0ea00c29..829c4cee7fe621 100644 --- a/src/sentry/migrations/0361_monitor_environment.py +++ b/src/sentry/migrations/0361_monitor_environment.py @@ -43,7 +43,9 @@ class Migration(CheckedMigration): ( "environment", sentry.db.models.fields.foreignkey.FlexibleForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="sentry.Environment" + on_delete=django.db.models.deletion.CASCADE, + to="sentry.Environment", + db_constraint=False, ), ), ( diff --git a/src/sentry/migrations/0651_enable_activated_alert_rules.py b/src/sentry/migrations/0651_enable_activated_alert_rules.py index 04968bb479fcda..043020cb96ab70 100644 --- a/src/sentry/migrations/0651_enable_activated_alert_rules.py +++ b/src/sentry/migrations/0651_enable_activated_alert_rules.py @@ -6,7 +6,7 @@ import 
sentry.db.models.fields.bounded import sentry.db.models.fields.foreignkey -import sentry.incidents.models +import sentry.incidents.models.alert_rule from sentry.new_migrations.migrations import CheckedMigration @@ -45,7 +45,7 @@ class Migration(CheckedMigration): model_name="alertrule", name="monitor_type", field=models.IntegerField( - default=sentry.incidents.models.AlertRuleMonitorType.CONTINUOUS.value + default=sentry.incidents.models.alert_rule.AlertRuleMonitorType.CONTINUOUS.value ), ), ], diff --git a/src/sentry/migrations/0660_fix_cron_monitor_invalid_orgs.py b/src/sentry/migrations/0660_fix_cron_monitor_invalid_orgs.py index b7b49e554bf5b0..a23e9a672ef6a1 100644 --- a/src/sentry/migrations/0660_fix_cron_monitor_invalid_orgs.py +++ b/src/sentry/migrations/0660_fix_cron_monitor_invalid_orgs.py @@ -17,8 +17,13 @@ def fix_cron_monitor_invalid_orgs(apps, schema_editor) -> None: continue if project.organization_id != monitor.organization_id: - monitor.organization_id = project.organization_id - monitor.save(update_fields=["organization_id"]) + if Monitor.objects.filter(organization_id=project.organization_id, slug=monitor.slug): + # There are a small number of these and due to the way ingest works they can't + # receive checkins, so they're totally broken. Just delete. 
+ monitor.delete() + else: + monitor.organization_id = project.organization_id + monitor.save(update_fields=["organization_id", "slug"]) class Migration(CheckedMigration): diff --git a/src/sentry/migrations/0661_artifactbundleindex_cleanup_step2.py b/src/sentry/migrations/0661_artifactbundleindex_cleanup_step2.py new file mode 100644 index 00000000000000..e5c5f50d959fe4 --- /dev/null +++ b/src/sentry/migrations/0661_artifactbundleindex_cleanup_step2.py @@ -0,0 +1,61 @@ +# Generated by Django 5.0.2 on 2024-03-04 10:43 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. For + # the most part, this should only be used for operations where it's safe to run the migration + # after your code has deployed. So this should not be used for most operations that alter the + # schema of a table. + # Here are some things that make sense to mark as dangerous: + # - Large data migrations. Typically we want these to be run manually by ops so that they can + # be monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # have ops run this and not block the deploy. Note that while adding an index is a schema + # change, it's completely safe to run the operation after the code has deployed. 
+ is_dangerous = False + + dependencies = [ + ("sentry", "0660_fix_cron_monitor_invalid_orgs"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[], + state_operations=[ + migrations.RemoveField( + model_name="flatfileindexstate", + name="flat_file_index", + ), + migrations.AlterUniqueTogether( + name="flatfileindexstate", + unique_together=None, + ), + migrations.RemoveField( + model_name="flatfileindexstate", + name="artifact_bundle", + ), + migrations.RemoveField( + model_name="artifactbundleindex", + name="date_last_modified", + ), + migrations.RemoveField( + model_name="artifactbundleindex", + name="dist_name", + ), + migrations.RemoveField( + model_name="artifactbundleindex", + name="release_name", + ), + migrations.DeleteModel( + name="ArtifactBundleFlatFileIndex", + ), + migrations.DeleteModel( + name="FlatFileIndexState", + ), + ], + ) + ] diff --git a/src/sentry/migrations/0662_monitor_drop_last_state_change.py b/src/sentry/migrations/0662_monitor_drop_last_state_change.py new file mode 100644 index 00000000000000..46974b99824d80 --- /dev/null +++ b/src/sentry/migrations/0662_monitor_drop_last_state_change.py @@ -0,0 +1,32 @@ +# Generated by Django 5.0.2 on 2024-03-05 21:47 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. For + # the most part, this should only be used for operations where it's safe to run the migration + # after your code has deployed. So this should not be used for most operations that alter the + # schema of a table. + # Here are some things that make sense to mark as dangerous: + # - Large data migrations. Typically we want these to be run manually by ops so that they can + # be monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. 
Since this can take a long time, we'd generally prefer to + # have ops run this and not block the deploy. Note that while adding an index is a schema + # change, it's completely safe to run the operation after the code has deployed. + is_dangerous = False + + dependencies = [ + ("sentry", "0661_artifactbundleindex_cleanup_step2"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[], + state_operations=[ + migrations.RemoveField(model_name="monitorenvironment", name="last_state_change"), + ], + ) + ] diff --git a/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py b/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py new file mode 100644 index 00000000000000..895c1324341777 --- /dev/null +++ b/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py @@ -0,0 +1,55 @@ +# Generated by Django 5.0.2 on 2024-03-04 10:50 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. For + # the most part, this should only be used for operations where it's safe to run the migration + # after your code has deployed. So this should not be used for most operations that alter the + # schema of a table. + # Here are some things that make sense to mark as dangerous: + # - Large data migrations. Typically we want these to be run manually by ops so that they can + # be monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # have ops run this and not block the deploy. Note that while adding an index is a schema + # change, it's completely safe to run the operation after the code has deployed. 
+ is_dangerous = False + + dependencies = [ + ("sentry", "0662_monitor_drop_last_state_change"), + ] + + operations = [ + migrations.RunSQL( + """ + DROP TABLE "sentry_artifactbundleflatfileindex"; + """, + # We just create a fake table here so that the DROP will work if we roll back the migration. + reverse_sql="CREATE TABLE sentry_artifactbundleflatfileindex (fake_col int);", + hints={"tables": ["sentry_artifactbundleflatfileindex"]}, + ), + migrations.RunSQL( + """ + DROP TABLE "sentry_flatfileindexstate"; + """, + # We just create a fake table here so that the DROP will work if we roll back the migration. + reverse_sql="CREATE TABLE sentry_flatfileindexstate (fake_col int);", + hints={"tables": ["sentry_flatfileindexstate"]}, + ), + migrations.RunSQL( + """ + ALTER TABLE "sentry_artifactbundleindex" DROP COLUMN "release_name"; + ALTER TABLE "sentry_artifactbundleindex" DROP COLUMN "dist_name"; + ALTER TABLE "sentry_artifactbundleindex" DROP COLUMN "date_last_modified"; + """, + reverse_sql=""" + ALTER TABLE "sentry_artifactbundleindex" ADD COLUMN "release_name" varchar(250) NULL; + ALTER TABLE "sentry_artifactbundleindex" ADD COLUMN "dist_name" varchar(64) NULL; + ALTER TABLE "sentry_artifactbundleindex" ADD COLUMN "date_last_modified" timestamptz NULL; + """, + hints={"tables": ["sentry_artifactbundleindex"]}, + ), + ] diff --git a/src/sentry/migrations/0664_create_new_broken_monitor_detection_table.py b/src/sentry/migrations/0664_create_new_broken_monitor_detection_table.py new file mode 100644 index 00000000000000..8ba0f377138103 --- /dev/null +++ b/src/sentry/migrations/0664_create_new_broken_monitor_detection_table.py @@ -0,0 +1,51 @@ +# Generated by Django 5.0.2 on 2024-03-06 18:54 + +import django.db.models.deletion +from django.db import migrations, models + +import sentry.db.models.fields.bounded +import sentry.db.models.fields.foreignkey +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This 
flag is used to mark that a migration shouldn't be automatically run in production. For + # the most part, this should only be used for operations where it's safe to run the migration + # after your code has deployed. So this should not be used for most operations that alter the + # schema of a table. + # Here are some things that make sense to mark as dangerous: + # - Large data migrations. Typically we want these to be run manually by ops so that they can + # be monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # have ops run this and not block the deploy. Note that while adding an index is a schema + # change, it's completely safe to run the operation after the code has deployed. + is_dangerous = False + + dependencies = [ + ("sentry", "0663_artifactbundleindex_cleanup_step3"), + ] + + operations = [ + migrations.CreateModel( + name="MonitorEnvBrokenDetection", + fields=[ + ( + "id", + sentry.db.models.fields.bounded.BoundedBigAutoField( + primary_key=True, serialize=False + ), + ), + ("detection_timestamp", models.DateTimeField(auto_now_add=True)), + ("user_notified_timestamp", models.DateTimeField(db_index=True, null=True)), + ( + "monitor_incident", + sentry.db.models.fields.foreignkey.FlexibleForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="sentry.monitorincident" + ), + ), + ], + options={ + "db_table": "sentry_monitorenvbrokendetection", + }, + ), + ] diff --git a/src/sentry/migrations/0665_monitor_drop_last_state_change_db.py b/src/sentry/migrations/0665_monitor_drop_last_state_change_db.py new file mode 100644 index 00000000000000..290e08308f35b3 --- /dev/null +++ b/src/sentry/migrations/0665_monitor_drop_last_state_change_db.py @@ -0,0 +1,40 @@ +# Generated by Django 5.0.2 on 2024-03-06 18:06 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class 
Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. For + # the most part, this should only be used for operations where it's safe to run the migration + # after your code has deployed. So this should not be used for most operations that alter the + # schema of a table. + # Here are some things that make sense to mark as dangerous: + # - Large data migrations. Typically we want these to be run manually by ops so that they can + # be monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # have ops run this and not block the deploy. Note that while adding an index is a schema + # change, it's completely safe to run the operation after the code has deployed. + is_dangerous = False + + dependencies = [ + ("sentry", "0664_create_new_broken_monitor_detection_table"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[ + migrations.RunSQL( + """ + ALTER TABLE "sentry_monitorenvironment" DROP COLUMN "last_state_change"; + """, + reverse_sql=""" + ALTER TABLE "sentry_monitorenvironment" ADD COLUMN "last_state_change" timestamptz; + """, + hints={"tables": ["sentry_monitorenvironment"]}, + ) + ], + state_operations=[], + ) + ] diff --git a/src/sentry/migrations/0666_monitor_incident_default_grouphash.py b/src/sentry/migrations/0666_monitor_incident_default_grouphash.py new file mode 100644 index 00000000000000..053a8135aedb07 --- /dev/null +++ b/src/sentry/migrations/0666_monitor_incident_default_grouphash.py @@ -0,0 +1,32 @@ +# Generated by Django 5.0.2 on 2024-03-06 21:13 + +from django.db import migrations, models + +import sentry.monitors.models +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. 
For + # the most part, this should only be used for operations where it's safe to run the migration + # after your code has deployed. So this should not be used for most operations that alter the + # schema of a table. + # Here are some things that make sense to mark as dangerous: + # - Large data migrations. Typically we want these to be run manually by ops so that they can + # be monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # have ops run this and not block the deploy. Note that while adding an index is a schema + # change, it's completely safe to run the operation after the code has deployed. + is_dangerous = False + + dependencies = [ + ("sentry", "0665_monitor_drop_last_state_change_db"), + ] + + operations = [ + migrations.AlterField( + model_name="monitorincident", + name="grouphash", + field=models.CharField(default=sentry.monitors.models.default_grouphash, max_length=32), + ), + ] diff --git a/src/sentry/models/artifactbundle.py b/src/sentry/models/artifactbundle.py index 7eea9de67834ee..93a4bc253fd01f 100644 --- a/src/sentry/models/artifactbundle.py +++ b/src/sentry/models/artifactbundle.py @@ -134,40 +134,6 @@ def delete_file_for_artifact_bundle(instance, **kwargs): post_delete.connect(delete_file_for_artifact_bundle, sender=ArtifactBundle) -@region_silo_only_model -class ArtifactBundleFlatFileIndex(Model): - __relocation_scope__ = RelocationScope.Excluded - - project_id = BoundedBigIntegerField(db_index=True) - release_name = models.CharField(max_length=250) - dist_name = models.CharField(max_length=64, default=NULL_STRING) - date_added = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_artifactbundleflatfileindex" - - unique_together = (("project_id", "release_name", "dist_name"),) - - -@region_silo_only_model -class FlatFileIndexState(Model): - __relocation_scope__ = 
RelocationScope.Excluded - - flat_file_index = FlexibleForeignKey("sentry.ArtifactBundleFlatFileIndex", db_constraint=False) - artifact_bundle = FlexibleForeignKey("sentry.ArtifactBundle", db_constraint=False) - indexing_state = models.IntegerField( - choices=ArtifactBundleIndexingState.choices(), db_index=True - ) - date_added = models.DateTimeField(default=timezone.now) - - class Meta: - app_label = "sentry" - db_table = "sentry_flatfileindexstate" - - unique_together = (("flat_file_index", "artifact_bundle"),) - - @region_silo_only_model class ArtifactBundleIndex(Model): __relocation_scope__ = RelocationScope.Excluded @@ -177,13 +143,6 @@ class ArtifactBundleIndex(Model): url = models.TextField() date_added = models.DateTimeField(default=timezone.now) - # TODO: legacy fields: - # These will eventually be removed in a migration, as they can be joined - # via the `{Release,}ArtifactBundle` tables. - release_name = models.CharField(max_length=250, null=True) - dist_name = models.CharField(max_length=64, null=True, default=NULL_STRING) - date_last_modified = models.DateTimeField(null=True, default=timezone.now) - class Meta: app_label = "sentry" db_table = "sentry_artifactbundleindex" diff --git a/src/sentry/models/files/abstractfileblob.py b/src/sentry/models/files/abstractfileblob.py index 6f320c4345b2de..6598e1da17a4c5 100644 --- a/src/sentry/models/files/abstractfileblob.py +++ b/src/sentry/models/files/abstractfileblob.py @@ -15,10 +15,9 @@ from sentry.db.models import BoundedPositiveIntegerField, Model from sentry.models.files.abstractfileblobowner import AbstractFileBlobOwner from sentry.models.files.utils import ( + get_and_optionally_update_blob, get_size_and_checksum, get_storage, - lock_blob, - locked_blob, nooplogger, ) from sentry.utils import metrics @@ -68,10 +67,9 @@ def from_files(cls, files, organization=None, logger=nooplogger): checksums_seen = set() blobs_to_save = [] - locks = set() semaphore = Semaphore(value=MULTI_BLOB_UPLOAD_CONCURRENCY) - 
def _upload_and_pend_chunk(fileobj, size, checksum, lock): + def _upload_and_pend_chunk(fileobj, size, checksum): logger.debug( "FileBlob.from_files._upload_and_pend_chunk.start", extra={"checksum": checksum, "size": size}, @@ -80,7 +78,7 @@ def _upload_and_pend_chunk(fileobj, size, checksum, lock): blob.path = cls.generate_unique_path() storage = get_storage(cls._storage_config()) storage.save(blob.path, fileobj) - blobs_to_save.append((blob, lock)) + blobs_to_save.append(blob) metrics.distribution( "filestore.blob-size", size, tags={"function": "from_files"}, unit="byte" ) @@ -123,13 +121,11 @@ def _save_blob(blob): def _flush_blobs(): while True: try: - blob, lock = blobs_to_save.pop() + blob = blobs_to_save.pop() except IndexError: break _save_blob(blob) - lock.__exit__(None, None, None) - locks.discard(lock) semaphore.release() try: @@ -152,19 +148,13 @@ def _flush_blobs(): continue checksums_seen.add(checksum) - # Check if we need to lock the blob. If we get a result back + # Check if we need to upload the blob. If we get a result back # here it means the blob already exists. - lock = locked_blob(cls, size, checksum, logger=logger) - existing = lock.__enter__() + existing = get_and_optionally_update_blob(cls, checksum) if existing is not None: - lock.__exit__(None, None, None) _ensure_blob_owned(existing) continue - # Remember the lock to force unlock all at the end if we - # encounter any difficulties. - locks.add(lock) - # Otherwise we leave the blob locked and submit the task. # We use the semaphore to ensure we never schedule too # many. The upload will be done with a certain amount @@ -172,16 +162,11 @@ def _flush_blobs(): # `_flush_blobs` call will take all those uploaded # blobs and associate them with the database. 
semaphore.acquire() - exe.submit(_upload_and_pend_chunk(fileobj, size, checksum, lock)) + exe.submit(_upload_and_pend_chunk(fileobj, size, checksum)) logger.debug("FileBlob.from_files.end", extra={"checksum": reference_checksum}) _flush_blobs() finally: - for lock in locks: - try: - lock.__exit__(None, None, None) - except Exception: - pass logger.debug("FileBlob.from_files.end") @classmethod @@ -194,24 +179,22 @@ def from_file(cls, fileobj, logger=nooplogger) -> Self: size, checksum = get_size_and_checksum(fileobj) - # TODO(dcramer): the database here is safe, but if this lock expires - # and duplicate files are uploaded then we need to prune one - with locked_blob(cls, size, checksum, logger=logger) as existing: - if existing is not None: - return existing + existing = get_and_optionally_update_blob(cls, checksum) + if existing is not None: + return existing - blob = cls(size=size, checksum=checksum) - blob.path = cls.generate_unique_path() - storage = get_storage(cls._storage_config()) - storage.save(blob.path, fileobj) - try: - blob.save() - except IntegrityError: - # see `_save_blob` above - metrics.incr("filestore.upload_race", sample_rate=1.0) - saved_path = blob.path - blob = cls.objects.get(checksum=checksum) - storage.delete(saved_path) + blob = cls(size=size, checksum=checksum) + blob.path = cls.generate_unique_path() + storage = get_storage(cls._storage_config()) + storage.save(blob.path, fileobj) + try: + blob.save() + except IntegrityError: + # see `_save_blob` above + metrics.incr("filestore.upload_race", sample_rate=1.0) + saved_path = blob.path + blob = cls.objects.get(checksum=checksum) + storage.delete(saved_path) metrics.distribution("filestore.blob-size", size, unit="byte") logger.debug("FileBlob.from_file.end") @@ -235,11 +218,7 @@ def delete(self, *args, **kwargs): self.DELETE_FILE_TASK.apply_async( kwargs={"path": self.path, "checksum": self.checksum}, countdown=60 ) - lock = lock_blob( - self.checksum, "fileblob_upload_delete", 
metric_instance="lock.fileblob.delete" - ) - with lock: - super().delete(*args, **kwargs) + super().delete(*args, **kwargs) def getfile(self): """ diff --git a/src/sentry/models/files/utils.py b/src/sentry/models/files/utils.py index 93255c3db764af..e163199521fab2 100644 --- a/src/sentry/models/files/utils.py +++ b/src/sentry/models/files/utils.py @@ -2,29 +2,20 @@ import os import time -from contextlib import contextmanager from datetime import timedelta from hashlib import sha1 +from typing import Any from django.conf import settings from django.utils import timezone -from rediscluster import RedisCluster -from sentry import options -from sentry.locks import locks -from sentry.utils import redis from sentry.utils.imports import import_string -from sentry.utils.retries import TimedRetryPolicy ONE_DAY = 60 * 60 * 24 ONE_DAY_AND_A_HALF = int(ONE_DAY * 1.5) HALF_DAY = timedelta(hours=12) -UPLOAD_RETRY_TIME = getattr(settings, "SENTRY_UPLOAD_RETRY_TIME", 60) # 1min - DEFAULT_BLOB_SIZE = 1024 * 1024 # one mb -CHUNK_STATE_HEADER = "__state" - MAX_FILE_SIZE = 2**31 # 2GB is the maximum offset supported by fileblob @@ -54,65 +45,23 @@ def get_size_and_checksum(fileobj, logger=nooplogger): return size, checksum.hexdigest() -@contextmanager -def lock_blob(checksum: str, name: str, metric_instance: str | None = None): - if not options.get("fileblob.upload.use_lock"): - yield - return - - lock = locks.get(f"fileblob:upload:{checksum}", duration=UPLOAD_RETRY_TIME, name=name) - with TimedRetryPolicy(UPLOAD_RETRY_TIME, metric_instance=metric_instance)(lock.acquire): - yield - - -def _get_redis_for_blobs() -> RedisCluster: - cluster_key = settings.SENTRY_DEBUG_FILES_REDIS_CLUSTER - return redis.redis_clusters.get(cluster_key) # type: ignore[return-value] - - -def _redis_key_for_blob(file_blob_model, checksum): - return f"fileblob:{file_blob_model.__name__}:{checksum}" - - -def _get_cached_blob_id(file_blob_model, checksum): - if not 
options.get("fileblob.upload.use_blobid_cache"): - return None - redis = _get_redis_for_blobs() - if id := redis.get(_redis_key_for_blob(file_blob_model, checksum)): - return int(id) - return None - - -def cache_blob_id(file_blob_model, checksum, id): - if not options.get("fileblob.upload.use_blobid_cache"): - return - redis = _get_redis_for_blobs() - redis.set(_redis_key_for_blob(file_blob_model, checksum), str(id), ex=HALF_DAY.seconds) - +def get_and_optionally_update_blob(file_blob_model: Any, checksum: str): + """ + Returns the `FileBlob` (actually generic `file_blob_model`) identified by its `checksum`. + This will also bump its `timestamp` in a debounced fashion, + in order to prevent it from being cleaned up. + """ + try: + existing = file_blob_model.objects.get(checksum=checksum) -@contextmanager -def locked_blob(file_blob_model, size, checksum, logger=nooplogger): - if cached_id := _get_cached_blob_id(file_blob_model, checksum): - yield file_blob_model(id=cached_id, size=size, checksum=checksum) - return + now = timezone.now() + threshold = now - HALF_DAY + if existing.timestamp <= threshold: + existing.update(timestamp=now) + except file_blob_model.DoesNotExist: + existing = None - logger.debug("locked_blob.start", extra={"checksum": checksum}) - lock = lock_blob(checksum, "fileblob_upload_model", metric_instance="lock.fileblob.upload") - with lock: - logger.debug("locked_blob.acquired", extra={"checksum": checksum}) - # test for presence - try: - existing = file_blob_model.objects.get(checksum=checksum) - cache_blob_id(file_blob_model, checksum, existing.id) - - now = timezone.now() - threshold = now - HALF_DAY - if existing.timestamp <= threshold: - existing.update(timestamp=now) - except file_blob_model.DoesNotExist: - existing = None - yield existing - logger.debug("locked_blob.end", extra={"checksum": checksum}) + return existing class AssembleChecksumMismatch(Exception): diff --git a/src/sentry/models/project.py b/src/sentry/models/project.py index 
f532388d3fc89d..b09e78cbdd25dd 100644 --- a/src/sentry/models/project.py +++ b/src/sentry/models/project.py @@ -312,8 +312,9 @@ def __str__(self): def next_short_id(self, delta: int = 1) -> int: from sentry.models.counter import Counter - with sentry_sdk.start_span(op="project.next_short_id") as span, metrics.timer( - "project.next_short_id" + with ( + sentry_sdk.start_span(op="project.next_short_id") as span, + metrics.timer("project.next_short_id"), ): span.set_data("project_id", self.id) span.set_data("project_slug", self.slug) @@ -412,7 +413,7 @@ def get_full_name(self): return self.slug def transfer_to(self, organization): - from sentry.incidents.models import AlertRule + from sentry.incidents.models.alert_rule import AlertRule from sentry.models.actor import ACTOR_TYPES from sentry.models.environment import Environment, EnvironmentProject from sentry.models.integrations.external_issue import ExternalIssue @@ -547,7 +548,7 @@ def add_team(self, team): return True def remove_team(self, team): - from sentry.incidents.models import AlertRule + from sentry.incidents.models.alert_rule import AlertRule from sentry.models.projectteam import ProjectTeam from sentry.models.rule import Rule diff --git a/src/sentry/models/releases/release_project.py b/src/sentry/models/releases/release_project.py index 43bc412a0ed269..4e3a38a50148dc 100644 --- a/src/sentry/models/releases/release_project.py +++ b/src/sentry/models/releases/release_project.py @@ -49,7 +49,7 @@ def _subscribe_project_to_alert_rule( NOTE: import AlertRule model here to avoid circular dependency TODO: move once AlertRule has been split into separate subdirectory files """ - from sentry.incidents.models import AlertRule + from sentry.incidents.models.alert_rule import AlertRule query_extra = f"release:{release.version} AND event.timestamp:>{timezone.now().isoformat()}" return AlertRule.objects.conditionally_subscribe_project_to_alert_rules( diff --git a/src/sentry/monitors/endpoints/base.py 
b/src/sentry/monitors/endpoints/base.py index 7fad81aa44ccc7..7a219b63cb10f0 100644 --- a/src/sentry/monitors/endpoints/base.py +++ b/src/sentry/monitors/endpoints/base.py @@ -24,6 +24,8 @@ from sentry.monitors.models import CheckInStatus, Monitor, MonitorCheckIn, MonitorEnvironment from sentry.utils.sdk import bind_organization_context, configure_scope +DEPRECATED_INGEST_API_MESSAGE = "We have removed this deprecated API. Please migrate to using DSN instead: https://docs.sentry.io/product/crons/legacy-endpoint-migration/#am-i-using-legacy-endpoints" + class OrganizationMonitorPermission(OrganizationPermission): scope_map = { diff --git a/src/sentry/monitors/endpoints/monitor_ingest_checkin_details.py b/src/sentry/monitors/endpoints/monitor_ingest_checkin_details.py index da7486bb298845..3918e78013dd1d 100644 --- a/src/sentry/monitors/endpoints/monitor_ingest_checkin_details.py +++ b/src/sentry/monitors/endpoints/monitor_ingest_checkin_details.py @@ -27,7 +27,7 @@ from ... import features from ...api.exceptions import ResourceDoesNotExist -from .base import MonitorIngestEndpoint +from .base import DEPRECATED_INGEST_API_MESSAGE, MonitorIngestEndpoint @region_silo_endpoint @@ -72,7 +72,7 @@ def put( the most recent (by creation date) check-in which is still mutable (not marked as finished). 
""" if features.has("organizations:crons-disable-ingest-endpoints", project.organization): - raise ResourceDoesNotExist + raise ResourceDoesNotExist(detail=DEPRECATED_INGEST_API_MESSAGE) if checkin.status in CheckInStatus.FINISHED_VALUES: return self.respond(status=400) diff --git a/src/sentry/monitors/endpoints/monitor_ingest_checkin_index.py b/src/sentry/monitors/endpoints/monitor_ingest_checkin_index.py index fff161979939e0..9c4898790e77f1 100644 --- a/src/sentry/monitors/endpoints/monitor_ingest_checkin_index.py +++ b/src/sentry/monitors/endpoints/monitor_ingest_checkin_index.py @@ -39,7 +39,7 @@ from sentry.utils import metrics from ...api.exceptions import ResourceDoesNotExist -from .base import MonitorIngestEndpoint +from .base import DEPRECATED_INGEST_API_MESSAGE, MonitorIngestEndpoint logger = logging.getLogger(__name__) @@ -104,7 +104,7 @@ def post( Note: If a DSN is utilized for authentication, the response will be limited in details. """ if features.has("organizations:crons-disable-ingest-endpoints", project.organization): - raise ResourceDoesNotExist + raise ResourceDoesNotExist(detail=DEPRECATED_INGEST_API_MESSAGE) if monitor and monitor.status in [ ObjectStatus.PENDING_DELETION, diff --git a/src/sentry/monitors/logic/mark_failed.py b/src/sentry/monitors/logic/mark_failed.py index 839aa1a6dc2a42..bb4479d5ce88eb 100644 --- a/src/sentry/monitors/logic/mark_failed.py +++ b/src/sentry/monitors/logic/mark_failed.py @@ -7,7 +7,6 @@ from django.db.models import Q from sentry import features -from sentry.grouping.utils import hash_from_values from sentry.issues.grouptype import ( MonitorCheckInFailure, MonitorCheckInMissed, @@ -70,15 +69,6 @@ def mark_failed( "next_checkin_latest": next_checkin_latest, } - # Additionally update status when not using thresholds. The threshold based - # failure will only update status once it has passed the threshold. 
- if not failure_issue_threshold: - failed_status_map = { - CheckInStatus.MISSED: MonitorStatus.MISSED_CHECKIN, - CheckInStatus.TIMEOUT: MonitorStatus.TIMEOUT, - } - field_updates["status"] = failed_status_map.get(failed_checkin.status, MonitorStatus.ERROR) - affected = monitors_to_update.update(**field_updates) # If we did not update the monitor environment it means there was a newer @@ -95,12 +85,11 @@ def mark_failed( monitor_env.refresh_from_db() # Create incidents + issues - use_issue_platform = False try: organization = Organization.objects.get_from_cache(id=monitor_env.monitor.organization_id) use_issue_platform = features.has("organizations:issue-platform", organization=organization) except Organization.DoesNotExist: - pass + use_issue_platform = False if use_issue_platform: return mark_failed_threshold(failed_checkin, failure_issue_threshold) @@ -119,24 +108,7 @@ def mark_failed_threshold(failed_checkin: MonitorCheckIn, failure_issue_threshol # check to see if we need to update the status if monitor_env.status in [MonitorStatus.OK, MonitorStatus.ACTIVE]: - # evaluation logic for multiple check-ins - if failure_issue_threshold > 1: - # reverse the list after slicing in order to start with oldest check-in - # use .values() to speed up query - previous_checkins = list( - reversed( - MonitorCheckIn.objects.filter( - monitor_environment=monitor_env, date_added__lte=failed_checkin.date_added - ) - .order_by("-date_added") - .values("id", "date_added", "status")[:failure_issue_threshold] - ) - ) - # check for any successful previous check-in - if any([checkin["status"] == CheckInStatus.OK for checkin in previous_checkins]): - return False - # if threshold is 1, just use the most recent check-in - else: + if failure_issue_threshold == 1: previous_checkins = [ { "id": failed_checkin.id, @@ -144,26 +116,47 @@ def mark_failed_threshold(failed_checkin: MonitorCheckIn, failure_issue_threshol "status": failed_checkin.status, } ] + else: + previous_checkins = ( + # Using 
.values for performance reasons + MonitorCheckIn.objects.filter( + monitor_environment=monitor_env, date_added__lte=failed_checkin.date_added + ) + .order_by("-date_added") + .values("id", "date_added", "status") + ) + + # reverse the list after slicing in order to start with oldest check-in + previous_checkins = list(reversed(previous_checkins[:failure_issue_threshold])) + + # If we have any successful check-ins within the threshold of + # commits we have NOT reached an incident state + if any([checkin["status"] == CheckInStatus.OK for checkin in previous_checkins]): + return False # change monitor status + update fingerprint timestamp monitor_env.status = MonitorStatus.ERROR - monitor_env.last_state_change = monitor_env.last_checkin - monitor_env.save(update_fields=("status", "last_state_change")) - - # Do not create incident if monitor is muted - if not monitor_muted: - starting_checkin = previous_checkins[0] - - # for new incidents, generate a new hash from a uuid to use - fingerprint = hash_from_values([uuid.uuid4()]) - - MonitorIncident.objects.create( - monitor=monitor_env.monitor, - monitor_environment=monitor_env, - starting_checkin_id=starting_checkin["id"], - starting_timestamp=starting_checkin["date_added"], - grouphash=fingerprint, - ) + monitor_env.save(update_fields=("status",)) + + # Do not create incident if monitor is muted. This check happens late + # as we still want the status to have been updated + if monitor_muted: + return True + + starting_checkin = previous_checkins[0] + + # for new incidents, generate a uuid as the fingerprint. 
This is + # not deterministic of any property of the incident and is simply + # used to associate the incident to it's event occurrences + fingerprint = uuid.uuid4().hex + + MonitorIncident.objects.create( + monitor=monitor_env.monitor, + monitor_environment=monitor_env, + starting_checkin_id=starting_checkin["id"], + starting_timestamp=starting_checkin["date_added"], + grouphash=fingerprint, + ) elif monitor_env.status in [ MonitorStatus.ERROR, MonitorStatus.MISSED_CHECKIN, @@ -189,9 +182,11 @@ def mark_failed_threshold(failed_checkin: MonitorCheckIn, failure_issue_threshol if monitor_muted: return True - for previous_checkin in previous_checkins: - checkin_from_db = MonitorCheckIn.objects.get(id=previous_checkin["id"]) - create_issue_platform_occurrence(checkin_from_db, fingerprint) + # Do not create event/occurrence if we don't have a fingerprint + if fingerprint: + checkins = MonitorCheckIn.objects.filter(id__in=[c["id"] for c in previous_checkins]) + for previous_checkin in checkins: + create_issue_platform_occurrence(previous_checkin, fingerprint) monitor_environment_failed.send(monitor_environment=monitor_env, sender=type(monitor_env)) @@ -257,7 +252,7 @@ def create_legacy_event(failed_checkin: MonitorCheckIn): def create_issue_platform_occurrence( failed_checkin: MonitorCheckIn, - fingerprint=None, + fingerprint: str, ): from sentry.issues.issue_occurrence import IssueEvidence, IssueOccurrence from sentry.issues.producer import PayloadType, produce_occurrence_to_kafka @@ -278,13 +273,7 @@ def create_issue_platform_occurrence( resource_id=None, project_id=monitor_env.monitor.project_id, event_id=uuid.uuid4().hex, - fingerprint=[ - fingerprint - if fingerprint - else hash_from_values( - ["monitor", str(monitor_env.monitor.guid), occurrence_data["reason"]] - ) - ], + fingerprint=[fingerprint], type=occurrence_data["group_type"], issue_title=f"Monitor failure: {monitor_env.monitor.name}", subtitle=occurrence_data["subtitle"], @@ -314,13 +303,7 @@ def 
create_issue_platform_occurrence( "contexts": {"monitor": get_monitor_environment_context(monitor_env)}, "environment": monitor_env.get_environment().name, "event_id": occurrence.event_id, - "fingerprint": [fingerprint] - if fingerprint - else [ - "monitor", - str(monitor_env.monitor.guid), - occurrence_data["reason"], - ], + "fingerprint": [fingerprint], "platform": "other", "project_id": monitor_env.monitor.project_id, "received": current_timestamp.isoformat(), diff --git a/src/sentry/monitors/logic/mark_ok.py b/src/sentry/monitors/logic/mark_ok.py index e113220a550186..f331d21b4590f2 100644 --- a/src/sentry/monitors/logic/mark_ok.py +++ b/src/sentry/monitors/logic/mark_ok.py @@ -62,11 +62,11 @@ def mark_ok(checkin: MonitorCheckIn, ts: datetime): # Only send an occurrence if we have an active incident for grouphash in active_incidents.values_list("grouphash", flat=True): resolve_incident_group(grouphash, checkin.monitor.project_id) - if active_incidents.update( + + active_incidents.update( resolving_checkin=checkin, resolving_timestamp=checkin.date_added, - ): - params["last_state_change"] = ts + ) else: # Don't update status if incident isn't recovered params.pop("status", None) diff --git a/src/sentry/monitors/models.py b/src/sentry/monitors/models.py index 3b9b5009b28e4d..686bc318cdc055 100644 --- a/src/sentry/monitors/models.py +++ b/src/sentry/monitors/models.py @@ -1,6 +1,7 @@ from __future__ import annotations import logging +import uuid import zoneinfo from collections.abc import Sequence from datetime import datetime @@ -32,7 +33,6 @@ ) from sentry.db.models.fields.slug import SentrySlugField from sentry.db.models.utils import slugify_instance -from sentry.grouping.utils import hash_from_values from sentry.locks import locks from sentry.models.environment import Environment from sentry.models.rule import Rule, RuleSource @@ -606,11 +606,6 @@ class MonitorEnvironment(Model): auto-generated missed check-ins. 
""" - last_state_change = models.DateTimeField(null=True) - """ - The last time that the monitor changed state. Used for issue fingerprinting. - """ - objects: ClassVar[MonitorEnvironmentManager] = MonitorEnvironmentManager() class Meta: @@ -642,8 +637,10 @@ def get_last_successful_checkin(self): @property def incident_grouphash(self): - # TODO(rjo100): Check to see if there's an active incident - # if not, use last_state_change as fallback + """ + Retrieve the grouphash for the current active incident. If there is no + active incident None will be returned. + """ active_incident = ( MonitorIncident.objects.filter( monitor_environment_id=self.id, resolving_checkin__isnull=True @@ -654,18 +651,7 @@ def incident_grouphash(self): if active_incident: return active_incident.grouphash - # XXX(rjo100): While we migrate monitor issues to using the - # Incident stored grouphash we still may have some active issues - # that are using the old hashes. We can remove this in the - # future once all existing issues are resolved. - return hash_from_values( - [ - "monitor", - str(self.monitor.guid), - self.get_environment().name, - str(self.last_state_change), - ] - ) + return None @receiver(pre_save, sender=MonitorEnvironment) @@ -680,6 +666,13 @@ def check_monitor_environment_limits(sender, instance, **kwargs): ) +def default_grouphash(): + """ + Generate a unique 32 character grouphash for a monitor incident + """ + return uuid.uuid4().hex + + @region_silo_only_model class MonitorIncident(Model): __relocation_scope__ = RelocationScope.Excluded @@ -702,10 +695,33 @@ class MonitorIncident(Model): This represents the final OK check-in that we receive """ - grouphash = models.CharField(max_length=32) + grouphash = models.CharField(max_length=32, default=default_grouphash) + """ + Used for issue occurances generation. Failed check-ins produce occurance + associated to this grouphash. 
+ """ + date_added = models.DateTimeField(default=timezone.now) class Meta: app_label = "sentry" db_table = "sentry_monitorincident" indexes = [models.Index(fields=["monitor_environment", "resolving_checkin"])] + + +@region_silo_only_model +class MonitorEnvBrokenDetection(Model): + """ + Records an instance where we have detected a monitor environment to be + broken based on a long duration of failure and consecutive failing check-ins + """ + + __relocation_scope__ = RelocationScope.Excluded + + monitor_incident = FlexibleForeignKey("sentry.MonitorIncident") + detection_timestamp = models.DateTimeField(auto_now_add=True) + user_notified_timestamp = models.DateTimeField(null=True, db_index=True) + + class Meta: + app_label = "sentry" + db_table = "sentry_monitorenvbrokendetection" diff --git a/src/sentry/monitors/tasks.py b/src/sentry/monitors/tasks.py index 79f86b62a7f404..82fd558235d5a5 100644 --- a/src/sentry/monitors/tasks.py +++ b/src/sentry/monitors/tasks.py @@ -7,11 +7,13 @@ import msgpack import sentry_sdk -from arroyo import Partition, Topic +from arroyo import Partition +from arroyo import Topic as ArroyoTopic from arroyo.backends.kafka import KafkaPayload, KafkaProducer, build_kafka_configuration from confluent_kafka.admin import AdminClient, PartitionMetadata from django.conf import settings +from sentry.conf.types.kafka_definition import Topic from sentry.constants import ObjectStatus from sentry.monitors.logic.mark_failed import mark_failed from sentry.monitors.schedule import get_prev_schedule @@ -50,7 +52,7 @@ def _get_producer() -> KafkaProducer: - cluster_name = get_topic_definition(settings.KAFKA_INGEST_MONITORS)["cluster"] + cluster_name = get_topic_definition(Topic.INGEST_MONITORS)["cluster"] producer_config = get_kafka_producer_cluster_options(cluster_name) producer_config.pop("compression.type", None) producer_config.pop("message.max.bytes", None) @@ -62,10 +64,10 @@ def _get_producer() -> KafkaProducer: @lru_cache(maxsize=None) def 
_get_partitions() -> Mapping[int, PartitionMetadata]: - topic = settings.KAFKA_INGEST_MONITORS - cluster_name = get_topic_definition(topic)["cluster"] + topic_defn = get_topic_definition(Topic.INGEST_MONITORS) + topic = topic_defn["real_topic_name"] - conf = get_kafka_admin_cluster_options(cluster_name) + conf = get_kafka_admin_cluster_options(topic_defn["cluster"]) admin_client = AdminClient(conf) result = admin_client.list_topics(topic) topic_metadata = result.topics.get(topic) @@ -203,7 +205,7 @@ def clock_pulse(current_datetime=None): # topic. This is a requirement to ensure that none of the partitions stall, # since the global clock is tied to the slowest partition. for partition in _get_partitions().values(): - dest = Partition(Topic(settings.KAFKA_INGEST_MONITORS), partition.id) + dest = Partition(ArroyoTopic(settings.KAFKA_INGEST_MONITORS), partition.id) _checkin_producer.produce(dest, payload) diff --git a/src/sentry/notifications/utils/__init__.py b/src/sentry/notifications/utils/__init__.py index 3829556a5809a0..58aca93029dcfd 100644 --- a/src/sentry/notifications/utils/__init__.py +++ b/src/sentry/notifications/utils/__init__.py @@ -16,7 +16,7 @@ from sentry import integrations from sentry.eventstore.models import Event, GroupEvent -from sentry.incidents.models import AlertRuleTriggerAction +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction from sentry.integrations import IntegrationFeatures, IntegrationProvider from sentry.issues.grouptype import ( PerformanceConsecutiveDBQueriesGroupType, @@ -458,9 +458,9 @@ def to_dict(self) -> dict[str, str | float | list[str]]: "transaction_name": self.transaction, "parent_span": get_span_evidence_value(self.parent_span), "repeating_spans": get_span_evidence_value(self.repeating_spans), - "num_repeating_spans": str(len(self.problem.offender_span_ids)) - if self.problem.offender_span_ids - else "", + "num_repeating_spans": ( + str(len(self.problem.offender_span_ids)) if 
self.problem.offender_span_ids else "" + ), } @property @@ -531,9 +531,9 @@ def to_dict(self) -> dict[str, str | float | list[str]]: "transaction_name": self.transaction, "repeating_spans": self.path_prefix, "parameters": self.parameters, - "num_repeating_spans": str(len(self.problem.offender_span_ids)) - if self.problem.offender_span_ids - else "", + "num_repeating_spans": ( + str(len(self.problem.offender_span_ids)) if self.problem.offender_span_ids else "" + ), } @property diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 4300c4168dca7f..e0dc7921309729 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -964,6 +964,9 @@ # Drop delete_old_primary_hash messages for a particular project. register("reprocessing2.drop-delete-old-primary-hash", default=[], flags=FLAG_AUTOMATOR_MODIFIABLE) +# Switch to use service wrapper for reprocessing redis operations +register("reprocessing.use_store", default=False, flags=FLAG_AUTOMATOR_MODIFIABLE) + # BEGIN ABUSE QUOTAS # Example: @@ -1650,6 +1653,10 @@ register("hybrid_cloud.region-domain-allow-list", default=[], flags=FLAG_AUTOMATOR_MODIFIABLE) register("hybrid_cloud.region-user-allow-list", default=[], flags=FLAG_AUTOMATOR_MODIFIABLE) +register( + "hybrid_cloud.use_region_specific_upload_url", default=False, flags=FLAG_AUTOMATOR_MODIFIABLE +) + # Retry controls register("hybridcloud.regionsiloclient.retries", default=5, flags=FLAG_AUTOMATOR_MODIFIABLE) register("hybridcloud.rpc.retries", default=5, flags=FLAG_AUTOMATOR_MODIFIABLE) diff --git a/src/sentry/pipeline/base.py b/src/sentry/pipeline/base.py index 7613b8d5bf8131..487286c09af2e8 100644 --- a/src/sentry/pipeline/base.py +++ b/src/sentry/pipeline/base.py @@ -85,7 +85,9 @@ def unpack_state(cls, request: HttpRequest) -> PipelineRequestState | None: organization: RpcOrganization | None = None if state.org_id: - org_context = organization_service.get_organization_by_id(id=state.org_id) + org_context = 
organization_service.get_organization_by_id( + id=state.org_id, include_teams=False + ) if org_context: organization = org_context.organization diff --git a/src/sentry/profiles/task.py b/src/sentry/profiles/task.py index 7d31ff7055dab7..86c53714c608a8 100644 --- a/src/sentry/profiles/task.py +++ b/src/sentry/profiles/task.py @@ -3,6 +3,7 @@ from collections.abc import Mapping, MutableMapping from copy import deepcopy from datetime import datetime, timezone +from functools import lru_cache from time import time from typing import Any @@ -21,6 +22,7 @@ from sentry.models.eventerror import EventError from sentry.models.organization import Organization from sentry.models.project import Project +from sentry.models.projectkey import ProjectKey, UseCase from sentry.profiles.device import classify_device from sentry.profiles.java import deobfuscate_signature, format_signature from sentry.profiles.utils import get_from_profiling_service @@ -136,6 +138,20 @@ def process_profile_task( set_measurement("profile.stacks.processed", len(profile["profile"]["stacks"])) set_measurement("profile.frames.processed", len(profile["profile"]["frames"])) + if options.get( + "profiling.generic_metrics.functions_ingestion.enabled" + ) and project.organization_id in options.get( + "profiling.generic_metrics.functions_ingestion.allowed_org_ids" + ): + try: + with metrics.timer("process_profile.get_metrics_dsn"): + dsn = get_metrics_dsn(project.id) + profile["options"] = { + "dsn": dsn, + } + except Exception as e: + sentry_sdk.capture_exception(e) + if not _push_profile_to_vroom(profile, project): return @@ -898,3 +914,11 @@ def clean_android_js_profile(profile: Profile): del p["event_id"] del p["release"] del p["dist"] + + +@lru_cache(maxsize=100) +def get_metrics_dsn(project_id: int) -> str: + project_key, _ = ProjectKey.objects.get_or_create( + project_id=project_id, use_case=UseCase.PROFILING.value + ) + return project_key.get_dsn(public=True) diff --git a/src/sentry/ratelimits/redis.py 
b/src/sentry/ratelimits/redis.py index 90c3a834085398..613a1911cb5d45 100644 --- a/src/sentry/ratelimits/redis.py +++ b/src/sentry/ratelimits/redis.py @@ -106,8 +106,11 @@ def is_limited_with_value( # Reset Time = next time bucket's start time reset_time = _bucket_start_time(_time_bucket(request_time, window) + 1, window) try: - result = self.client.incr(redis_key) - self.client.expire(redis_key, expiration) + pipe = self.client.pipeline() + pipe.incr(redis_key) + pipe.expire(redis_key, expiration) + pipeline_result = pipe.execute() + result = pipeline_result[0] except RedisError: # We don't want rate limited endpoints to fail when ratelimits # can't be updated. We do want to know when that happens. diff --git a/src/sentry/receivers/features.py b/src/sentry/receivers/features.py index e653644ed170ed..db56bf7f2c7f88 100644 --- a/src/sentry/receivers/features.py +++ b/src/sentry/receivers/features.py @@ -122,7 +122,7 @@ def record_event_processed(project, event, **kwargs): # Check to make sure more the ip address is being sent. 
# testing for this in test_no_user_tracking_for_ip_address_only # list(d.keys()) pattern is to make this python3 safe - if user_context and list(user_context.keys()) != ["ip_address"]: + if user_context and len(user_context.keys() - {"ip_address", "sentry_user"}) > 0: feature_slugs.append("user_tracking") # Custom Tags diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py index dccb25aa28241b..4723904522f0a9 100644 --- a/src/sentry/relay/config/metric_extraction.py +++ b/src/sentry/relay/config/metric_extraction.py @@ -14,7 +14,7 @@ from sentry import features, options from sentry.api.endpoints.project_transaction_threshold import DEFAULT_THRESHOLD from sentry.api.utils import get_date_range_from_params -from sentry.incidents.models import AlertRule, AlertRuleStatus +from sentry.incidents.models.alert_rule import AlertRule, AlertRuleStatus from sentry.models.dashboard_widget import ( ON_DEMAND_ENABLED_KEY, DashboardWidgetQuery, diff --git a/src/sentry/replays/lib/kafka.py b/src/sentry/replays/lib/kafka.py index 26ab2368e649cc..2bde967b5faf01 100644 --- a/src/sentry/replays/lib/kafka.py +++ b/src/sentry/replays/lib/kafka.py @@ -1,5 +1,4 @@ -from django.conf import settings - +from sentry.conf.types.kafka_definition import Topic from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition from sentry.utils.pubsub import KafkaPublisher @@ -10,7 +9,7 @@ def initialize_replays_publisher(is_async=False) -> KafkaPublisher: global replay_publisher if replay_publisher is None: - config = get_topic_definition(settings.KAFKA_INGEST_REPLAY_EVENTS) + config = get_topic_definition(Topic.INGEST_REPLAY_EVENTS) replay_publisher = KafkaPublisher( get_kafka_producer_cluster_options(config["cluster"]), asynchronous=is_async, diff --git a/src/sentry/replays/usecases/ingest/dom_index.py b/src/sentry/replays/usecases/ingest/dom_index.py index 80b626bb20c987..d97babe7532a8a 100644 --- 
a/src/sentry/replays/usecases/ingest/dom_index.py +++ b/src/sentry/replays/usecases/ingest/dom_index.py @@ -6,17 +6,12 @@ import uuid from collections.abc import Generator from hashlib import md5 -from typing import Any, Literal, TypedDict, cast - -from django.conf import settings +from typing import Any, Literal, TypedDict from sentry import features +from sentry.conf.types.kafka_definition import Topic from sentry.models.project import Project -from sentry.replays.usecases.ingest.events import SentryEvent -from sentry.replays.usecases.ingest.issue_creation import ( - report_rage_click_issue, - report_rage_click_issue_with_replay_event, -) +from sentry.replays.usecases.ingest.issue_creation import report_rage_click_issue_with_replay_event from sentry.utils import json, kafka_config, metrics from sentry.utils.pubsub import KafkaPublisher @@ -219,7 +214,7 @@ def _initialize_publisher() -> KafkaPublisher: global replay_publisher if replay_publisher is None: - config = kafka_config.get_topic_definition(settings.KAFKA_INGEST_REPLAY_EVENTS) + config = kafka_config.get_topic_definition(Topic.INGEST_REPLAY_EVENTS) replay_publisher = KafkaPublisher( kafka_config.get_kafka_producer_cluster_options(config["cluster"]) ) @@ -403,10 +398,6 @@ def _handle_breadcrumb( payload["data"]["node"], replay_event, ) - else: - report_rage_click_issue.delay( - project_id, replay_id, cast(SentryEvent, event) - ) # Log the event for tracking. 
log = event["data"].get("payload", {}).copy() log["project_id"] = project_id diff --git a/src/sentry/replays/usecases/ingest/issue_creation.py b/src/sentry/replays/usecases/ingest/issue_creation.py index 0985253646429a..c2ec8fc642c220 100644 --- a/src/sentry/replays/usecases/ingest/issue_creation.py +++ b/src/sentry/replays/usecases/ingest/issue_creation.py @@ -5,12 +5,7 @@ from sentry.constants import MAX_CULPRIT_LENGTH from sentry.issues.grouptype import ReplayRageClickType from sentry.issues.issue_occurrence import IssueEvidence -from sentry.models.project import Project -from sentry.replays.query import query_replay_instance -from sentry.replays.usecases.ingest.events import SentryEvent from sentry.replays.usecases.issue import new_issue_occurrence -from sentry.silo.base import SiloMode -from sentry.tasks.base import instrumented_task from sentry.utils import metrics logger = logging.getLogger() @@ -19,73 +14,6 @@ RAGE_CLICK_LEVEL = "error" -@instrumented_task( - name="sentry.replays.usecases.ingest.issue_creation.report_rage_click_issue", - queue="replays.ingest_replay", - default_retry_delay=5, - max_retries=5, - silo_mode=SiloMode.REGION, -) -def report_rage_click_issue(project_id: int, replay_id: str, event: SentryEvent): - metrics.incr("replay.rage_click_issue_creation") - payload = event["data"]["payload"] - - project = Project.objects.get(id=project_id) - - # Seconds since epoch is UTC. 
- timestamp = datetime.datetime.fromtimestamp(payload["timestamp"]) - timestamp = timestamp.replace(tzinfo=datetime.UTC) - - replay_info_list = query_replay_instance( - project_id=project_id, - replay_id=replay_id, - start=timestamp - datetime.timedelta(hours=1), - end=timestamp, - organization=project.organization, - ) - if not replay_info_list or len(replay_info_list) == 0: - metrics.incr("replay.rage_click_issue_creation.no_replay_info") - return - - replay_info = replay_info_list[0] - - selector = payload["message"] - clicked_element = selector.split(" > ")[-1] - new_issue_occurrence( - culprit=payload["data"]["url"][:MAX_CULPRIT_LENGTH], - environment=replay_info["agg_environment"], - fingerprint=[selector], - issue_type=ReplayRageClickType, - level=RAGE_CLICK_LEVEL, - platform="javascript", - project_id=project_id, - subtitle=selector, - timestamp=timestamp, - title=RAGE_CLICK_TITLE, - evidence_data={ - # RRWeb node data of clicked element. - "node": payload["data"]["node"], - # CSS selector path to clicked element. 
- "selector": selector, - }, - evidence_display=[ - IssueEvidence(name="Clicked Element", value=clicked_element, important=True), - IssueEvidence(name="Selector Path", value=selector, important=True), - ], - extra_event_data={ - "contexts": {"replay": {"replay_id": replay_id}}, - "level": RAGE_CLICK_LEVEL, - "tags": {"replayId": replay_id, "url": payload["data"]["url"]}, - "user": { - "id": replay_info["user_id"], - "username": replay_info["user_username"], - "email": replay_info["user_email"], - "ip_address": replay_info["user_ip"], - }, - }, - ) - - def report_rage_click_issue_with_replay_event( project_id: int, replay_id: str, diff --git a/src/sentry/reprocessing2.py b/src/sentry/reprocessing2.py index 75586690ea6bb5..368fa6d173903d 100644 --- a/src/sentry/reprocessing2.py +++ b/src/sentry/reprocessing2.py @@ -97,6 +97,7 @@ from sentry.deletions.defaults.group import DIRECT_GROUP_RELATED_MODELS from sentry.eventstore.models import Event from sentry.eventstore.processing import event_processing_store +from sentry.eventstore.reprocessing import reprocessing_store from sentry.models.eventattachment import EventAttachment from sentry.snuba.dataset import Dataset from sentry.utils import json, metrics, snuba @@ -140,6 +141,8 @@ Literal["attachment.not_found"], ] +use_store_option = "reprocessing.use_store" + class CannotReprocess(Exception): def __init__(self, reason: CannotReprocessReason): @@ -265,10 +268,15 @@ def _send_delete_old_primary_hash_messages( # Events for a group are split and bucketed by their primary hashes. If flushing is to be # performed on a per-group basis, the event count needs to be summed up across all buckets # belonging to a single group. 
- event_count = 0 - for primary_hash in old_primary_hashes: - key = _get_old_primary_hash_subset_key(project_id, group_id, primary_hash) - event_count += client.llen(key) + if options.get(use_store_option): + event_count = reprocessing_store.event_count_for_hashes( + project_id, group_id, old_primary_hashes + ) + else: + event_count = 0 + for primary_hash in old_primary_hashes: + key = _get_old_primary_hash_subset_key(project_id, group_id, primary_hash) + event_count += client.llen(key) if ( not force_flush_batch @@ -277,8 +285,13 @@ def _send_delete_old_primary_hash_messages( return for primary_hash in old_primary_hashes: - event_key = _get_old_primary_hash_subset_key(project_id, group_id, primary_hash) - event_ids, from_date, to_date = pop_batched_events_from_redis(event_key) + if options.get(use_store_option): + event_ids, from_date, to_date = reprocessing_store.pop_batched_events( + project_id, group_id, primary_hash + ) + else: + event_key = _get_old_primary_hash_subset_key(project_id, group_id, primary_hash) + event_ids, from_date, to_date = pop_batched_events_from_redis(event_key) # Racing might be happening between two different tasks. Give up on the # task that's lagging behind by prematurely terminating flushing. @@ -366,21 +379,33 @@ def buffered_delete_old_primary_hash( client = _get_sync_redis_client() - # This is a meta key that contains old primary hashes. These hashes are then - # combined with other values to construct a key that points to a list of - # tombstonable events. - primary_hash_set_key = f"re2:tombstone-primary-hashes:{project_id}:{group_id}" - old_primary_hashes = client.smembers(primary_hash_set_key) + if options.get(use_store_option): + old_primary_hashes = reprocessing_store.get_old_primary_hashes(project_id, group_id) + else: + # This is a meta key that contains old primary hashes. These hashes are then + # combined with other values to construct a key that points to a list of + # tombstonable events. 
+ primary_hash_set_key = f"re2:tombstone-primary-hashes:{project_id}:{group_id}" + old_primary_hashes = client.smembers(primary_hash_set_key) if old_primary_hash is not None and old_primary_hash != current_primary_hash: - event_key = _get_old_primary_hash_subset_key(project_id, group_id, old_primary_hash) - client.lpush(event_key, f"{to_timestamp(datetime)};{event_id}") - client.expire(event_key, settings.SENTRY_REPROCESSING_TOMBSTONES_TTL) + if options.get(use_store_option): + reprocessing_store.expire_hash( + project_id, group_id, event_id, datetime, old_primary_hash + ) + else: + event_key = _get_old_primary_hash_subset_key(project_id, group_id, old_primary_hash) + client.lpush(event_key, f"{to_timestamp(datetime)};{event_id}") + client.expire(event_key, settings.SENTRY_REPROCESSING_TOMBSTONES_TTL) if old_primary_hash not in old_primary_hashes: old_primary_hashes.add(old_primary_hash) - client.sadd(primary_hash_set_key, old_primary_hash) - client.expire(primary_hash_set_key, settings.SENTRY_REPROCESSING_TOMBSTONES_TTL) + if options.get(use_store_option): + reprocessing_store.add_hash(project_id, group_id, old_primary_hash) + else: + primary_hash_set_key = f"re2:tombstone-primary-hashes:{project_id}:{group_id}" + client.sadd(primary_hash_set_key, old_primary_hash) + client.expire(primary_hash_set_key, settings.SENTRY_REPROCESSING_TOMBSTONES_TTL) with sentry_sdk.configure_scope() as scope: scope.set_tag("project_id", project_id) @@ -474,32 +499,49 @@ def buffered_handle_remaining_events( more than counters. """ + key = None client = _get_sync_redis_client() - # We explicitly cluster by only project_id and group_id here such that our - # RENAME command later succeeds. 
- key = f"re2:remaining:{{{project_id}:{old_group_id}}}" - - if datetime_to_event: - llen = client.lpush( - key, - *(f"{to_timestamp(datetime)};{event_id}" for datetime, event_id in datetime_to_event), + + if options.get(use_store_option): + llen = reprocessing_store.get_remaining_event_count( + project_id, old_group_id, datetime_to_event ) - client.expire(key, settings.SENTRY_REPROCESSING_SYNC_TTL) else: - llen = client.llen(key) + # We explicitly cluster by only project_id and group_id here such that our + # RENAME command later succeeds. + key = f"re2:remaining:{{{project_id}:{old_group_id}}}" + + if datetime_to_event: + llen = client.lpush( + key, + *( + f"{to_timestamp(datetime)};{event_id}" + for datetime, event_id in datetime_to_event + ), + ) + client.expire(key, settings.SENTRY_REPROCESSING_SYNC_TTL) + else: + llen = client.llen(key) if force_flush_batch or llen > settings.SENTRY_REPROCESSING_REMAINING_EVENTS_BUF_SIZE: - new_key = f"{key}:{uuid.uuid4().hex}" - - try: - # Rename `key` to a new temp key that is passed to celery task. We - # use `renamenx` instead of `rename` only to detect UUID collisions. - assert client.renamenx(key, new_key), "UUID collision for new_key?" - except redis.exceptions.ResponseError: - # `key` does not exist in Redis. `ResponseError` is a bit too broad - # but it seems we'd have to do string matching on error message - # otherwise. - return + + if options.get(use_store_option): + new_key = reprocessing_store.rename_key(project_id, old_group_id) + if not new_key: + return + else: + assert key, "Key must exist in this branch" + new_key = f"{key}:{uuid.uuid4().hex}" + + try: + # Rename `key` to a new temp key that is passed to celery task. We + # use `renamenx` instead of `rename` only to detect UUID collisions. + assert client.renamenx(key, new_key), "UUID collision for new_key?" + except redis.exceptions.ResponseError: + # `key` does not exist in Redis. 
`ResponseError` is a bit too broad + # but it seems we'd have to do string matching on error message + # otherwise. + return from sentry.tasks.reprocessing2 import handle_remaining_events @@ -555,15 +597,22 @@ def mark_event_reprocessed(data=None, group_id=None, project_id=None, num_events project_id = data["project"] - client = _get_sync_redis_client() - # refresh the TTL of the metadata: - client.expire(_get_info_reprocessed_key(group_id), settings.SENTRY_REPROCESSING_SYNC_TTL) - key = _get_sync_counter_key(group_id) - client.expire(key, settings.SENTRY_REPROCESSING_SYNC_TTL) - if client.decrby(key, num_events) == 0: - from sentry.tasks.reprocessing2 import finish_reprocessing + if options.get(use_store_option): + result = reprocessing_store.mark_event_reprocessed(group_id, num_events) + if result: + from sentry.tasks.reprocessing2 import finish_reprocessing - finish_reprocessing.delay(project_id=project_id, group_id=group_id) + finish_reprocessing.delay(project_id=project_id, group_id=group_id) + else: + client = _get_sync_redis_client() + # refresh the TTL of the metadata: + client.expire(_get_info_reprocessed_key(group_id), settings.SENTRY_REPROCESSING_SYNC_TTL) + key = _get_sync_counter_key(group_id) + client.expire(key, settings.SENTRY_REPROCESSING_SYNC_TTL) + if client.decrby(key, num_events) == 0: + from sentry.tasks.reprocessing2 import finish_reprocessing + + finish_reprocessing.delay(project_id=project_id, group_id=group_id) def start_group_reprocessing( @@ -647,15 +696,20 @@ def start_group_reprocessing( # New Activity Timestamp date_created = new_activity.datetime - client = _get_sync_redis_client() - client.setex(_get_sync_counter_key(group_id), settings.SENTRY_REPROCESSING_SYNC_TTL, sync_count) - client.setex( - _get_info_reprocessed_key(group_id), - settings.SENTRY_REPROCESSING_SYNC_TTL, - json.dumps( - {"dateCreated": date_created, "syncCount": sync_count, "totalEvents": event_count} - ), - ) + if options.get(use_store_option): + 
reprocessing_store.start_reprocessing(group_id, date_created, sync_count, event_count) + else: + client = _get_sync_redis_client() + client.setex( + _get_sync_counter_key(group_id), settings.SENTRY_REPROCESSING_SYNC_TTL, sync_count + ) + client.setex( + _get_info_reprocessed_key(group_id), + settings.SENTRY_REPROCESSING_SYNC_TTL, + json.dumps( + {"dateCreated": date_created, "syncCount": sync_count, "totalEvents": event_count} + ), + ) return new_group.id @@ -670,11 +724,15 @@ def is_group_finished(group_id): def get_progress(group_id, project_id=None): - client = _get_sync_redis_client() - pending_key = _get_sync_counter_key(group_id) - pending = client.get(pending_key) - ttl = client.ttl(pending_key) - info = client.get(_get_info_reprocessed_key(group_id)) + if options.get(use_store_option): + pending, ttl = reprocessing_store.get_pending(group_id) + info = reprocessing_store.get_progress(group_id) + else: + client = _get_sync_redis_client() + pending_key = _get_sync_counter_key(group_id) + pending = client.get(pending_key) + ttl = client.ttl(pending_key) + info = client.get(_get_info_reprocessed_key(group_id)) if pending is None: logger.error("reprocessing2.missing_counter") return 0, None diff --git a/src/sentry/rules/actions/notify_event_service.py b/src/sentry/rules/actions/notify_event_service.py index 757d0b5a26d639..035a793bffa920 100644 --- a/src/sentry/rules/actions/notify_event_service.py +++ b/src/sentry/rules/actions/notify_event_service.py @@ -10,7 +10,8 @@ from sentry.api.serializers.models.app_platform_event import AppPlatformEvent from sentry.api.serializers.models.incident import IncidentSerializer from sentry.eventstore.models import GroupEvent -from sentry.incidents.models import AlertRuleTriggerAction, Incident, IncidentStatus +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction +from sentry.incidents.models.incident import Incident, IncidentStatus from sentry.integrations.metric_alerts import incident_attachment_info from 
sentry.plugins.base import plugins from sentry.rules import EventState diff --git a/src/sentry/runner/commands/createuser.py b/src/sentry/runner/commands/createuser.py index 7d6f0662776968..906d41ad98f5d5 100644 --- a/src/sentry/runner/commands/createuser.py +++ b/src/sentry/runner/commands/createuser.py @@ -151,7 +151,9 @@ def createuser(emails, org_id, password, superuser, staff, no_password, no_input # Get the org if specified, otherwise use the default. if org_id: - org_context = organization_service.get_organization_by_id(id=org_id) + org_context = organization_service.get_organization_by_id( + id=org_id, include_teams=False, include_projects=False + ) if org_context is None: raise Exception("Organization ID not found") org = org_context.organization diff --git a/src/sentry/runner/commands/devserver.py b/src/sentry/runner/commands/devserver.py index 038a66bfa72238..6e00c6890af293 100644 --- a/src/sentry/runner/commands/devserver.py +++ b/src/sentry/runner/commands/devserver.py @@ -366,12 +366,11 @@ def devserver( from sentry.conf.types.kafka_definition import Topic from sentry.utils.batching_kafka_consumer import create_topics + from sentry.utils.kafka_config import get_topic_definition for topic in Topic: - default_name = topic.value - physical_name = settings.KAFKA_TOPIC_OVERRIDES.get(default_name, default_name) - cluster_name = settings.KAFKA_TOPIC_TO_CLUSTER[default_name] - create_topics(cluster_name, [physical_name]) + topic_defn = get_topic_definition(topic) + create_topics(topic_defn["cluster"], [topic_defn["real_topic_name"]]) if dev_consumer: daemons.append( diff --git a/src/sentry/search/events/builder/metrics.py b/src/sentry/search/events/builder/metrics.py index 4ff57eb5e3f541..aa57b8c871ab5c 100644 --- a/src/sentry/search/events/builder/metrics.py +++ b/src/sentry/search/events/builder/metrics.py @@ -1029,18 +1029,23 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any: groupbys = self.groupby if not groupbys and 
self.use_on_demand: # Need this otherwise top_events returns only 1 item - groupbys = [Column(col) for col in self._get_group_bys()] - groupby_aliases = [ - ( - groupby.alias - if isinstance(groupby, (AliasedExpression, CurriedFunction)) - else groupby.name - ) - for groupby in groupbys - if not ( - isinstance(groupby, CurriedFunction) and groupby.function == "team_key_transaction" - ) - ] + groupbys = [self.resolve_column(col) for col in self._get_group_bys()] + # Later the query is made by passing these columns to metrics layer so we can just have the aliases be the + # raw groupbys + groupby_aliases = self._get_group_bys() + else: + groupby_aliases = [ + ( + groupby.alias + if isinstance(groupby, (AliasedExpression, CurriedFunction)) + else groupby.name + ) + for groupby in groupbys + if not ( + isinstance(groupby, CurriedFunction) + and groupby.function == "team_key_transaction" + ) + ] # The typing for these are weak (all using Any) since the results from snuba can contain an assortment of types value_map: dict[str, Any] = defaultdict(dict) groupby_values: list[Any] = [] diff --git a/src/sentry/search/events/constants.py b/src/sentry/search/events/constants.py index 4194f2cc08a451..ebf18f77d23fa7 100644 --- a/src/sentry/search/events/constants.py +++ b/src/sentry/search/events/constants.py @@ -282,7 +282,7 @@ class ThresholdDict(TypedDict): "measurements.score.weight.cls": "d:transactions/measurements.score.weight.cls@ratio", "measurements.score.weight.fcp": "d:transactions/measurements.score.weight.fcp@ratio", "measurements.score.weight.ttfb": "d:transactions/measurements.score.weight.ttfb@ratio", - "measurements.inp": "d:spans/webvital.inp@ratio", + "measurements.inp": "d:spans/webvital.inp@millisecond", "measurements.score.inp": "d:spans/webvital.score.inp@ratio", "measurements.score.weight.inp": "d:spans/webvital.score.weight.inp@ratio", "spans.browser": "d:transactions/breakdowns.span_ops.ops.browser@millisecond", @@ -316,6 +316,7 @@ class 
ThresholdDict(TypedDict): "transaction.method", "transaction.op", "transaction.status", + "span.op", } SPAN_METRICS_MAP = { "user": "s:spans/user@none", diff --git a/src/sentry/search/events/datasets/discover.py b/src/sentry/search/events/datasets/discover.py index 52a084d44967cf..b7c9673a5b202a 100644 --- a/src/sentry/search/events/datasets/discover.py +++ b/src/sentry/search/events/datasets/discover.py @@ -1007,9 +1007,7 @@ def function_converter(self) -> Mapping[str, SnQLFunction]: SnQLFunction( "example", required_args=[NumericColumn("column")], - snql_aggregate=lambda args, alias: function_aliases.resolve_random_sample( - ["timestamp", "span_id", args["column"].name], alias - ), + snql_aggregate=self._resolve_random_sample, private=True, ), SnQLFunction( @@ -1813,6 +1811,20 @@ def _resolve_count_scores_function(self, args: Mapping[str, Column], alias: str) alias, ) + def _resolve_random_sample( + self, + args: Mapping[str, str | Column | SelectType | int | float], + alias: str, + ) -> SelectType: + offset = 0 if self.builder.offset is None else self.builder.offset.offset + limit = 0 if self.builder.limit is None else self.builder.limit.limit + return function_aliases.resolve_random_sample( + ["timestamp", "span_id", args["column"].name], + alias, + offset, + limit, + ) + # Query Filters def _project_slug_filter_converter(self, search_filter: SearchFilter) -> WhereType | None: return filter_aliases.project_slug_converter(self.builder, search_filter) diff --git a/src/sentry/search/events/datasets/function_aliases.py b/src/sentry/search/events/datasets/function_aliases.py index f6d604c92286db..b6280f519211e8 100644 --- a/src/sentry/search/events/datasets/function_aliases.py +++ b/src/sentry/search/events/datasets/function_aliases.py @@ -13,6 +13,7 @@ from sentry.search.events.types import SelectType from sentry.sentry_metrics.configuration import UseCaseKey from sentry.sentry_metrics.use_case_id_registry import UseCaseID +from sentry.utils.hashlib import fnv1a_32 
def resolve_project_threshold_config( @@ -344,7 +345,9 @@ def resolve_rounded_timestamp(interval: int, alias: str, timestamp_column: str = ) -def resolve_random_sample(columns: list[str], alias: str, seed: int = 1): +def resolve_random_sample(columns: list[str], alias: str, offset: int, limit: int): + seed_str = f"{offset}-{limit}" + seed = fnv1a_32(seed_str.encode("utf-8")) return Function( "arrayElement", [ diff --git a/src/sentry/search/events/datasets/metrics_summaries.py b/src/sentry/search/events/datasets/metrics_summaries.py index f6ce6160d0bc06..a52e0c04eb3851 100644 --- a/src/sentry/search/events/datasets/metrics_summaries.py +++ b/src/sentry/search/events/datasets/metrics_summaries.py @@ -2,7 +2,7 @@ from collections.abc import Callable, Mapping -from snuba_sdk import And, Condition, Direction, Function, Op, OrderBy +from snuba_sdk import And, Column, Condition, Direction, Function, Op, OrderBy from sentry.api.event_search import SearchFilter from sentry.search.events import builder, constants @@ -31,6 +31,7 @@ def field_alias_converter(self) -> Mapping[str, Callable[[str], SelectType]]: return { constants.PROJECT_ALIAS: self._resolve_project_slug_alias, constants.PROJECT_NAME_ALIAS: self._resolve_project_slug_alias, + "avg_metric": self._resolve_avg_alias, } @property @@ -40,18 +41,7 @@ def function_converter(self) -> Mapping[str, SnQLFunction]: for function in [ SnQLFunction( "example", - snql_aggregate=lambda args, alias: function_aliases.resolve_random_sample( - [ - "group", - "end_timestamp", - "span_id", - "min", - "max", - "sum", - "count", - ], - alias, - ), + snql_aggregate=self._resolve_random_sample, private=True, ), SnQLFunction( @@ -91,3 +81,32 @@ def _metric_filter_converter(self, search_filter: SearchFilter) -> WhereType | N def _resolve_project_slug_alias(self, alias: str) -> SelectType: return field_aliases.resolve_project_slug_alias(self.builder, alias) + + def _resolve_avg_alias(self, alias: str) -> SelectType: + return Function( + 
"divide", + [self.builder.column("sum_metric"), self.builder.column("count_metric")], + alias, + ) + + def _resolve_random_sample( + self, + args: Mapping[str, str | Column | SelectType | int | float], + alias: str, + ) -> SelectType: + offset = 0 if self.builder.offset is None else self.builder.offset.offset + limit = 0 if self.builder.limit is None else self.builder.limit.limit + return function_aliases.resolve_random_sample( + [ + "group", + "end_timestamp", + "span_id", + "min", + "max", + "sum", + "count", + ], + alias, + offset, + limit, + ) diff --git a/src/sentry/search/events/datasets/spans_indexed.py b/src/sentry/search/events/datasets/spans_indexed.py index 5a8c6b5286a72c..857774a86fd0c7 100644 --- a/src/sentry/search/events/datasets/spans_indexed.py +++ b/src/sentry/search/events/datasets/spans_indexed.py @@ -196,9 +196,7 @@ def function_converter(self) -> Mapping[str, SnQLFunction]: ), SnQLFunction( "example", - snql_aggregate=lambda args, alias: function_aliases.resolve_random_sample( - ["group", "timestamp", "span_id"], alias - ), + snql_aggregate=self._resolve_random_sample, private=True, ), SnQLFunction( @@ -340,3 +338,17 @@ def _resolve_percentile( alias, ) ) + + def _resolve_random_sample( + self, + args: Mapping[str, str | Column | SelectType | int | float], + alias: str, + ) -> SelectType: + offset = 0 if self.builder.offset is None else self.builder.offset.offset + limit = 0 if self.builder.limit is None else self.builder.limit.limit + return function_aliases.resolve_random_sample( + ["group", "timestamp", "span_id"], + alias, + offset, + limit, + ) diff --git a/src/sentry/sentry_metrics/aggregation_option_registry.py b/src/sentry/sentry_metrics/aggregation_option_registry.py index b67afda1b9dac1..634fa416e8b735 100644 --- a/src/sentry/sentry_metrics/aggregation_option_registry.py +++ b/src/sentry/sentry_metrics/aggregation_option_registry.py @@ -9,20 +9,27 @@ class AggregationOption(Enum): TEN_SECOND = "ten_second" +class TimeWindow(Enum): + 
SEVEN_DAYS = "7d" + FOURTEEN_DAYS = "14d" + THIRTY_DAYS = "30d" + NINETY_DAYS = "90d" + + METRIC_ID_AGG_OPTION = { - "d:transactions/measurements.fcp@millisecond": AggregationOption.HIST, - "d:transactions/measurements.lcp@millisecond": AggregationOption.HIST, + "d:transactions/measurements.fcp@millisecond": {AggregationOption.HIST: TimeWindow.NINETY_DAYS}, + "d:transactions/measurements.lcp@millisecond": {AggregationOption.HIST: TimeWindow.NINETY_DAYS}, } -USE_CASE_AGG_OPTION = {UseCaseID.CUSTOM: AggregationOption.TEN_SECOND} +USE_CASE_AGG_OPTION = {UseCaseID.CUSTOM: {AggregationOption.TEN_SECOND: TimeWindow.SEVEN_DAYS}} -def get_aggregation_option(metric_id: str) -> AggregationOption | None: - use_case_id: UseCaseID = extract_use_case_id(metric_id) +def get_aggregation_options(mri: str) -> dict[AggregationOption, TimeWindow] | None: + use_case_id: UseCaseID = extract_use_case_id(mri) # We check first if the particular metric ID has a specified aggregation - if metric_id in METRIC_ID_AGG_OPTION: - return METRIC_ID_AGG_OPTION.get(metric_id) + if mri in METRIC_ID_AGG_OPTION: + return METRIC_ID_AGG_OPTION.get(mri) # And move to the use case if not elif options.get("sentry-metrics.10s-granularity") and (use_case_id in USE_CASE_AGG_OPTION): return USE_CASE_AGG_OPTION[use_case_id] diff --git a/src/sentry/sentry_metrics/configuration.py b/src/sentry/sentry_metrics/configuration.py index eddebed13a3220..a885712f379d64 100644 --- a/src/sentry/sentry_metrics/configuration.py +++ b/src/sentry/sentry_metrics/configuration.py @@ -10,6 +10,8 @@ import sentry_sdk +from sentry.conf.types.kafka_definition import Topic + # The maximum length of a column that is indexed in postgres. 
It is important to keep this in # sync between the consumers and the models defined in src/sentry/sentry_metrics/models.py MAX_INDEXED_COLUMN_LENGTH = 200 @@ -46,7 +48,7 @@ class MetricsIngestConfiguration: db_backend: IndexerStorage db_backend_options: Mapping[str, Any] input_topic: str - output_topic: str + output_topic: Topic use_case_id: UseCaseKey internal_metrics_tag: str | None writes_limiter_cluster_options: Mapping[str, Any] @@ -79,7 +81,7 @@ def get_ingest_config( db_backend=IndexerStorage.POSTGRES, db_backend_options={}, input_topic=settings.KAFKA_INGEST_METRICS, - output_topic=settings.KAFKA_SNUBA_METRICS, + output_topic=Topic.SNUBA_METRICS, use_case_id=UseCaseKey.RELEASE_HEALTH, internal_metrics_tag="release-health", writes_limiter_cluster_options=settings.SENTRY_METRICS_INDEXER_WRITES_LIMITER_OPTIONS, @@ -96,7 +98,7 @@ def get_ingest_config( db_backend=IndexerStorage.POSTGRES, db_backend_options={}, input_topic=settings.KAFKA_INGEST_PERFORMANCE_METRICS, - output_topic=settings.KAFKA_SNUBA_GENERIC_METRICS, + output_topic=Topic.SNUBA_GENERIC_METRICS, use_case_id=UseCaseKey.PERFORMANCE, internal_metrics_tag="perf", writes_limiter_cluster_options=settings.SENTRY_METRICS_INDEXER_WRITES_LIMITER_OPTIONS_PERFORMANCE, diff --git a/src/sentry/sentry_metrics/consumers/indexer/batch.py b/src/sentry/sentry_metrics/consumers/indexer/batch.py index 9bc1960df40bc2..3d3b1251d9b2d5 100644 --- a/src/sentry/sentry_metrics/consumers/indexer/batch.py +++ b/src/sentry/sentry_metrics/consumers/indexer/batch.py @@ -17,7 +17,7 @@ from sentry_kafka_schemas.schema_types.snuba_metrics_v1 import Metric from sentry import options -from sentry.sentry_metrics.aggregation_option_registry import get_aggregation_option +from sentry.sentry_metrics.aggregation_option_registry import get_aggregation_options from sentry.sentry_metrics.configuration import MAX_INDEXED_COLUMN_LENGTH from sentry.sentry_metrics.consumers.indexer.common import ( BrokerMeta, @@ -66,7 +66,7 @@ class 
IndexerBatchMetrics: max_tags_len: int = 0 max_value_len: int = 0 - def add_metric(self, num_bytes: int, tags_len: int, value_len: int): + def add_metric(self, num_bytes: int, tags_len: int, value_len: int) -> None: self.message_count += 1 self.total_bytes += num_bytes self.total_tags_len += tags_len @@ -75,13 +75,13 @@ def add_metric(self, num_bytes: int, tags_len: int, value_len: int): self.max_tags_len = max(self.max_tags_len, tags_len) self.max_value_len = max(self.max_value_len, value_len) - def avg_bytes(self): + def avg_bytes(self) -> float: return self.total_bytes / self.message_count - def avg_tags_len(self): + def avg_tags_len(self) -> float: return self.total_tags_len / self.message_count - def avg_value_len(self): + def avg_value_len(self) -> float: return self.total_value_len / self.message_count @@ -487,8 +487,11 @@ def reconstruct_messages( "value": old_payload_value["value"], "sentry_received_timestamp": sentry_received_timestamp, } - if aggregation_option := get_aggregation_option(old_payload_value["name"]): - new_payload_v2["aggregation_option"] = aggregation_option.value + if aggregation_options := get_aggregation_options(old_payload_value["name"]): + # TODO: This should eventually handle multiple aggregation options + option = list(aggregation_options.items())[0][0] + assert option is not None + new_payload_v2["aggregation_option"] = option.value new_payload_value = new_payload_v2 @@ -517,6 +520,8 @@ def reconstruct_messages( with metrics.timer("metrics_consumer.reconstruct_messages.emit_payload_metrics"): for use_case_id, metrics_by_type in self._message_metrics.items(): for metric_type, batch_metric in metrics_by_type.items(): + if batch_metric.message_count == 0: + continue metrics.incr( "metrics_consumer.process_message.messages_seen", amount=batch_metric.message_count, @@ -563,33 +568,34 @@ def reconstruct_messages( for use_case_metrics in self._message_metrics.values() for type_metrics in use_case_metrics.values() ) - metrics.gauge( - 
"metrics_consumer.process_message.message.avg_size_in_batch", - sum( - type_metrics.total_bytes - for use_case_metrics in self._message_metrics.values() - for type_metrics in use_case_metrics.values() + if not num_messages == 0: + metrics.gauge( + "metrics_consumer.process_message.message.avg_size_in_batch", + sum( + type_metrics.total_bytes + for use_case_metrics in self._message_metrics.values() + for type_metrics in use_case_metrics.values() + ) + / num_messages, ) - / num_messages, - ) - metrics.gauge( - "metrics_consumer.process_message.message.avg_tags_len_in_batch", - sum( - type_metrics.total_tags_len - for use_case_metrics in self._message_metrics.values() - for type_metrics in use_case_metrics.values() + metrics.gauge( + "metrics_consumer.process_message.message.avg_tags_len_in_batch", + sum( + type_metrics.total_tags_len + for use_case_metrics in self._message_metrics.values() + for type_metrics in use_case_metrics.values() + ) + / num_messages, ) - / num_messages, - ) - metrics.gauge( - "metrics_consumer.process_message.message.avg_value_len_in_batch", - sum( - type_metrics.total_value_len - for use_case_metrics in self._message_metrics.values() - for type_metrics in use_case_metrics.values() + metrics.gauge( + "metrics_consumer.process_message.message.avg_value_len_in_batch", + sum( + type_metrics.total_value_len + for use_case_metrics in self._message_metrics.values() + for type_metrics in use_case_metrics.values() + ) + / num_messages, ) - / num_messages, - ) return IndexerOutputMessageBatch( new_messages, diff --git a/src/sentry/sentry_metrics/consumers/indexer/multiprocess.py b/src/sentry/sentry_metrics/consumers/indexer/multiprocess.py index dd56520a20f521..4dbd6a27f54d01 100644 --- a/src/sentry/sentry_metrics/consumers/indexer/multiprocess.py +++ b/src/sentry/sentry_metrics/consumers/indexer/multiprocess.py @@ -10,6 +10,7 @@ from arroyo.types import Commit, FilteredPayload, Message, Partition from confluent_kafka import Producer +from 
sentry.conf.types.kafka_definition import Topic from sentry.utils import kafka_config, metrics logger = logging.getLogger(__name__) @@ -18,7 +19,7 @@ class SimpleProduceStep(ProcessingStep[KafkaPayload]): def __init__( self, - output_topic: str, + output_topic: Topic, commit_function: Commit, producer: AbstractProducer[KafkaPayload] | None = None, ) -> None: @@ -26,7 +27,7 @@ def __init__( self.__producer = Producer( kafka_config.get_kafka_producer_cluster_options(snuba_metrics["cluster"]), ) - self.__producer_topic = output_topic + self.__producer_topic = snuba_metrics["real_topic_name"] self.__commit_function = commit_function self.__closed = False diff --git a/src/sentry/sentry_metrics/querying/data_v2/execution.py b/src/sentry/sentry_metrics/querying/data_v2/execution.py index 8f92c03a990dc8..e8d40fcf73e98d 100644 --- a/src/sentry/sentry_metrics/querying/data_v2/execution.py +++ b/src/sentry/sentry_metrics/querying/data_v2/execution.py @@ -12,9 +12,9 @@ from sentry.models.project import Project from sentry.sentry_metrics.querying.common import SNUBA_QUERY_LIMIT from sentry.sentry_metrics.querying.data_v2.preparation import IntermediateQuery -from sentry.sentry_metrics.querying.data_v2.units import MeasurementUnit, UnitFamily from sentry.sentry_metrics.querying.errors import MetricsQueryExecutionError from sentry.sentry_metrics.querying.types import GroupKey, GroupsCollection, QueryOrder +from sentry.sentry_metrics.querying.units import MeasurementUnit, UnitFamily from sentry.sentry_metrics.querying.visitors import ( QueriedMetricsVisitor, TimeseriesConditionInjectionVisitor, diff --git a/src/sentry/sentry_metrics/querying/data_v2/plan.py b/src/sentry/sentry_metrics/querying/data_v2/plan.py index 4ca94865bda144..5fd89690670b7f 100644 --- a/src/sentry/sentry_metrics/querying/data_v2/plan.py +++ b/src/sentry/sentry_metrics/querying/data_v2/plan.py @@ -2,7 +2,7 @@ from dataclasses import dataclass, replace from sentry.sentry_metrics.querying.data_v2.execution 
import QueryResult -from sentry.sentry_metrics.querying.data_v2.transformation.base import ( +from sentry.sentry_metrics.querying.data_v2.transformation import ( QueryTransformer, QueryTransformerResult, ) diff --git a/src/sentry/sentry_metrics/querying/data_v2/preparation.py b/src/sentry/sentry_metrics/querying/data_v2/preparation.py deleted file mode 100644 index 7b929fdfcef45c..00000000000000 --- a/src/sentry/sentry_metrics/querying/data_v2/preparation.py +++ /dev/null @@ -1,85 +0,0 @@ -from abc import ABC, abstractmethod -from dataclasses import dataclass, replace - -from snuba_sdk import MetricsQuery, Timeseries - -from sentry.sentry_metrics.querying.data_v2.units import ( - MeasurementUnit, - UnitFamily, - get_unit_family_and_unit, -) -from sentry.sentry_metrics.querying.types import QueryOrder -from sentry.snuba.metrics import parse_mri - - -@dataclass(frozen=True) -class IntermediateQuery: - metrics_query: MetricsQuery - order: QueryOrder | None = None - limit: int | None = None - unit_family: UnitFamily | None = None - unit: MeasurementUnit | None = None - scaling_factor: float | None = None - - -class PreparationStep(ABC): - @abstractmethod - def run(self, intermediate_queries: list[IntermediateQuery]) -> list[IntermediateQuery]: - raise NotImplementedError - - -def run_preparation_steps( - intermediate_queries: list[IntermediateQuery], *steps -) -> list[IntermediateQuery]: - for step in steps: - if isinstance(step, PreparationStep): - intermediate_queries = step.run(intermediate_queries=intermediate_queries) - - return intermediate_queries - - -class UnitNormalizationStep(PreparationStep): - - EXCLUDED_AGGREGATES = {"count", "count_unique"} - - def _extract_unit(self, timeseries: Timeseries) -> str | None: - # If the aggregate doesn't support unit normalization, we will skip it. 
- if timeseries.aggregate in self.EXCLUDED_AGGREGATES: - return None - - parsed_mri = parse_mri(timeseries.metric.mri) - if parsed_mri is not None: - return parsed_mri.unit - - return None - - def run(self, intermediate_queries: list[IntermediateQuery]) -> list[IntermediateQuery]: - normalized_intermediate_queries = [] - - for intermediate_query in intermediate_queries: - normalized_intermediate_query = intermediate_query - metrics_query = intermediate_query.metrics_query - # For now, we want to perform units coercion only if the query is a timeseries. - if isinstance(metrics_query.query, Timeseries): - extracted_unit = self._extract_unit(timeseries=metrics_query.query) - if extracted_unit is not None: - unit_family_and_unit = get_unit_family_and_unit(extracted_unit) - if unit_family_and_unit is not None: - ( - unit_family, - reference_unit, - unit, - ) = unit_family_and_unit - normalized_intermediate_query = replace( - intermediate_query, - metrics_query=metrics_query.set_query( - unit.apply_on_timeseries(metrics_query.query) - ), - unit_family=unit_family, - unit=reference_unit, - scaling_factor=unit.scaling_factor, - ) - - normalized_intermediate_queries.append(normalized_intermediate_query) - - return normalized_intermediate_queries diff --git a/src/sentry/sentry_metrics/querying/data_v2/preparation/__init__.py b/src/sentry/sentry_metrics/querying/data_v2/preparation/__init__.py new file mode 100644 index 00000000000000..a898bf5e93d7f5 --- /dev/null +++ b/src/sentry/sentry_metrics/querying/data_v2/preparation/__init__.py @@ -0,0 +1,4 @@ +from .base import IntermediateQuery, PreparationStep, run_preparation_steps +from .units_normalization import UnitNormalizationStep + +__all__ = ["PreparationStep", "IntermediateQuery", "run_preparation_steps", "UnitNormalizationStep"] diff --git a/src/sentry/sentry_metrics/querying/data_v2/preparation/base.py b/src/sentry/sentry_metrics/querying/data_v2/preparation/base.py new file mode 100644 index 
00000000000000..72e91eed571a1c --- /dev/null +++ b/src/sentry/sentry_metrics/querying/data_v2/preparation/base.py @@ -0,0 +1,33 @@ +from abc import ABC, abstractmethod +from dataclasses import dataclass + +from snuba_sdk import MetricsQuery + +from sentry.sentry_metrics.querying.types import QueryOrder +from sentry.sentry_metrics.querying.units import MeasurementUnit, UnitFamily + + +@dataclass(frozen=True) +class IntermediateQuery: + metrics_query: MetricsQuery + order: QueryOrder | None = None + limit: int | None = None + unit_family: UnitFamily | None = None + unit: MeasurementUnit | None = None + scaling_factor: float | None = None + + +class PreparationStep(ABC): + @abstractmethod + def run(self, intermediate_queries: list[IntermediateQuery]) -> list[IntermediateQuery]: + raise NotImplementedError + + +def run_preparation_steps( + intermediate_queries: list[IntermediateQuery], *steps +) -> list[IntermediateQuery]: + for step in steps: + if isinstance(step, PreparationStep): + intermediate_queries = step.run(intermediate_queries=intermediate_queries) + + return intermediate_queries diff --git a/src/sentry/sentry_metrics/querying/data_v2/preparation/units_normalization.py b/src/sentry/sentry_metrics/querying/data_v2/preparation/units_normalization.py new file mode 100644 index 00000000000000..5f5ab1d8a001f2 --- /dev/null +++ b/src/sentry/sentry_metrics/querying/data_v2/preparation/units_normalization.py @@ -0,0 +1,41 @@ +from dataclasses import replace + +from sentry.sentry_metrics.querying.data_v2.preparation import IntermediateQuery, PreparationStep +from sentry.sentry_metrics.querying.errors import NonNormalizableUnitsError +from sentry.sentry_metrics.querying.visitors import UnitsNormalizationVisitor + + +class UnitNormalizationStep(PreparationStep): + def _get_normalized_intermediate_query( + self, intermediate_query: IntermediateQuery + ) -> IntermediateQuery: + try: + units_normalization = UnitsNormalizationVisitor() + # We compute the new normalized 
query by visiting and mutating the expression tree. + normalized_query = units_normalization.visit(intermediate_query.metrics_query.query) + # We obtain the units that have been used by the visitor. + ( + unit_family, + reference_unit, + scaling_factor, + ) = units_normalization.get_units_metadata() + + return replace( + intermediate_query, + metrics_query=intermediate_query.metrics_query.set_query(normalized_query), + unit_family=unit_family, + unit=reference_unit, + scaling_factor=scaling_factor, + ) + except NonNormalizableUnitsError: + return intermediate_query + + def run(self, intermediate_queries: list[IntermediateQuery]) -> list[IntermediateQuery]: + normalized_intermediate_queries = [] + + for intermediate_query in intermediate_queries: + normalized_intermediate_queries.append( + self._get_normalized_intermediate_query(intermediate_query) + ) + + return normalized_intermediate_queries diff --git a/src/sentry/sentry_metrics/querying/data_v2/transformation/__init__.py b/src/sentry/sentry_metrics/querying/data_v2/transformation/__init__.py index 951baeb6f93f99..f90fa951e6a17f 100644 --- a/src/sentry/sentry_metrics/querying/data_v2/transformation/__init__.py +++ b/src/sentry/sentry_metrics/querying/data_v2/transformation/__init__.py @@ -1,3 +1,4 @@ +from .base import QueryTransformer, QueryTransformerResult from .metrics_api import MetricsAPIQueryTransformer -__all__ = ["MetricsAPIQueryTransformer"] +__all__ = ["QueryTransformerResult", "QueryTransformer", "MetricsAPIQueryTransformer"] diff --git a/src/sentry/sentry_metrics/querying/data_v2/transformation/metrics_api.py b/src/sentry/sentry_metrics/querying/data_v2/transformation/metrics_api.py index a4461f575dc09c..916a7721fea3e7 100644 --- a/src/sentry/sentry_metrics/querying/data_v2/transformation/metrics_api.py +++ b/src/sentry/sentry_metrics/querying/data_v2/transformation/metrics_api.py @@ -6,8 +6,8 @@ from sentry.search.utils import parse_datetime_string from 
sentry.sentry_metrics.querying.data_v2.execution import QueryResult -from sentry.sentry_metrics.querying.data_v2.transformation.base import QueryTransformer -from sentry.sentry_metrics.querying.data_v2.utils import nan_to_none +from sentry.sentry_metrics.querying.data_v2.transformation import QueryTransformer +from sentry.sentry_metrics.querying.data_v2.utils import undefined_value_to_none from sentry.sentry_metrics.querying.errors import MetricsQueryExecutionError from sentry.sentry_metrics.querying.types import GroupKey, ResultValue, Series, Totals @@ -86,7 +86,7 @@ def _generate_full_series( for time, value in series: time_seconds = parse_datetime_string(time).timestamp() index = int((time_seconds - start_seconds) / interval) - full_series[index] = nan_to_none(value) + full_series[index] = undefined_value_to_none(value) return full_series @@ -213,7 +213,7 @@ def transform(self, query_results: list[QueryResult]) -> Mapping[str, Any]: "series": _generate_full_series( int(start.timestamp()), len(intervals), interval, group_value.series ), - "totals": nan_to_none(group_value.totals), + "totals": undefined_value_to_none(group_value.totals), } ) diff --git a/src/sentry/sentry_metrics/querying/data_v2/utils.py b/src/sentry/sentry_metrics/querying/data_v2/utils.py index 0b367089051f3c..f1661782fbd680 100644 --- a/src/sentry/sentry_metrics/querying/data_v2/utils.py +++ b/src/sentry/sentry_metrics/querying/data_v2/utils.py @@ -3,26 +3,30 @@ from sentry.sentry_metrics.querying.types import ResultValue -def nan_to_none(value: ResultValue) -> ResultValue: +def undefined_value_to_none(value: ResultValue) -> ResultValue: """ - Converts a nan value to None or returns the original value. + Converts an undefined value to None or returns the original value. 
""" if value is None: return None - if is_nan(value): + if is_undefined(value): return None return value -def is_nan(value: ResultValue) -> bool: +def is_undefined(value: ResultValue) -> bool: """ - Returns whether the result of a query is nan. + Returns whether the result of a query is undefined. """ if value is None: return False - elif isinstance(value, list): - return any(map(lambda e: e is not None and math.isnan(e), value)) - return math.isnan(value) + def _is_undefined(inner_value: int | float) -> bool: + return math.isnan(inner_value) or math.isinf(inner_value) + + if isinstance(value, list): + return any(map(lambda e: e is not None and _is_undefined(e), value)) + + return _is_undefined(value) diff --git a/src/sentry/sentry_metrics/querying/errors.py b/src/sentry/sentry_metrics/querying/errors.py index a8fcc10431eb59..a9742a89e76db3 100644 --- a/src/sentry/sentry_metrics/querying/errors.py +++ b/src/sentry/sentry_metrics/querying/errors.py @@ -14,5 +14,5 @@ class CorrelationsQueryExecutionError(Exception): pass -class TooManyCodeLocationsRequestedError(Exception): +class NonNormalizableUnitsError(Exception): pass diff --git a/src/sentry/sentry_metrics/querying/metadata/metrics_code_locations.py b/src/sentry/sentry_metrics/querying/metadata/metrics_code_locations.py index baf3d71b437848..ffa9bdfedba33d 100644 --- a/src/sentry/sentry_metrics/querying/metadata/metrics_code_locations.py +++ b/src/sentry/sentry_metrics/querying/metadata/metrics_code_locations.py @@ -1,12 +1,13 @@ +import math from collections.abc import Generator, Sequence from dataclasses import dataclass from datetime import datetime from sentry.models.organization import Organization from sentry.models.project import Project -from sentry.sentry_metrics.querying.errors import TooManyCodeLocationsRequestedError -from sentry.sentry_metrics.querying.utils import fnv1a_32, get_redis_client_for_metrics_meta +from sentry.sentry_metrics.querying.utils import get_redis_client_for_metrics_meta from 
sentry.utils import json, metrics +from sentry.utils.hashlib import fnv1a_32 DAY_IN_SECONDS = 86400 @@ -99,26 +100,28 @@ def __init__( self._redis_client = get_redis_client_for_metrics_meta() - self._validate() - - def _validate(self): - total_combinations = len(self._projects) * len(self._metric_mris) * len(self._timestamps) - if total_combinations > self.MAXIMUM_KEYS: - raise TooManyCodeLocationsRequestedError( - "The request results in too many code locations to be fetched, try to reduce the number of " - "metrics, projects or the time interval" - ) - def _code_location_queries(self) -> Generator[CodeLocationQuery, None, None]: + total_count = len(self._projects) * len(self._metric_mris) * len(self._timestamps) + step_size = ( + 1 if total_count <= self.MAXIMUM_KEYS else math.ceil(total_count / self.MAXIMUM_KEYS) + ) + + # We want to distribute evenly and deterministically the elements in the set of combinations. For example, if + # the total count of code locations queries you made is 100 and our maximum is 50, then we will sample 1 out of + # 2 elements out of the 100 queries, to be within the 50. 
+ current_step = 0 for project in self._projects: for metric_mri in self._metric_mris: for timestamp in self._timestamps: - yield CodeLocationQuery( - organization_id=self._organization.id, - project_id=project.id, - metric_mri=metric_mri, - timestamp=timestamp, - ) + if current_step % step_size == 0: + yield CodeLocationQuery( + organization_id=self._organization.id, + project_id=project.id, + metric_mri=metric_mri, + timestamp=timestamp, + ) + + current_step += 1 def _parse_code_location_payload(self, encoded_location: str) -> CodeLocationPayload: decoded_location = json.loads(encoded_location) diff --git a/src/sentry/sentry_metrics/querying/samples_list.py b/src/sentry/sentry_metrics/querying/samples_list.py index d18821c0c4c8ff..1ce0f7f0c1ec00 100644 --- a/src/sentry/sentry_metrics/querying/samples_list.py +++ b/src/sentry/sentry_metrics/querying/samples_list.py @@ -39,6 +39,7 @@ def __init__( params: ParamsType, snuba_params: SnubaParams, fields: list[str], + operation: str | None, query: str | None, min: float | None, max: float | None, @@ -50,6 +51,7 @@ def __init__( self.params = params self.snuba_params = snuba_params self.fields = fields + self.operation = operation self.query = query self.min = min self.max = max @@ -596,6 +598,12 @@ class CustomSamplesListExecutor(AbstractSamplesListExecutor): "timestamp": "timestamp", } + MIN_MAX_CONDITION_COLUMN = { + "min": "min_metric", + "max": "max_metric", + "count": "count_metric", + } + @classmethod def convert_sort(cls, sort) -> tuple[Literal["", "-"], str] | None: direction: Literal["", "-"] = "" @@ -660,7 +668,7 @@ def get_sorted_span_keys( ) additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions() + min_max_conditions = self.get_min_max_conditions(builder) builder.add_conditions([*additional_conditions, *min_max_conditions]) query_results = builder.run_query(self.referrer.value) @@ -720,7 +728,7 @@ def get_unsorted_span_keys( ) 
additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions() + min_max_conditions = self.get_min_max_conditions(builder) builder.add_conditions([*additional_conditions, *min_max_conditions]) query_results = builder.run_query(self.referrer.value) @@ -762,13 +770,17 @@ def get_additional_conditions(self, builder: QueryBuilder) -> list[Condition]: ) ] - def get_min_max_conditions(self) -> list[Condition]: + def get_min_max_conditions(self, builder: QueryBuilder) -> list[Condition]: conditions = [] + column = builder.resolve_column( + self.MIN_MAX_CONDITION_COLUMN.get(self.operation or "", "avg_metric") + ) + if self.min is not None: - conditions.append(Condition(Column("min"), Op.GTE, self.min)) + conditions.append(Condition(column, Op.GTE, self.min)) if self.max is not None: - conditions.append(Condition(Column("max"), Op.LTE, self.max)) + conditions.append(Condition(column, Op.LTE, self.max)) return conditions diff --git a/src/sentry/sentry_metrics/querying/data_v2/units.py b/src/sentry/sentry_metrics/querying/units.py similarity index 96% rename from src/sentry/sentry_metrics/querying/data_v2/units.py rename to src/sentry/sentry_metrics/querying/units.py index 0be0d69c37672a..cdc99c6387aaa7 100644 --- a/src/sentry/sentry_metrics/querying/data_v2/units.py +++ b/src/sentry/sentry_metrics/querying/units.py @@ -45,6 +45,7 @@ class UnitFamily(Enum): DURATION = "duration" INFORMATION = "information" + UNKNOWN = "unknown" @dataclass(frozen=True) @@ -122,10 +123,10 @@ class UnitsSpec: def get_unit_family_and_unit( unit: MeasurementUnit, -) -> tuple[UnitFamily, MeasurementUnit, Unit] | None: +) -> tuple[UnitFamily, MeasurementUnit | None, Unit | None]: for unit_family, units_spec in FAMILY_TO_UNITS.items(): for inner_unit in units_spec.units: if inner_unit.name == unit: return unit_family, units_spec.reference_unit, inner_unit - return None + return UnitFamily.UNKNOWN, None, None diff --git 
a/src/sentry/sentry_metrics/querying/utils.py b/src/sentry/sentry_metrics/querying/utils.py index 552b7f9c505721..527cf26721bef8 100644 --- a/src/sentry/sentry_metrics/querying/utils.py +++ b/src/sentry/sentry_metrics/querying/utils.py @@ -14,22 +14,6 @@ def get_redis_client_for_metrics_meta() -> RedisCluster: return redis.redis_clusters.get(cluster_key) # type: ignore[return-value] -def fnv1a_32(data: bytes) -> int: - """ - Fowler–Noll–Vo hash function 32 bit implementation. - """ - fnv_init = 0x811C9DC5 - fnv_prime = 0x01000193 - fnv_size = 2**32 - - result_hash = fnv_init - for byte in data: - result_hash ^= byte - result_hash = (result_hash * fnv_prime) % fnv_size - - return result_hash - - def remove_if_match(pattern, string: str) -> str: """ Removes a pattern from a string. diff --git a/src/sentry/sentry_metrics/querying/visitors/__init__.py b/src/sentry/sentry_metrics/querying/visitors/__init__.py index 5ffb47a5393871..b4af4db58fbeae 100644 --- a/src/sentry/sentry_metrics/querying/visitors/__init__.py +++ b/src/sentry/sentry_metrics/querying/visitors/__init__.py @@ -11,6 +11,7 @@ QueryValidationV2Visitor, QueryValidationVisitor, TimeseriesConditionInjectionVisitor, + UnitsNormalizationVisitor, UsedGroupBysVisitor, ) @@ -28,4 +29,5 @@ "QueryConditionsCompositeVisitor", "QueriedMetricsVisitor", "UsedGroupBysVisitor", + "UnitsNormalizationVisitor", ] diff --git a/src/sentry/sentry_metrics/querying/visitors/query_expression.py b/src/sentry/sentry_metrics/querying/visitors/query_expression.py index 9b03aefdfcbb9d..3602e62b0317ed 100644 --- a/src/sentry/sentry_metrics/querying/visitors/query_expression.py +++ b/src/sentry/sentry_metrics/querying/visitors/query_expression.py @@ -1,11 +1,27 @@ from collections.abc import Sequence -from snuba_sdk import AliasedExpression, Column, Condition, Formula, Op, Timeseries +from snuba_sdk import ( + AliasedExpression, + ArithmeticOperator, + Column, + Condition, + Formula, + Op, + Timeseries, +) from snuba_sdk.conditions 
import ConditionGroup from sentry.models.environment import Environment -from sentry.sentry_metrics.querying.errors import InvalidMetricsQueryError +from sentry.sentry_metrics.querying.errors import ( + InvalidMetricsQueryError, + NonNormalizableUnitsError, +) from sentry.sentry_metrics.querying.types import QueryExpression +from sentry.sentry_metrics.querying.units import ( + MeasurementUnit, + UnitFamily, + get_unit_family_and_unit, +) from sentry.sentry_metrics.querying.visitors.base import ( QueryConditionVisitor, QueryExpressionVisitor, @@ -258,3 +274,94 @@ def _group_bys_as_string(self, group_bys: list[Column | AliasedExpression] | Non string_group_bys.add(group_by.name) return string_group_bys + + +class UnitsNormalizationVisitor(QueryExpressionVisitor[QueryExpression]): + """ + Visitor that recursively transforms the `QueryExpression` components to have the same unit. Throws an error in + case units are incompatible. + """ + + UNITLESS_FORMULA_FUNCTIONS = { + ArithmeticOperator.DIVIDE.value, + ArithmeticOperator.MULTIPLY.value, + } + UNITLESS_AGGREGATES = {"count", "count_unique"} + + def __init__(self): + self._unit_family = None + self._reference_unit = None + self._scaling_factor = None + + self._is_formula = False + + def _visit_formula(self, formula: Formula) -> QueryExpression: + self._is_formula = True + + has_all_timeseries_params = True + parameters = [] + for parameter in formula.parameters: + if not isinstance(parameter, Timeseries): + has_all_timeseries_params = False + + parameters.append(self.visit(parameter)) + + # If we have all timeseries as parameters of a formula and the function is belonging to `*` or `/` we will + # not perform any units normalization. + # TODO: we might want to implement units normalization following a more mathematical approach like `ms^2` or + # `byte/s` but this is going to come at a later point. 
+ if formula.function_name in self.UNITLESS_FORMULA_FUNCTIONS and has_all_timeseries_params: + raise NonNormalizableUnitsError( + "A unitless formula function is being used and has at least one " + "timeseries in one of its operands" + ) + + return formula.set_parameters(parameters) + + def _visit_timeseries(self, timeseries: Timeseries) -> QueryExpression: + extracted_unit = self._extract_unit(timeseries=timeseries) + if extracted_unit is not None: + unit_family, reference_unit, unit = get_unit_family_and_unit(extracted_unit) + # If we encounter multiple unit families in a `QueryExpression`, we want to unwind and not apply any + # units normalization. + if self._unit_family is not None and unit_family != self._unit_family: + raise NonNormalizableUnitsError("Multiple unit families are found in the formula") + + # We set the first seen unit family, irrespectively if a unit is found, since if it's not found, the family + # will be unknown. + self._unit_family = unit_family + + if reference_unit is not None and unit is not None: + self._reference_unit = reference_unit + self._scaling_factor = unit.scaling_factor + return unit.apply_on_timeseries(timeseries) + + return timeseries + + def _extract_unit(self, timeseries: Timeseries) -> str | None: + # If the aggregate doesn't support unit normalization, we will skip it. + if timeseries.aggregate in self.UNITLESS_AGGREGATES: + raise NonNormalizableUnitsError( + f"The aggregate {timeseries.aggregate} doesn't need unit normalization" + ) + + parsed_mri = parse_mri(timeseries.metric.mri) + if parsed_mri is not None: + return parsed_mri.unit + + raise NonNormalizableUnitsError( + "Units normalization can't be run if not all components have a metric mri" + ) + + def get_units_metadata( + self, + ) -> tuple[UnitFamily | None, MeasurementUnit | None, float | int | None]: + """ + Returns metadata of the units that were encountered during the traversal. 
+ """ + # If we have a formula, we do not return the scaling factor, since a formula technically has multiple scaling + # factors, but they won't be of use to the frontend. + if self._is_formula: + return self._unit_family, self._reference_unit, None + + return self._unit_family, self._reference_unit, self._scaling_factor diff --git a/src/sentry/services/hybrid_cloud/integration/impl.py b/src/sentry/services/hybrid_cloud/integration/impl.py index 5a02f9c913cdd1..4b7864a10495a6 100644 --- a/src/sentry/services/hybrid_cloud/integration/impl.py +++ b/src/sentry/services/hybrid_cloud/integration/impl.py @@ -10,7 +10,7 @@ from sentry.api.paginator import OffsetPaginator from sentry.api.serializers import AppPlatformEvent from sentry.constants import SentryAppInstallationStatus -from sentry.incidents.models import INCIDENT_STATUS, IncidentStatus +from sentry.incidents.models.incident import INCIDENT_STATUS, IncidentStatus from sentry.integrations.mixins import NotifyBasicMixin from sentry.integrations.msteams import MsTeamsClient from sentry.models.integrations import Integration, OrganizationIntegration @@ -32,7 +32,6 @@ serialize_integration_external_project, serialize_organization_integration, ) -from sentry.services.hybrid_cloud.organization import RpcOrganizationSummary from sentry.services.hybrid_cloud.pagination import RpcPaginationArgs, RpcPaginationResult from sentry.shared_integrations.exceptions import ApiError from sentry.utils import json, metrics @@ -357,15 +356,10 @@ def send_incident_alert_notification( incident_id: int, new_status: int, incident_attachment_json: str, - organization: RpcOrganizationSummary | None = None, # deprecated - organization_id: int | None = None, + organization_id: int, metric_value: str | None = None, notification_uuid: str | None = None, ) -> bool: - if organization_id is None and organization is not None: - organization_id = organization.id - assert organization_id is not None, "organization or organization_id is required" - 
sentry_app = SentryApp.objects.get(id=sentry_app_id) metrics.incr("notifications.sent", instance=sentry_app.slug, skip_internal=False) diff --git a/src/sentry/services/hybrid_cloud/integration/service.py b/src/sentry/services/hybrid_cloud/integration/service.py index fbe7932ab7be39..6776e7814d8ec0 100644 --- a/src/sentry/services/hybrid_cloud/integration/service.py +++ b/src/sentry/services/hybrid_cloud/integration/service.py @@ -12,7 +12,6 @@ RpcIntegrationExternalProject, RpcIntegrationIdentityContext, ) -from sentry.services.hybrid_cloud.organization import RpcOrganizationSummary from sentry.services.hybrid_cloud.pagination import RpcPaginationArgs, RpcPaginationResult from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method from sentry.silo import SiloMode @@ -238,8 +237,7 @@ def send_incident_alert_notification( incident_id: int, new_status: int, incident_attachment_json: str, - organization: RpcOrganizationSummary | None = None, - organization_id: int | None = None, + organization_id: int, metric_value: str | None = None, notification_uuid: str | None = None, ) -> bool: diff --git a/src/sentry/services/hybrid_cloud/rpc.py b/src/sentry/services/hybrid_cloud/rpc.py index 285d69a9ac1847..13deb3cf10cd1f 100644 --- a/src/sentry/services/hybrid_cloud/rpc.py +++ b/src/sentry/services/hybrid_cloud/rpc.py @@ -518,8 +518,9 @@ def _remote_exception(self, message: str) -> RpcRemoteException: return RpcRemoteException(self.service_name, self.method_name, message) def _raise_from_response_status_error(self, response: requests.Response) -> NoReturn: + rpc_method = f"{self.service_name}.{self.method_name}" with sentry_sdk.configure_scope() as scope: - scope.set_tag("rpc_method", f"{self.service_name}.{self.method_name}") + scope.set_tag("rpc_method", rpc_method) scope.set_tag("rpc_status_code", response.status_code) if in_test_environment(): @@ -535,6 +536,13 @@ def _raise_from_response_status_error(self, response: requests.Response) -> NoRe if 
response.status_code == 403: raise self._remote_exception("Unauthorized service access") if response.status_code == 400: + logger.warning( + "rpc.bad_request", + extra={ + "rpc_method": rpc_method, + "error": response.content.decode("utf8"), + }, + ) raise self._remote_exception("Invalid service request") raise self._remote_exception(f"Service unavailable ({response.status_code} status)") diff --git a/src/sentry/snuba/metrics/datasource.py b/src/sentry/snuba/metrics/datasource.py index fbb69ed83b6517..72e48eab8927df 100644 --- a/src/sentry/snuba/metrics/datasource.py +++ b/src/sentry/snuba/metrics/datasource.py @@ -46,6 +46,7 @@ from sentry.snuba.metrics.fields import run_metrics_query from sentry.snuba.metrics.fields.base import ( SnubaDataType, + build_metrics_query, get_derived_metrics, org_id_from_projects, ) @@ -85,7 +86,7 @@ get_intervals, to_intervals, ) -from sentry.utils.snuba import raw_snql_query +from sentry.utils.snuba import bulk_snql_query, raw_snql_query logger = logging.getLogger(__name__) @@ -112,6 +113,27 @@ def _get_metrics_for_entity( ) +def _get_metrics_by_project_for_entity_query( + entity_key: EntityKey, + project_ids: Sequence[int], + org_id: int, + use_case_id: UseCaseID, + start: datetime | None = None, + end: datetime | None = None, +) -> Request: + return build_metrics_query( + entity_key=entity_key, + select=[Column("project_id"), Column("metric_id")], + groupby=[Column("project_id"), Column("metric_id")], + where=[Condition(Column("use_case_id"), Op.EQ, use_case_id.value)], + project_ids=project_ids, + org_id=org_id, + use_case_id=use_case_id, + start=start, + end=end, + ) + + def _get_metrics_by_project_for_entity( entity_key: EntityKey, project_ids: Sequence[int], @@ -186,12 +208,8 @@ def get_available_derived_metrics( def get_metrics_blocking_state_of_projects( - projects: Sequence[Project], use_case_id: UseCaseID + projects: Sequence[Project], ) -> dict[str, Sequence[tuple[bool, Sequence[str], int]]]: - # Blocked metrics are only 
supported for custom metrics. - if use_case_id != UseCaseID.CUSTOM: - return {} - metrics_blocking_state_by_project = get_metrics_blocking_state(projects) metrics_blocking_state_by_mri = {} @@ -220,15 +238,17 @@ def _build_metric_meta( def get_metrics_meta( projects: Sequence[Project], - use_case_id: UseCaseID, + use_case_ids: Sequence[UseCaseID], start: datetime | None = None, end: datetime | None = None, ) -> Sequence[MetricMeta]: if not projects: return [] - stored_metrics = get_stored_metrics_of_projects(projects, use_case_id, start, end) - metrics_blocking_state = get_metrics_blocking_state_of_projects(projects, use_case_id) + stored_metrics = get_stored_metrics_of_projects(projects, use_case_ids, start, end) + metrics_blocking_state = ( + get_metrics_blocking_state_of_projects(projects) if UseCaseID.CUSTOM in use_case_ids else {} + ) metrics_metas = [] for metric_mri, project_ids in stored_metrics.items(): @@ -276,38 +296,64 @@ def get_metrics_meta( def get_stored_metrics_of_projects( projects: Sequence[Project], - use_case_id: UseCaseID, + use_case_ids: Sequence[UseCaseID], start: datetime | None = None, end: datetime | None = None, ) -> Mapping[str, Sequence[int]]: org_id = projects[0].organization_id project_ids = [project.id for project in projects] - stored_metrics = [] - entity_keys = get_entity_keys_of_use_case_id(use_case_id=use_case_id) - for entity_key in entity_keys or (): - stored_metrics += _get_metrics_by_project_for_entity( - entity_key=entity_key, - project_ids=project_ids, - org_id=org_id, - use_case_id=use_case_id, - start=start, - end=end, - ) + # We compute a list of all the queries that we want to run in parallel across entities and use cases. 
+ requests = [] + use_case_id_to_index = defaultdict(list) + for use_case_id in use_case_ids: + entity_keys = get_entity_keys_of_use_case_id(use_case_id=use_case_id) + for entity_key in entity_keys: + requests.append( + _get_metrics_by_project_for_entity_query( + entity_key=entity_key, + project_ids=project_ids, + org_id=org_id, + use_case_id=use_case_id, + start=start, + end=end, + ) + ) + use_case_id_to_index[use_case_id].append(len(requests) - 1) - grouped_stored_metrics = {} - for stored_metric in stored_metrics: - grouped_stored_metrics.setdefault(stored_metric["metric_id"], []).append( - stored_metric["project_id"] + # We run the queries all in parallel. + results = bulk_snql_query( + requests=requests, + referrer="snuba.metrics.datasource.get_stored_metrics_of_projects", + use_cache=True, + ) + + # We reverse resolve all the metric ids by bulking together all the resolutions of the same use case id to maximize + # the parallelism. + resolved_metric_ids = defaultdict(dict) + for use_case_id, results_indexes in use_case_id_to_index.items(): + metrics_ids = [] + for result_index in results_indexes: + data = results[result_index]["data"] + for row in data or (): + metrics_ids.append(row["metric_id"]) + + # We have to partition the resolved metric ids per use case id, since the indexer values might clash across + # use cases. + resolved_metric_ids[use_case_id].update( + bulk_reverse_resolve(use_case_id, org_id, [metric_id for metric_id in metrics_ids]) ) - resolved_mris = bulk_reverse_resolve( - use_case_id, org_id, [metric_id for metric_id in grouped_stored_metrics.keys()] - ) + # We iterate over each result and compute a map of `metric_id -> project_id`. 
+ grouped_stored_metrics = defaultdict(list) + for use_case_id, results_indexes in use_case_id_to_index.items(): + for result_index in results_indexes: + data = results[result_index]["data"] + for row in data or (): + resolved_metric_id = resolved_metric_ids[use_case_id][row["metric_id"]] + grouped_stored_metrics[resolved_metric_id].append(row["project_id"]) - return { - resolved_mris[metric_id]: projects for metric_id, projects in grouped_stored_metrics.items() - } + return grouped_stored_metrics def get_custom_measurements( diff --git a/src/sentry/snuba/metrics/fields/base.py b/src/sentry/snuba/metrics/fields/base.py index bb489f2f81b502..d2dc6d95b9ea47 100644 --- a/src/sentry/snuba/metrics/fields/base.py +++ b/src/sentry/snuba/metrics/fields/base.py @@ -105,7 +105,7 @@ MetricOperationParams = Mapping[str, Union[str, int, float]] -def run_metrics_query( +def build_metrics_query( *, entity_key: EntityKey, select: list[Column], @@ -113,18 +113,15 @@ def run_metrics_query( groupby: list[Column], project_ids: Sequence[int], org_id: int, - referrer: str, use_case_id: UseCaseID, start: datetime | None = None, end: datetime | None = None, -) -> list[SnubaDataType]: +) -> Request: if end is None: end = datetime.now() if start is None: start = end - timedelta(hours=24) - # Round timestamp to minute to get cache efficiency: - # Also floor start to match the daily granularity end = end.replace(second=0, microsecond=0) start = start.replace(hour=0, minute=0, second=0, microsecond=0) @@ -141,12 +138,43 @@ def run_metrics_query( + where, granularity=Granularity(GRANULARITY), ) + request = Request( - dataset=Dataset.Metrics.value, + dataset=Dataset.Metrics.value + if use_case_id == UseCaseID.SESSIONS + else Dataset.PerformanceMetrics.value, app_id="metrics", query=query, tenant_ids={"organization_id": org_id, "use_case_id": use_case_id.value}, ) + + return request + + +def run_metrics_query( + *, + entity_key: EntityKey, + select: list[Column], + where: list[Condition], + 
groupby: list[Column], + project_ids: Sequence[int], + org_id: int, + referrer: str, + use_case_id: UseCaseID, + start: datetime | None = None, + end: datetime | None = None, +) -> list[SnubaDataType]: + request = build_metrics_query( + entity_key=entity_key, + select=select, + where=where, + groupby=groupby, + project_ids=project_ids, + org_id=org_id, + use_case_id=use_case_id, + start=start, + end=end, + ) result = raw_snql_query(request, referrer, use_cache=True) return result["data"] diff --git a/src/sentry/snuba/metrics_layer/query.py b/src/sentry/snuba/metrics_layer/query.py index 8dc85d1498f532..e7b541137f968a 100644 --- a/src/sentry/snuba/metrics_layer/query.py +++ b/src/sentry/snuba/metrics_layer/query.py @@ -18,6 +18,7 @@ Timeseries, ) from snuba_sdk.formula import FormulaParameterGroup +from snuba_sdk.mql.mql import parse_mql from sentry.exceptions import InvalidParams from sentry.sentry_metrics.use_case_id_registry import UseCaseID @@ -137,6 +138,10 @@ def _setup_metrics_query(request: Request) -> tuple[Request, datetime, datetime] metrics_query = request.query assert isinstance(metrics_query, MetricsQuery) + # We allow users to pass in a string instead of a Formula/Timeseries object. Handle that case here. 
+ if isinstance(metrics_query.query, str): + metrics_query = metrics_query.set_query(parse_mql(metrics_query.query)) + assert len(metrics_query.scope.org_ids) == 1 # Initially only allow 1 org id organization_id = metrics_query.scope.org_ids[0] tenant_ids = request.tenant_ids or {"organization_id": organization_id} @@ -265,7 +270,14 @@ def _resolve_query_metadata( assert metrics_query.query is not None org_id = metrics_query.scope.org_ids[0] - use_case_id_str = _resolve_use_case_id_str(metrics_query.query) + use_case_ids = _resolve_use_case_ids(metrics_query.query) + + if not use_case_ids: + raise InvalidParams("No use case found in formula parameters") + if len(use_case_ids) > 1: + raise InvalidParams("Formula parameters must all be from the same use case") + use_case_id_str = use_case_ids.pop() + if metrics_query.scope.use_case_id is None: metrics_query = metrics_query.set_scope( metrics_query.scope.set_use_case_id(use_case_id_str) @@ -331,7 +343,7 @@ def _resolve_timeseries_metadata( return series, mappings -def _resolve_use_case_id_str(exp: Formula | Timeseries) -> str: +def _resolve_use_case_ids(exp: Formula | Timeseries) -> set[str]: def fetch_namespace(metric: Metric) -> str: if metric.mri is None: mri = get_mri(metric.public_name) @@ -344,20 +356,15 @@ def fetch_namespace(metric: Metric) -> str: return parsed_mri.namespace if isinstance(exp, Timeseries): - return fetch_namespace(exp.metric) + return {fetch_namespace(exp.metric)} assert isinstance(exp, Formula), exp namespaces = set() for p in exp.parameters: if isinstance(p, (Formula, Timeseries)): - namespaces.add(_resolve_use_case_id_str(p)) - - if not namespaces: - raise InvalidParams("No use case found in formula parameters") - if len(namespaces) > 1: - raise InvalidParams("Formula parameters must all be from the same use case") + namespaces |= _resolve_use_case_ids(p) - return namespaces.pop() + return namespaces def _lookup_indexer_resolve( diff --git a/src/sentry/snuba/models.py 
b/src/sentry/snuba/models.py index c22137be580c78..bd9377695a7e72 100644 --- a/src/sentry/snuba/models.py +++ b/src/sentry/snuba/models.py @@ -52,7 +52,7 @@ def event_types(self): @classmethod def query_for_relocation_export(cls, q: models.Q, pk_map: PrimaryKeyMap) -> models.Q: - from sentry.incidents.models import AlertRule + from sentry.incidents.models.alert_rule import AlertRule from sentry.models.actor import Actor from sentry.models.organization import Organization from sentry.models.project import Project diff --git a/src/sentry/tasks/check_am2_compatibility.py b/src/sentry/tasks/check_am2_compatibility.py index 0ab5e20c1c3ff0..abc135146b8fd8 100644 --- a/src/sentry/tasks/check_am2_compatibility.py +++ b/src/sentry/tasks/check_am2_compatibility.py @@ -9,7 +9,7 @@ from sentry.dynamic_sampling import get_redis_client_for_ds from sentry.exceptions import IncompatibleMetricsQuery -from sentry.incidents.models import AlertRule +from sentry.incidents.models.alert_rule import AlertRule from sentry.models.dashboard_widget import ( ON_DEMAND_ENABLED_KEY, DashboardWidgetQuery, diff --git a/src/sentry/tasks/derive_code_mappings.py b/src/sentry/tasks/derive_code_mappings.py index 746ba13bbff193..0b7b9f93175925 100644 --- a/src/sentry/tasks/derive_code_mappings.py +++ b/src/sentry/tasks/derive_code_mappings.py @@ -22,7 +22,7 @@ from sentry.utils.locking import UnableToAcquireLock from sentry.utils.safe import get_path -SUPPORTED_LANGUAGES = ["javascript", "python", "node", "ruby"] +SUPPORTED_LANGUAGES = ["javascript", "python", "node", "ruby", "php"] logger = logging.getLogger(__name__) @@ -92,13 +92,19 @@ def derive_code_mappings( "organization.slug": org.slug, } - if ( - not features.has("organizations:derive-code-mappings", org) - or not data["platform"] in SUPPORTED_LANGUAGES + if not ( + features.has("organizations:derive-code-mappings", org) + and data.get("platform") in SUPPORTED_LANGUAGES ): logger.info("Event should not be processed.", extra=extra) return + # 
php automatic code mappings currently in LA + if data["platform"].startswith("php") and not features.has( + "organizations:derive-code-mappings-php", org + ): + return + stacktrace_paths: list[str] = identify_stacktrace_paths(data) if not stacktrace_paths: return diff --git a/src/sentry/tasks/files.py b/src/sentry/tasks/files.py index 868ef90df186bd..fa1f12b58cc69f 100644 --- a/src/sentry/tasks/files.py +++ b/src/sentry/tasks/files.py @@ -42,15 +42,13 @@ def delete_file_control(path, checksum, **kwargs): def delete_file(file_blob_model, path, checksum, **kwargs): - from sentry.models.files.utils import get_storage, lock_blob - - lock = lock_blob(checksum, "fileblob_upload") - with lock: - # check that the fileblob with *this* path exists, as its possible - # that a concurrent re-upload added the same chunk once again, with a - # different path that time - if not file_blob_model.objects.filter(checksum=checksum, path=path).exists(): - get_storage().delete(path) + from sentry.models.files.utils import get_storage + + # check that the fileblob with *this* path exists, as its possible + # that a concurrent re-upload added the same chunk once again, with a + # different path that time + if not file_blob_model.objects.filter(checksum=checksum, path=path).exists(): + get_storage().delete(path) @instrumented_task( diff --git a/src/sentry/tasks/groupowner.py b/src/sentry/tasks/groupowner.py index aa0796af38ab21..a89838eb097c58 100644 --- a/src/sentry/tasks/groupowner.py +++ b/src/sentry/tasks/groupowner.py @@ -92,6 +92,15 @@ def _process_suspect_commits( pass else: owner.delete() + logger.info( + "process_suspect_commits.group_owner_removed", + extra={ + "event": event_id, + "group": group_id, + "owner_id": owner.user_id, + "project": project_id, + }, + ) except GroupOwner.MultipleObjectsReturned: GroupOwner.objects.filter( group_id=group_id, @@ -100,6 +109,15 @@ def _process_suspect_commits( project=project, organization_id=project.organization_id, )[0].delete() + 
logger.info( + "process_suspect_commits.multiple_owners_removed", + extra={ + "event": event_id, + "group": group_id, + "owner_id": owner_id, + "project": project_id, + }, + ) cache.set( cache_key, True, PREFERRED_GROUP_OWNER_AGE.total_seconds() diff --git a/src/sentry/tasks/integrations/slack/find_channel_id_for_alert_rule.py b/src/sentry/tasks/integrations/slack/find_channel_id_for_alert_rule.py index cedcf9fd9fddfa..6fe4316cdb4974 100644 --- a/src/sentry/tasks/integrations/slack/find_channel_id_for_alert_rule.py +++ b/src/sentry/tasks/integrations/slack/find_channel_id_for_alert_rule.py @@ -11,7 +11,7 @@ InvalidTriggerActionError, get_slack_channel_ids, ) -from sentry.incidents.models import AlertRule +from sentry.incidents.models.alert_rule import AlertRule from sentry.incidents.serializers import AlertRuleSerializer from sentry.integrations.slack.utils import SLACK_RATE_LIMITED_MESSAGE, RedisRuleStatus from sentry.models.organization import Organization diff --git a/src/sentry/tasks/integrations/slack/find_channel_id_for_rule.py b/src/sentry/tasks/integrations/slack/find_channel_id_for_rule.py index 168c7dfdc3843e..66847e5b3cb54a 100644 --- a/src/sentry/tasks/integrations/slack/find_channel_id_for_rule.py +++ b/src/sentry/tasks/integrations/slack/find_channel_id_for_rule.py @@ -2,7 +2,7 @@ from collections.abc import Sequence from typing import Any -from sentry.incidents.models import AlertRuleTriggerAction +from sentry.incidents.models.alert_rule import AlertRuleTriggerAction from sentry.integrations.slack.utils import ( SLACK_RATE_LIMITED_MESSAGE, RedisRuleStatus, diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index e559d929f2e33d..70087b9dfff336 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -339,6 +339,10 @@ def handle_invalid_group_owners(group): ) for owner in invalid_group_owners: owner.delete() + logger.info( + "handle_invalid_group_owners.delete_group_owner", + extra={"group": 
group.id, "group_owner_id": owner.id, "project": group.project_id}, + ) def handle_group_owners( @@ -358,9 +362,11 @@ def handle_group_owners( lock = locks.get(f"groupowner-bulk:{group.id}", duration=10, name="groupowner_bulk") try: - with metrics.timer("post_process.handle_group_owners"), sentry_sdk.start_span( - op="post_process.handle_group_owners" - ), lock.acquire(): + with ( + metrics.timer("post_process.handle_group_owners"), + sentry_sdk.start_span(op="post_process.handle_group_owners"), + lock.acquire(), + ): current_group_owners = GroupOwner.objects.filter( group=group, type__in=[GroupOwnerType.OWNERSHIP_RULE.value, GroupOwnerType.CODEOWNERS.value], @@ -377,6 +383,12 @@ def handle_group_owners( # Owners already in the database that we'll keep keeping_owners = set() for group_owner in current_group_owners: + logging_params = { + "group": group.id, + "project": project.id, + "organization": project.organization_id, + "group_owner_id": group_owner.id, + } owner_rule_type = ( OwnerRuleType.CODEOWNERS.value if group_owner.type == GroupOwnerType.CODEOWNERS.value @@ -391,6 +403,10 @@ def handle_group_owners( lookup_key_value = None if lookup_key not in new_owners: group_owner.delete() + logger.info( + "handle_group_owners.delete_group_owner", + extra={**logging_params, "reason": "assignment_deleted"}, + ) else: lookup_key_value = new_owners.get(lookup_key) # Old groupowner assignment from outdated rules get deleted @@ -399,6 +415,10 @@ def handle_group_owners( and (group_owner.context or {}).get("rule") not in lookup_key_value ): group_owner.delete() + logger.info( + "handle_group_owners.delete_group_owner", + extra={**logging_params, "reason": "outdated_rule"}, + ) else: keeping_owners.add(lookup_key) @@ -439,6 +459,15 @@ def handle_group_owners( instance=go, created=True, ) + logger.info( + "group_owners.bulk_create", + extra={ + "group_id": group.id, + "project_id": project.id, + "organization_id": project.organization_id, + "count": len(new_group_owners), + 
}, + ) except UnableToAcquireLock: pass @@ -737,14 +766,17 @@ def run_post_process_job(job: PostProcessJob): for pipeline_step in pipeline: try: - with metrics.timer( - "tasks.post_process.run_post_process_job.pipeline.duration", - tags={ - "pipeline": pipeline_step.__name__, - "issue_category": issue_category_metric, - "is_reprocessed": job["is_reprocessed"], - }, - ), sentry_sdk.start_span(op=f"tasks.post_process_group.{pipeline_step.__name__}"): + with ( + metrics.timer( + "tasks.post_process.run_post_process_job.pipeline.duration", + tags={ + "pipeline": pipeline_step.__name__, + "issue_category": issue_category_metric, + "is_reprocessed": job["is_reprocessed"], + }, + ), + sentry_sdk.start_span(op=f"tasks.post_process_group.{pipeline_step.__name__}"), + ): pipeline_step(job) except Exception: metrics.incr( @@ -1008,12 +1040,6 @@ def _get_replay_id(event): if job["is_reprocessed"]: return - if not features.has( - "organizations:session-replay-event-linking", job["event"].project.organization - ): - metrics.incr("post_process.process_replay_link.feature_not_enabled") - return - metrics.incr("post_process.process_replay_link.id_sampled") group_event = job["event"] @@ -1100,7 +1126,7 @@ def process_code_mappings(job: PostProcessJob) -> None: with metrics.timer("post_process.process_code_mappings.duration"): # Supported platforms - if event.data["platform"] not in SUPPORTED_LANGUAGES: + if event.data.get("platform") not in SUPPORTED_LANGUAGES: return # To limit the overall number of tasks, only process one issue per project per hour. 
In @@ -1180,10 +1206,7 @@ def process_commits(job: PostProcessJob) -> None: # Cache the integrations check for 4 hours cache.set(integration_cache_key, has_integrations, 14400) - if ( - features.has("organizations:commit-context", event.project.organization) - and has_integrations - ): + if has_integrations: if not job["group_state"]["is_new"]: return @@ -1386,7 +1409,7 @@ def should_postprocess_feedback(job: PostProcessJob) -> bool: return True should_notify_on_old_feedbacks = job["event"].project.get_option( - "sentry:replay_rage_click_issues" + "sentry:feedback_user_report_notifications" ) if ( diff --git a/src/sentry/tasks/spans.py b/src/sentry/tasks/spans.py index 5c1588d9132ff9..d3c596decc341a 100644 --- a/src/sentry/tasks/spans.py +++ b/src/sentry/tasks/spans.py @@ -97,6 +97,7 @@ def _update_occurrence_group_type(jobs: Sequence[Job], projects: ProjectsMapping performance_problems = job.pop("performance_problems") for performance_problem in performance_problems: performance_problem.type = PerformanceStreamedSpansGroupTypeExperimental + performance_problem.fingerprint = f"{performance_problem.fingerprint}-{PerformanceStreamedSpansGroupTypeExperimental.type_id}" updated_problems.append(performance_problem) job["performance_problems"] = updated_problems @@ -178,7 +179,10 @@ def _process_segment(project_id, segment_id): _pull_out_data(jobs, projects) _calculate_span_grouping(jobs, projects) - _detect_performance_problems(jobs, projects) + _detect_performance_problems(jobs, projects, is_standalone_spans=True) + + # Updates group type and fingerprint of all performance problems + # so they don't double write occurrences as we test. 
_update_occurrence_group_type(jobs, projects) return jobs diff --git a/src/sentry/tasks/summaries/daily_summary.py b/src/sentry/tasks/summaries/daily_summary.py index d94a115113bbce..3f8cfa1d7086fb 100644 --- a/src/sentry/tasks/summaries/daily_summary.py +++ b/src/sentry/tasks/summaries/daily_summary.py @@ -76,9 +76,14 @@ def schedule_organizations(timestamp: float | None = None, duration: int | None user_ids = { user_id for user_id in OrganizationMember.objects.filter( - organization_id=organization.id, teams__projectteam__project__isnull=False + organization_id=organization.id, + teams__projectteam__project__isnull=False, + user_id__isnull=False, ).values_list("user_id", flat=True) } + if not user_ids: + continue + # TODO: convert timezones to UTC offsets and group users_by_tz = defaultdict(list) users_with_tz = user_option_service.get_many( @@ -199,7 +204,7 @@ def build_summary_data( project=project, substatus__in=(GroupSubStatus.ESCALATING, GroupSubStatus.REGRESSED) ).using_replica() regressed_or_escalated_groups_today = Activity.objects.filter( - group__in=(regressed_or_escalated_groups), + group__in=([group for group in regressed_or_escalated_groups]), type__in=(ActivityType.SET_REGRESSION.value, ActivityType.SET_ESCALATING.value), ) if regressed_or_escalated_groups_today: @@ -262,6 +267,10 @@ def deliver_summary(ctx: OrganizationReportContext, users: list[int]): for user_id in user_ids: top_projects_context_map = build_top_projects_map(ctx, user_id) user = cast(RpcActor, user_service.get_user(user_id=user_id)) + logger.info( + "daily_summary.delivering_summary", + extra={"user": user_id, "organization": ctx.organization.id}, + ) DailySummaryNotification( organization=ctx.organization, recipient=user, diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index cbec06b057cf61..0d383cf7d862a8 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -637,8 +637,12 @@ def create_performance_issue( perf_event_manager = 
EventManager(event_data) perf_event_manager.normalize() - def detect_performance_problems_interceptor(data: Event, project: Project): - perf_problems = detect_performance_problems(data, project) + def detect_performance_problems_interceptor( + data: Event, project: Project, is_standalone_spans: bool = False + ): + perf_problems = detect_performance_problems( + data, project, is_standalone_spans=is_standalone_spans + ) if fingerprint: for perf_problem in perf_problems: perf_problem.fingerprint = fingerprint diff --git a/src/sentry/testutils/factories.py b/src/sentry/testutils/factories.py index 51b3f83edc2812..72651aa8fa8a7c 100644 --- a/src/sentry/testutils/factories.py +++ b/src/sentry/testutils/factories.py @@ -35,10 +35,12 @@ create_alert_rule_trigger_action, query_datasets_to_type, ) -from sentry.incidents.models import ( +from sentry.incidents.models.alert_rule import ( AlertRuleMonitorType, AlertRuleThresholdType, AlertRuleTriggerAction, +) +from sentry.incidents.models.incident import ( Incident, IncidentActivity, IncidentProject, diff --git a/src/sentry/testutils/fixtures.py b/src/sentry/testutils/fixtures.py index c4f7145ef28005..bebcaebe4c429d 100644 --- a/src/sentry/testutils/fixtures.py +++ b/src/sentry/testutils/fixtures.py @@ -8,7 +8,8 @@ from django.utils.functional import cached_property from sentry.eventstore.models import Event -from sentry.incidents.models import AlertRuleMonitorType, IncidentActivityType +from sentry.incidents.models.alert_rule import AlertRuleMonitorType +from sentry.incidents.models.incident import IncidentActivityType from sentry.models.activity import Activity from sentry.models.actor import Actor, get_actor_id_for_user from sentry.models.grouprelease import GroupRelease diff --git a/src/sentry/testutils/helpers/apigateway.py b/src/sentry/testutils/helpers/apigateway.py index 215e0650a73e78..6684b6aa4de3c2 100644 --- a/src/sentry/testutils/helpers/apigateway.py +++ b/src/sentry/testutils/helpers/apigateway.py @@ -4,6 +4,7 @@ 
import responses from django.conf import settings +from django.http import HttpResponseRedirect from django.test import override_settings from django.urls import re_path from rest_framework.permissions import AllowAny @@ -32,6 +33,9 @@ class RegionEndpoint(OrganizationEndpoint): def get(self, request, organization): return Response({"proxy": False}) + def post(self, request, organization): + return HttpResponseRedirect("https://zombo.com") + @region_silo_endpoint class NoOrgRegionEndpoint(Endpoint): diff --git a/src/sentry/testutils/helpers/backups.py b/src/sentry/testutils/helpers/backups.py index 159127336b49c5..c9839daaef7f5f 100644 --- a/src/sentry/testutils/helpers/backups.py +++ b/src/sentry/testutils/helpers/backups.py @@ -42,8 +42,8 @@ from sentry.backup.validate import validate from sentry.db.models.fields.bounded import BoundedBigAutoField from sentry.db.models.paranoia import ParanoidModel -from sentry.incidents.models import ( - AlertRuleMonitorType, +from sentry.incidents.models.alert_rule import AlertRuleMonitorType +from sentry.incidents.models.incident import ( IncidentActivity, IncidentSnapshot, IncidentSubscription, diff --git a/src/sentry/testutils/pytest/kafka.py b/src/sentry/testutils/pytest/kafka.py index 151349f3bf6a9e..aaaa8029b16ce3 100644 --- a/src/sentry/testutils/pytest/kafka.py +++ b/src/sentry/testutils/pytest/kafka.py @@ -63,32 +63,6 @@ def inner(settings): return inner -@pytest.fixture -def kafka_topics_setter(): - """ - Returns a function that given a Django settings objects will setup the - kafka topics names to test names. 
- - :return: a function that given a settings object changes all kafka topic names - to "test-" - """ - - def set_test_kafka_settings(settings): - settings.KAFKA_INGEST_EVENTS = "ingest-events" - settings.KAFKA_TOPICS[settings.KAFKA_INGEST_EVENTS] = {"cluster": "default"} - - settings.INGEST_TRANSACTIONS = "ingest-transactions" - settings.KAFKA_TOPICS[settings.INGEST_TRANSACTIONS] = {"cluster": "default"} - - settings.KAFKA_INGEST_ATTACHMENTS = "ingest-attachments" - settings.KAFKA_TOPICS[settings.KAFKA_INGEST_ATTACHMENTS] = {"cluster": "default"} - - settings.KAFKA_OUTCOMES = "outcomes" - settings.KAFKA_TOPICS[settings.KAFKA_OUTCOMES] = {"cluster": "default"} - - return set_test_kafka_settings - - @pytest.fixture(scope="session") def scope_consumers(): """ diff --git a/src/sentry/types/region.py b/src/sentry/types/region.py index 40b67ffe8a664a..80b3db7d11f777 100644 --- a/src/sentry/types/region.py +++ b/src/sentry/types/region.py @@ -62,6 +62,9 @@ class Region: category: RegionCategory """The region's category.""" + visible: bool = True + """Whether the region is visible in API responses""" + def validate(self) -> None: from sentry.utils.snowflake import REGION_ID @@ -135,6 +138,9 @@ def regions(self) -> frozenset[Region]: def get_by_name(self, region_name: str) -> Region | None: return self._by_name.get(region_name) + def get_regions(self, category: RegionCategory | None = None) -> Iterable[Region]: + return (r for r in self.regions if (category is None or r.category == category)) + def get_region_names(self, category: RegionCategory | None = None) -> Iterable[str]: return (r.name for r in self.regions if (category is None or r.category == category)) @@ -335,7 +341,11 @@ def find_all_region_names() -> Iterable[str]: def find_all_multitenant_region_names() -> list[str]: - return list(get_global_directory().get_region_names(RegionCategory.MULTI_TENANT)) + """ + Return all visible multi_tenant regions. 
+ """ + regions = get_global_directory().get_regions(RegionCategory.MULTI_TENANT) + return list([r.name for r in regions if r.visible]) def find_all_region_addresses() -> Iterable[str]: diff --git a/src/sentry/usage_accountant/accountant.py b/src/sentry/usage_accountant/accountant.py index 2ecf3c49f75c03..ee1e98a8c9cc8f 100644 --- a/src/sentry/usage_accountant/accountant.py +++ b/src/sentry/usage_accountant/accountant.py @@ -12,9 +12,9 @@ from arroyo.backends.abstract import Producer from arroyo.backends.kafka import KafkaPayload, KafkaProducer, build_kafka_configuration -from django.conf import settings from usageaccountant import UsageAccumulator, UsageUnit +from sentry.conf.types.kafka_definition import Topic from sentry.options import get from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition @@ -71,7 +71,7 @@ def record( if _accountant_backend is None: cluster_name = get_topic_definition( - settings.KAFKA_SHARED_RESOURCES_USAGE, + Topic.SHARED_RESOURCES_USAGE, )["cluster"] producer_config = get_kafka_producer_cluster_options(cluster_name) producer = KafkaProducer( diff --git a/src/sentry/utils/hashlib.py b/src/sentry/utils/hashlib.py index 608c290bd06b2c..72371bedc81fa8 100644 --- a/src/sentry/utils/hashlib.py +++ b/src/sentry/utils/hashlib.py @@ -73,3 +73,19 @@ def hash_values( for value in values: hash_value(_hash, value) return _hash.hexdigest() + + +def fnv1a_32(data: bytes) -> int: + """ + Fowler–Noll–Vo hash function 32 bit implementation. 
+ """ + fnv_init = 0x811C9DC5 + fnv_prime = 0x01000193 + fnv_size = 2**32 + + result_hash = fnv_init + for byte in data: + result_hash ^= byte + result_hash = (result_hash * fnv_prime) % fnv_size + + return result_hash diff --git a/src/sentry/utils/kafka_config.py b/src/sentry/utils/kafka_config.py index 2ca53a67bf3a47..93e3c4fc87a126 100644 --- a/src/sentry/utils/kafka_config.py +++ b/src/sentry/utils/kafka_config.py @@ -3,6 +3,7 @@ from django.conf import settings +from sentry.conf.types.kafka_definition import Topic from sentry.conf.types.topic_definition import TopicDefinition SUPPORTED_KAFKA_CONFIGURATION = ( @@ -96,9 +97,8 @@ def get_kafka_admin_cluster_options( ) -def get_topic_definition(topic: str) -> TopicDefinition: - defn = settings.KAFKA_TOPICS.get(topic) - if defn is not None: - return defn - else: - raise ValueError(f"Unknown {topic=}") +def get_topic_definition(topic: Topic) -> TopicDefinition: + return { + "cluster": settings.KAFKA_TOPIC_TO_CLUSTER[topic.value], + "real_topic_name": settings.KAFKA_TOPIC_OVERRIDES.get(topic.value, topic.value), + } diff --git a/src/sentry/utils/mockdata/core.py b/src/sentry/utils/mockdata/core.py index 3484d6d0d3131f..e2cff8e4bfbb55 100644 --- a/src/sentry/utils/mockdata/core.py +++ b/src/sentry/utils/mockdata/core.py @@ -20,7 +20,8 @@ from sentry.constants import ObjectStatus from sentry.exceptions import HashDiscarded from sentry.incidents.logic import create_alert_rule, create_alert_rule_trigger, create_incident -from sentry.incidents.models import AlertRuleThresholdType, IncidentType +from sentry.incidents.models.alert_rule import AlertRuleThresholdType +from sentry.incidents.models.incident import IncidentType from sentry.models.activity import Activity from sentry.models.broadcast import Broadcast from sentry.models.commit import Commit diff --git a/src/sentry/utils/outcomes.py b/src/sentry/utils/outcomes.py index 4aa2951b4bc0aa..f7f1947797d354 100644 --- a/src/sentry/utils/outcomes.py +++ 
b/src/sentry/utils/outcomes.py @@ -4,8 +4,7 @@ from datetime import datetime from enum import IntEnum -from django.conf import settings - +from sentry.conf.types.kafka_definition import Topic from sentry.constants import DataCategory from sentry.utils import json, kafka_config, metrics from sentry.utils.dates import to_datetime @@ -72,8 +71,8 @@ def track_outcome( assert isinstance(category, (type(None), DataCategory)) assert isinstance(quantity, int) - outcomes_config = kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES) - billing_config = kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES_BILLING) + outcomes_config = kafka_config.get_topic_definition(Topic.OUTCOMES) + billing_config = kafka_config.get_topic_definition(Topic.OUTCOMES_BILLING) use_billing = outcome.is_billing() @@ -97,14 +96,10 @@ def track_outcome( timestamp = timestamp or to_datetime(time.time()) - # Send billing outcomes to a dedicated topic if there is a separate - # configuration for it. Otherwise, fall back to the regular outcomes topic. - # This does NOT switch the producer, if both topics are on the same cluster. - # - # In Sentry, there is no significant difference between the classes of - # outcome. In Sentry SaaS, they have elevated stability requirements as they - # are used for spike protection and quota enforcement. - topic_name = settings.KAFKA_OUTCOMES_BILLING if use_billing else settings.KAFKA_OUTCOMES + # Send billing outcomes to a dedicated topic. + topic_name = ( + billing_config["real_topic_name"] if use_billing else outcomes_config["real_topic_name"] + ) # Send a snuba metrics payload. 
publisher.publish( diff --git a/src/sentry/utils/performance_issues/performance_detection.py b/src/sentry/utils/performance_issues/performance_detection.py index 7f5a3afdff00da..8eb20d550a2e9a 100644 --- a/src/sentry/utils/performance_issues/performance_detection.py +++ b/src/sentry/utils/performance_issues/performance_detection.py @@ -110,7 +110,9 @@ def fetch_multi( # Facade in front of performance detection to limit impact of detection on our events ingestion -def detect_performance_problems(data: dict[str, Any], project: Project) -> list[PerformanceProblem]: +def detect_performance_problems( + data: dict[str, Any], project: Project, is_standalone_spans: bool = False +) -> list[PerformanceProblem]: try: rate = options.get("performance.issues.all.problem-detection") if rate and rate > random.random(): @@ -121,7 +123,9 @@ def detect_performance_problems(data: dict[str, Any], project: Project) -> list[ ), sentry_sdk.start_span( op="py.detect_performance_issue", description="none" ) as sdk_span: - return _detect_performance_problems(data, sdk_span, project) + return _detect_performance_problems( + data, sdk_span, project, is_standalone_spans=is_standalone_spans + ) except Exception: logging.exception("Failed to detect performance problems") return [] @@ -325,7 +329,7 @@ def get_detection_settings(project_id: int | None = None) -> dict[DetectorType, def _detect_performance_problems( - data: dict[str, Any], sdk_span: Any, project: Project + data: dict[str, Any], sdk_span: Any, project: Project, is_standalone_spans: bool = False ) -> list[PerformanceProblem]: event_id = data.get("event_id", None) @@ -340,7 +344,14 @@ def _detect_performance_problems( run_detector_on_data(detector, data) # Metrics reporting only for detection, not created issues. 
- report_metrics_for_detectors(data, event_id, detectors, sdk_span, project.organization) + report_metrics_for_detectors( + data, + event_id, + detectors, + sdk_span, + project.organization, + is_standalone_spans=is_standalone_spans, + ) organization = project.organization if project is None or organization is None: @@ -396,6 +407,7 @@ def report_metrics_for_detectors( detectors: Sequence[PerformanceDetector], sdk_span: Any, organization: Organization, + is_standalone_spans: bool = False, ): all_detected_problems = [i for d in detectors for i in d.stored_problems] has_detected_problems = bool(all_detected_problems) @@ -410,10 +422,11 @@ def report_metrics_for_detectors( if has_detected_problems: set_tag("_pi_all_issue_count", len(all_detected_problems)) set_tag("_pi_sdk_name", sdk_name or "") + set_tag("is_standalone_spans", is_standalone_spans) metrics.incr( "performance.performance_issue.aggregate", len(all_detected_problems), - tags={"sdk_name": sdk_name}, + tags={"sdk_name": sdk_name, "is_standalone_spans": is_standalone_spans}, ) if event_id: set_tag("_pi_transaction", event_id) @@ -444,6 +457,7 @@ def report_metrics_for_detectors( detected_tags = { "sdk_name": sdk_name, "is_early_adopter": organization.flags.early_adopter.is_set, + "is_standalone_spans": is_standalone_spans, } event_integrations = event.get("sdk", {}).get("integrations", []) or [] diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 5dbba56f16ba72..02622eef964854 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -217,8 +217,11 @@ def before_send_transaction(event, _): def before_send(event, _): - if event.get("tags") and settings.SILO_MODE: - event["tags"]["silo_mode"] = settings.SILO_MODE + if event.get("tags"): + if settings.SILO_MODE: + event["tags"]["silo_mode"] = settings.SILO_MODE + if settings.SENTRY_REGION: + event["tags"]["sentry_region"] = settings.SENTRY_REGION return event diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index 
38fcd47748cd0a..337e3d25eecf30 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -134,7 +134,8 @@ def log_snuba_info(content): "segment.id": "segment_id", "transaction.op": "transaction_op", "user": "user", - "profile_id": "profile_id", + "profile_id": "profile_id", # deprecated in favour of `profile.id` + "profile.id": "profile_id", "transaction.method": "sentry_tags[transaction.method]", "system": "sentry_tags[system]", "raw_domain": "sentry_tags[raw_domain]", diff --git a/src/sentry/web/client_config.py b/src/sentry/web/client_config.py index 42b0c7e97c75f3..7b9823f0410880 100644 --- a/src/sentry/web/client_config.py +++ b/src/sentry/web/client_config.py @@ -328,16 +328,23 @@ def regions(self) -> list[Mapping[str, Any]]: has membership on any single-tenant regions those will also be included. """ user = self.user + + # Only expose visible regions. + # When new regions are added they can take some work to get working correctly. + # Before they are working we need ways to bring parts of the region online without + # exposing the region to customers. region_names = find_all_multitenant_region_names() + if not region_names: return [{"name": "default", "url": options.get("system.url-prefix")}] - # No logged in user. + # Show all visible multi-tenant regions to unauthenticated users as they could + # create a new account if not user or not user.id: - return [get_region_by_name(region).api_serialize() for region in region_names] + return [get_region_by_name(name).api_serialize() for name in region_names] # Ensure all regions the current user is in are included as there - # could be single tenants as well. 
+ # could be single tenants or hidden regions memberships = user_service.get_organizations(user_id=user.id) unique_regions = set(region_names) | {membership.region_name for membership in memberships} diff --git a/src/sentry/web/frontend/debug/debug_incident_activity_email.py b/src/sentry/web/frontend/debug/debug_incident_activity_email.py index 5338713358a884..1fbd288625ae6d 100644 --- a/src/sentry/web/frontend/debug/debug_incident_activity_email.py +++ b/src/sentry/web/frontend/debug/debug_incident_activity_email.py @@ -1,7 +1,7 @@ from django.http import HttpRequest, HttpResponse from django.views.generic import View -from sentry.incidents.models import Incident, IncidentActivity, IncidentActivityType +from sentry.incidents.models.incident import Incident, IncidentActivity, IncidentActivityType from sentry.incidents.tasks import generate_incident_activity_email from sentry.models.organization import Organization from sentry.models.user import User diff --git a/src/sentry/web/frontend/debug/debug_incident_trigger_email.py b/src/sentry/web/frontend/debug/debug_incident_trigger_email.py index 5b22cff07ba5c3..cdb6893afb6b40 100644 --- a/src/sentry/web/frontend/debug/debug_incident_trigger_email.py +++ b/src/sentry/web/frontend/debug/debug_incident_trigger_email.py @@ -4,13 +4,8 @@ from django.utils import timezone from sentry.incidents.action_handlers import generate_incident_trigger_email_context -from sentry.incidents.models import ( - AlertRule, - AlertRuleTrigger, - Incident, - IncidentStatus, - TriggerStatus, -) +from sentry.incidents.models.alert_rule import AlertRule, AlertRuleTrigger +from sentry.incidents.models.incident import Incident, IncidentStatus, TriggerStatus from sentry.models.organization import Organization from sentry.models.project import Project from sentry.models.user import User diff --git a/static/app/components/actions/resolve.spec.tsx b/static/app/components/actions/resolve.spec.tsx index caba10b4e8fae6..59130c9b8f8c61 100644 --- 
a/static/app/components/actions/resolve.spec.tsx +++ b/static/app/components/actions/resolve.spec.tsx @@ -1,4 +1,3 @@ -import selectEvent from 'react-select-event'; import {ReleaseFixture} from 'sentry-fixture/release'; import { @@ -8,6 +7,7 @@ import { userEvent, within, } from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import ResolveActions from 'sentry/components/actions/resolve'; import ModalStore from 'sentry/stores/modalStore'; @@ -149,7 +149,7 @@ describe('ResolveActions', function () { await userEvent.click(screen.getByLabelText('More resolve options')); await userEvent.click(screen.getByText('Another existing release…')); - selectEvent.openMenu(screen.getByText('e.g. 1.0.4')); + await selectEvent.openMenu(screen.getByText('e.g. 1.0.4')); expect(await screen.findByText('1.2.0')).toBeInTheDocument(); await userEvent.click(screen.getByText('1.2.0')); diff --git a/static/app/components/contextPickerModal.spec.tsx b/static/app/components/contextPickerModal.spec.tsx index bfd943d0023be0..36069db3b63227 100644 --- a/static/app/components/contextPickerModal.spec.tsx +++ b/static/app/components/contextPickerModal.spec.tsx @@ -1,10 +1,10 @@ -import selectEvent from 'react-select-event'; import {GitHubIntegrationFixture} from 'sentry-fixture/githubIntegration'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; import {UserFixture} from 'sentry-fixture/user'; import {render, screen, waitFor} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import ContextPickerModal from 'sentry/components/contextPickerModal'; import { diff --git a/static/app/components/customCommitsResolutionModal.spec.tsx b/static/app/components/customCommitsResolutionModal.spec.tsx index 8cbafed1ff3f66..686a5373141699 100644 --- a/static/app/components/customCommitsResolutionModal.spec.tsx +++ 
b/static/app/components/customCommitsResolutionModal.spec.tsx @@ -1,8 +1,8 @@ -import selectEvent from 'react-select-event'; import styled from '@emotion/styled'; import {CommitFixture} from 'sentry-fixture/commit'; import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import CustomCommitsResolutionModal from 'sentry/components/customCommitsResolutionModal'; import {makeCloseButton} from 'sentry/components/globalModal/components'; diff --git a/static/app/components/customResolutionModal.spec.tsx b/static/app/components/customResolutionModal.spec.tsx index d018f4d5648eb2..edc89c7cab5e1e 100644 --- a/static/app/components/customResolutionModal.spec.tsx +++ b/static/app/components/customResolutionModal.spec.tsx @@ -1,10 +1,10 @@ -import selectEvent from 'react-select-event'; import styled from '@emotion/styled'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {ReleaseFixture} from 'sentry-fixture/release'; import {UserFixture} from 'sentry-fixture/user'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import CustomResolutionModal from 'sentry/components/customResolutionModal'; import {makeCloseButton} from 'sentry/components/globalModal/components'; @@ -43,7 +43,7 @@ describe('CustomResolutionModal', () => { ); expect(releasesMock).toHaveBeenCalled(); - selectEvent.openMenu(screen.getByText('e.g. 1.0.4')); + await selectEvent.openMenu(screen.getByText('e.g. 1.0.4')); expect(await screen.findByText('1.2.0')).toBeInTheDocument(); await userEvent.click(screen.getByText('1.2.0')); @@ -70,7 +70,7 @@ describe('CustomResolutionModal', () => { ); expect(releasesMock).toHaveBeenCalled(); - selectEvent.openMenu(screen.getByText('e.g. 1.0.4')); + await selectEvent.openMenu(screen.getByText('e.g. 
1.0.4')); expect(await screen.findByText(/You committed/)).toBeInTheDocument(); }); @@ -120,7 +120,7 @@ describe('CustomResolutionModal', () => { /> ); - selectEvent.openMenu(screen.getByText('e.g. 1.0.4')); + await selectEvent.openMenu(screen.getByText('e.g. 1.0.4')); expect( await screen.findByRole('menuitemradio', {name: 'abcdef (non-semver)'}) ).toBeInTheDocument(); diff --git a/static/app/components/ddm/metricSamplesTable.tsx b/static/app/components/ddm/metricSamplesTable.tsx index 9a985f333380cd..07c8151b3632e9 100644 --- a/static/app/components/ddm/metricSamplesTable.tsx +++ b/static/app/components/ddm/metricSamplesTable.tsx @@ -3,7 +3,7 @@ import styled from '@emotion/styled'; import type {LocationDescriptorObject} from 'history'; import debounce from 'lodash/debounce'; -import {LinkButton} from 'sentry/components/button'; +import {Button, LinkButton} from 'sentry/components/button'; import EmptyStateWarning from 'sentry/components/emptyStateWarning'; import GridEditable, { COL_WIDTH_UNDEFINED, @@ -525,7 +525,13 @@ function ProfileId({projectSlug, profileId}: {projectSlug: string; profileId?: s const organization = useOrganization(); if (!defined(profileId)) { - return {t('(no value)')}; + return ( + + + + ); } const target = generateProfileFlamechartRoute({ @@ -543,11 +549,6 @@ function ProfileId({projectSlug, profileId}: {projectSlug: string; profileId?: s ); } -const EmptyValueContainer = styled('span')` - color: ${p => p.theme.gray300}; - ${p => p.theme.overflowEllipsis}; -`; - const SearchBar = styled(SmartSearchBar)` margin-bottom: ${space(2)}; `; diff --git a/static/app/components/deprecatedforms/selectAsyncField.spec.tsx b/static/app/components/deprecatedforms/selectAsyncField.spec.tsx index 697bf7a1034c89..e26b5e37953a5d 100644 --- a/static/app/components/deprecatedforms/selectAsyncField.spec.tsx +++ b/static/app/components/deprecatedforms/selectAsyncField.spec.tsx @@ -1,14 +1,14 @@ -import selectEvent from 'react-select-event'; - import {render, 
screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import Form from 'sentry/components/deprecatedforms/form'; import SelectAsyncField from 'sentry/components/deprecatedforms/selectAsyncField'; describe('SelectAsyncField', function () { - let api; + let api: jest.Mock; beforeEach(function () { + MockApiClient.clearMockResponses(); api = MockApiClient.addMockResponse({ url: '/foo/bar/', body: { @@ -26,7 +26,7 @@ describe('SelectAsyncField', function () { it('supports autocomplete arguments from an integration', async function () { render(); - selectEvent.openMenu(screen.getByText('Select me')); + await selectEvent.openMenu(screen.getByText('Select me')); await userEvent.type(screen.getByRole('textbox'), 'baz'); expect(api).toHaveBeenCalled(); @@ -43,10 +43,9 @@ describe('SelectAsyncField', function () { ); - selectEvent.openMenu(screen.getByText('Select me')); + await selectEvent.openMenu(screen.getByText('Select me')); await userEvent.type(screen.getByRole('textbox'), 'baz'); - - await selectEvent.select(screen.getByText('Select me'), 'Baz Label'); + await userEvent.click(screen.getByText('Baz Label')); expect(screen.getByLabelText('form')).toHaveFormValues({ fieldName: 'baz', diff --git a/static/app/components/deprecatedforms/selectField.spec.tsx b/static/app/components/deprecatedforms/selectField.spec.tsx index 70795ca9801380..722c71c333f7b1 100644 --- a/static/app/components/deprecatedforms/selectField.spec.tsx +++ b/static/app/components/deprecatedforms/selectField.spec.tsx @@ -1,6 +1,5 @@ -import selectEvent from 'react-select-event'; - import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import Form from 'sentry/components/deprecatedforms/form'; import SelectField from 'sentry/components/deprecatedforms/selectField'; diff --git a/static/app/components/dropdownAutoComplete/types.tsx 
b/static/app/components/dropdownAutoComplete/types.tsx index 41fbe6d4f7f6ae..d3e9c80c66e80e 100644 --- a/static/app/components/dropdownAutoComplete/types.tsx +++ b/static/app/components/dropdownAutoComplete/types.tsx @@ -1,6 +1,6 @@ export type Item = { index: number; - label: ((value: any) => React.ReactNode) | React.ReactNode; + label: React.ReactNode; value: any; 'data-test-id'?: string; disabled?: boolean; diff --git a/static/app/components/dropdownMenu/footer.tsx b/static/app/components/dropdownMenu/footer.tsx new file mode 100644 index 00000000000000..de1c7d14cd5e54 --- /dev/null +++ b/static/app/components/dropdownMenu/footer.tsx @@ -0,0 +1,15 @@ +import styled from '@emotion/styled'; + +import {space} from 'sentry/styles/space'; + +/** + * Provides default styling for custom footer content in a `DropdownMenu`. + */ +export const DropdownMenuFooter = styled('div')` + border-top: solid 1px ${p => p.theme.innerBorder}; + padding: ${space(1)} ${space(1.5)}; + font-size: ${p => p.theme.fontSizeSmall}; + color: ${p => p.theme.subText}; + display: flex; + align-items: center; +`; diff --git a/static/app/components/dropdownMenu/list.tsx b/static/app/components/dropdownMenu/list.tsx index 95f68b54d20b95..d1907b352230bc 100644 --- a/static/app/components/dropdownMenu/list.tsx +++ b/static/app/components/dropdownMenu/list.tsx @@ -57,6 +57,10 @@ export interface DropdownMenuListProps * Whether the menu should close when an item has been clicked/selected */ closeOnSelect?: boolean; + /** + * To be displayed below the menu items + */ + menuFooter?: React.ReactChild; /** * Title to display on top of the menu */ @@ -74,6 +78,7 @@ function DropdownMenuList({ minMenuWidth, size, menuTitle, + menuFooter, overlayState, overlayPositionProps, ...props @@ -249,6 +254,7 @@ function DropdownMenuList({ > {renderCollection(stateCollection)} + {menuFooter} diff --git a/static/app/components/events/eventExtraData/index.spec.tsx 
b/static/app/components/events/eventExtraData/index.spec.tsx index d724e4fa70f8a6..0038ce1445fa49 100644 --- a/static/app/components/events/eventExtraData/index.spec.tsx +++ b/static/app/components/events/eventExtraData/index.spec.tsx @@ -178,6 +178,7 @@ describe('EventExtraData', function () { }, }); + await userEvent.click(screen.getByRole('button', {name: 'Expand'})); expect(await screen.findAllByText(/redacted/)).toHaveLength(10); await userEvent.hover(screen.getAllByText(/redacted/)[0]); diff --git a/static/app/components/events/eventReplay/constants.tsx b/static/app/components/events/eventReplay/constants.tsx new file mode 100644 index 00000000000000..0764635529a0ba --- /dev/null +++ b/static/app/components/events/eventReplay/constants.tsx @@ -0,0 +1 @@ +export const REPLAY_LOADING_HEIGHT = 480; diff --git a/static/app/components/events/eventReplay/index.tsx b/static/app/components/events/eventReplay/index.tsx index 96ae7f273c1329..51a1949b68d113 100644 --- a/static/app/components/events/eventReplay/index.tsx +++ b/static/app/components/events/eventReplay/index.tsx @@ -4,6 +4,7 @@ import styled from '@emotion/styled'; import NegativeSpaceContainer from 'sentry/components/container/negativeSpaceContainer'; import ErrorBoundary from 'sentry/components/errorBoundary'; +import {REPLAY_LOADING_HEIGHT} from 'sentry/components/events/eventReplay/constants'; import {EventReplaySection} from 'sentry/components/events/eventReplay/eventReplaySection'; import LazyLoad from 'sentry/components/lazyLoad'; import LoadingIndicator from 'sentry/components/loadingIndicator'; @@ -109,6 +110,7 @@ function EventReplayContent({ {...commonProps} component={replayClipPreview} clipOffsets={CLIP_OFFSETS} + issueCategory={group?.issueCategory} /> ) : ( @@ -144,10 +146,10 @@ export default function EventReplay({event, group, projectSlug}: Props) { // The min-height here is due to max-height that is set in replayPreview.tsx const ReplaySectionMinHeight = styled(EventReplaySection)` - 
min-height: 508px; + min-height: 557px; `; const StyledNegativeSpaceContainer = styled(NegativeSpaceContainer)` - height: 400px; + height: ${REPLAY_LOADING_HEIGHT}px; margin-bottom: ${space(2)}; `; diff --git a/static/app/components/events/eventReplay/replayClipPreview.tsx b/static/app/components/events/eventReplay/replayClipPreview.tsx index 82d4b9bf2fbe5d..de444df33da8c1 100644 --- a/static/app/components/events/eventReplay/replayClipPreview.tsx +++ b/static/app/components/events/eventReplay/replayClipPreview.tsx @@ -7,6 +7,7 @@ import {LinkButton} from 'sentry/components/button'; import ButtonBar from 'sentry/components/buttonBar'; import NegativeSpaceContainer from 'sentry/components/container/negativeSpaceContainer'; import ErrorBoundary from 'sentry/components/errorBoundary'; +import {REPLAY_LOADING_HEIGHT} from 'sentry/components/events/eventReplay/constants'; import {StaticReplayPreview} from 'sentry/components/events/eventReplay/staticReplayPreview'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import Panel from 'sentry/components/panels/panel'; @@ -27,11 +28,14 @@ import TimeAndScrubberGrid from 'sentry/components/replays/timeAndScrubberGrid'; import {IconDelete} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; +import {IssueCategory} from 'sentry/types'; +import EventView from 'sentry/utils/discover/eventView'; import getRouteStringFromRoutes from 'sentry/utils/getRouteStringFromRoutes'; import {TabKey} from 'sentry/utils/replays/hooks/useActiveReplayTab'; import useReplayReader from 'sentry/utils/replays/hooks/useReplayReader'; import type RequestError from 'sentry/utils/requestError/requestError'; import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams'; +import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import {useRoutes} from 'sentry/utils/useRoutes'; import useFullscreen from 
'sentry/utils/window/useFullscreen'; @@ -40,6 +44,7 @@ import {normalizeUrl} from 'sentry/utils/withDomainRequired'; import Breadcrumbs from 'sentry/views/replays/detail/breadcrumbs'; import BrowserOSIcons from 'sentry/views/replays/detail/browserOSIcons'; import FluidHeight from 'sentry/views/replays/detail/layout/fluidHeight'; +import {ReplayCell} from 'sentry/views/replays/replayTable/tableCell'; import type {ReplayRecord} from 'sentry/views/replays/types'; type Props = { @@ -53,6 +58,7 @@ type Props = { replaySlug: string; focusTab?: TabKey; fullReplayButtonProps?: Partial>; + issueCategory?: IssueCategory; }; function getReplayAnalyticsStatus({ @@ -80,14 +86,20 @@ function getReplayAnalyticsStatus({ function ReplayPreviewPlayer({ replayId, fullReplayButtonProps, + replayRecord, + issueCategory, }: { replayId: string; + replayRecord: ReplayRecord; fullReplayButtonProps?: Partial>; + issueCategory?: IssueCategory; }) { const routes = useRoutes(); + const location = useLocation(); const organization = useOrganization(); const [isSidebarOpen, setIsSidebarOpen] = useState(true); const {replay, currentTime} = useReplayContext(); + const eventView = EventView.fromLocation(location); const fullscreenRef = useRef(null); const {toggle: toggleFullscreen} = useFullscreen({ @@ -96,17 +108,29 @@ function ReplayPreviewPlayer({ const isFullscreen = useIsFullscreen(); const startOffsetMs = replay?.getStartOffsetMs() ?? 0; + const isRageClickIssue = issueCategory === IssueCategory.REPLAY; + const fullReplayUrl = { pathname: normalizeUrl(`/organizations/${organization.slug}/replays/${replayId}/`), query: { referrer: getRouteStringFromRoutes(routes), - t_main: TabKey.ERRORS, + t_main: isRageClickIssue ? TabKey.BREADCRUMBS : TabKey.ERRORS, t: (currentTime + startOffsetMs) / 1000, + f_b_type: isRageClickIssue ? 
'rageOrDead' : undefined, }, }; return ( + {replayRecord && ( + + )} @@ -152,6 +176,7 @@ function ReplayClipPreview({ orgSlug, replaySlug, fullReplayButtonProps, + issueCategory, }: Props) { const clipWindow = useMemo( () => ({ @@ -221,6 +246,8 @@ function ReplayClipPreview({ )} @@ -261,7 +288,7 @@ const PreviewPlayerContainer = styled(FluidHeight)<{isSidebarOpen: boolean}>` const PlayerContainer = styled(FluidHeight)` position: relative; - max-height: 448px; + max-height: ${REPLAY_LOADING_HEIGHT + 16}px; `; const PlayerContextContainer = styled(FluidHeight)` @@ -276,7 +303,7 @@ const StaticPanel = styled(FluidHeight)` `; const StyledNegativeSpaceContainer = styled(NegativeSpaceContainer)` - height: 400px; + height: ${REPLAY_LOADING_HEIGHT}px; margin-bottom: ${space(2)}; `; @@ -303,4 +330,8 @@ const ContextContainer = styled('div')` gap: ${space(1)}; `; +const ReplayCellNoPadding = styled(ReplayCell)` + padding: 0 0 ${space(1)}; +`; + export default ReplayClipPreview; diff --git a/static/app/components/events/eventReplay/replayPreview.tsx b/static/app/components/events/eventReplay/replayPreview.tsx index dee70c91421702..33ca9389c22e67 100644 --- a/static/app/components/events/eventReplay/replayPreview.tsx +++ b/static/app/components/events/eventReplay/replayPreview.tsx @@ -5,6 +5,7 @@ import styled from '@emotion/styled'; import {Alert} from 'sentry/components/alert'; import type {LinkButton} from 'sentry/components/button'; import NegativeSpaceContainer from 'sentry/components/container/negativeSpaceContainer'; +import {REPLAY_LOADING_HEIGHT} from 'sentry/components/events/eventReplay/constants'; import {StaticReplayPreview} from 'sentry/components/events/eventReplay/staticReplayPreview'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import {Flex} from 'sentry/components/profiling/flex'; @@ -112,7 +113,7 @@ function ReplayPreview({ } const StyledNegativeSpaceContainer = styled(NegativeSpaceContainer)` - height: 400px; + height: 
${REPLAY_LOADING_HEIGHT}px; margin-bottom: ${space(2)}; `; diff --git a/static/app/components/events/eventReplay/staticReplayPreview.tsx b/static/app/components/events/eventReplay/staticReplayPreview.tsx index fac32545b58069..0156e4110f7a0c 100644 --- a/static/app/components/events/eventReplay/staticReplayPreview.tsx +++ b/static/app/components/events/eventReplay/staticReplayPreview.tsx @@ -2,6 +2,7 @@ import {type ComponentProps, Fragment, useMemo} from 'react'; import styled from '@emotion/styled'; import {LinkButton} from 'sentry/components/button'; +import {REPLAY_LOADING_HEIGHT} from 'sentry/components/events/eventReplay/constants'; import {StaticReplayPreferences} from 'sentry/components/replays/preferences/replayPreferences'; import {Provider as ReplayContextProvider} from 'sentry/components/replays/replayContext'; import ReplayPlayer from 'sentry/components/replays/replayPlayer'; @@ -91,7 +92,7 @@ const PlayerContainer = styled(FluidHeight)` position: relative; background: ${p => p.theme.background}; gap: ${space(1)}; - max-height: 448px; + max-height: ${REPLAY_LOADING_HEIGHT + 16}px; `; const StaticPanel = styled(FluidHeight)` diff --git a/static/app/components/events/interfaces/performance/spanEvidenceKeyValueList.tsx b/static/app/components/events/interfaces/performance/spanEvidenceKeyValueList.tsx index 4374d8cebc4158..c6cc119d566a05 100644 --- a/static/app/components/events/interfaces/performance/spanEvidenceKeyValueList.tsx +++ b/static/app/components/events/interfaces/performance/spanEvidenceKeyValueList.tsx @@ -475,7 +475,7 @@ const makeTransactionNameRow = (event: Event, orgSlug: string, projectSlug?: str const makeRow = ( subject: KeyValueListDataItem['subject'], - value: KeyValueListDataItem['value'] | KeyValueListDataItem['value'][], + value: KeyValueListDataItem['value'], actionButton?: ReactNode ): KeyValueListDataItem => { const itemKey = kebabCase(subject); diff --git a/static/app/components/events/interfaces/request/index.spec.tsx 
b/static/app/components/events/interfaces/request/index.spec.tsx index 349775b91cb910..df27c7b78df44f 100644 --- a/static/app/components/events/interfaces/request/index.spec.tsx +++ b/static/app/components/events/interfaces/request/index.spec.tsx @@ -174,6 +174,8 @@ describe('Request entry', function () { expect(screen.getAllByText(/redacted/)).toHaveLength(5); + // Expand two levels down + await userEvent.click(await screen.findByLabelText('Expand')); await userEvent.click(await screen.findByLabelText('Expand')); expect(screen.getAllByText(/redacted/)).toHaveLength(7); diff --git a/static/app/components/feedback/feedbackOnboarding/feedbackOnboardingLayout.tsx b/static/app/components/feedback/feedbackOnboarding/feedbackOnboardingLayout.tsx index 6190e87172044c..1e404e0d59a489 100644 --- a/static/app/components/feedback/feedbackOnboarding/feedbackOnboardingLayout.tsx +++ b/static/app/components/feedback/feedbackOnboarding/feedbackOnboardingLayout.tsx @@ -23,8 +23,8 @@ export function FeedbackOnboardingLayout({ }: OnboardingLayoutProps) { const organization = useOrganization(); - const [email, setEmail] = useState(true); - const [name, setName] = useState(true); + const [email, setEmail] = useState(false); + const [name, setName] = useState(false); const {isLoading: isLoadingRegistry, data: registryData} = useSourcePackageRegistries(organization); diff --git a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx index f96669948b06ff..8882ab74f54b1f 100644 --- a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx +++ b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx @@ -8,12 +8,13 @@ import HighlightTopRightPattern from 'sentry-images/pattern/highlight-top-right. 
import {Button} from 'sentry/components/button'; import {CompactSelect} from 'sentry/components/compactSelect'; import {FeedbackOnboardingLayout} from 'sentry/components/feedback/feedbackOnboarding/feedbackOnboardingLayout'; -import useCurrentProjectState from 'sentry/components/feedback/feedbackOnboarding/useCurrentProjectState'; import useLoadFeedbackOnboardingDoc from 'sentry/components/feedback/feedbackOnboarding/useLoadFeedbackOnboardingDoc'; +import {CRASH_REPORT_HASH} from 'sentry/components/feedback/useFeedbackOnboarding'; import RadioGroup from 'sentry/components/forms/controls/radioGroup'; import IdBadge from 'sentry/components/idBadge'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import {FeedbackOnboardingWebApiBanner} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding'; +import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; import {PlatformOptionDropdown} from 'sentry/components/replaysOnboarding/platformOptionDropdown'; import {replayJsFrameworkOptions} from 'sentry/components/replaysOnboarding/utils'; import SidebarPanel from 'sentry/components/sidebar/sidebarPanel'; @@ -34,6 +35,7 @@ import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {PlatformKey, Project, SelectValue} from 'sentry/types'; import useOrganization from 'sentry/utils/useOrganization'; +import {useRouteContext} from 'sentry/utils/useRouteContext'; import useUrlParams from 'sentry/utils/useUrlParams'; function FeedbackOnboardingSidebar(props: CommonSidebarProps) { @@ -43,12 +45,15 @@ function FeedbackOnboardingSidebar(props: CommonSidebarProps) { const isActive = currentPanel === SidebarPanelKey.FEEDBACK_ONBOARDING; const hasProjectAccess = organization.access.includes('project:read'); - const {projects, currentProject, setCurrentProject} = useCurrentProjectState({ + const {allProjects, currentProject, setCurrentProject} = 
useCurrentProjectState({ currentPanel, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, }); const projectSelectOptions = useMemo(() => { - const supportedProjectItems: SelectValue[] = projects + const supportedProjectItems: SelectValue[] = allProjects .sort((aProject, bProject) => { // if we're comparing two projects w/ or w/o feedback alphabetical sort if (aProject.hasNewFeedbacks === bProject.hasNewFeedbacks) { @@ -73,7 +78,7 @@ function FeedbackOnboardingSidebar(props: CommonSidebarProps) { options: supportedProjectItems, }, ]; - }, [projects]); + }, [allProjects]); if (!isActive || !hasProjectAccess || !currentProject) { return null; @@ -112,7 +117,9 @@ function FeedbackOnboardingSidebar(props: CommonSidebarProps) { ) } value={currentProject?.id} - onChange={opt => setCurrentProject(projects.find(p => p.id === opt.value))} + onChange={opt => + setCurrentProject(allProjects.find(p => p.id === opt.value)) + } triggerProps={{'aria-label': currentProject?.slug}} options={projectSelectOptions} position="bottom-end" @@ -147,6 +154,8 @@ function OnboardingContent({currentProject}: {currentProject: Project}) { }>(jsFrameworkSelectOptions[0]); const defaultTab = 'npm'; + const {location} = useRouteContext(); + const crashReportOnboarding = location.hash === CRASH_REPORT_HASH; const {getParamValue: setupMode, setParamValue: setSetupMode} = useUrlParams( 'mode', @@ -168,9 +177,9 @@ function OnboardingContent({currentProject}: {currentProject: Project}) { .filter(p => p !== 'javascript') .includes(currentPlatform.id); - const showRadioButtons = replayJsLoaderInstructionsPlatformList.includes( - currentPlatform.id - ); + const showRadioButtons = + replayJsLoaderInstructionsPlatformList.includes(currentPlatform.id) && + !crashReportOnboarding; function getJsFramework() { return ( @@ -197,7 +206,7 @@ function OnboardingContent({currentProject}: {currentProject: Project}) { 
projectSlug: currentProject.slug, }); - if (webApiPlatform) { + if (webApiPlatform && !crashReportOnboarding) { return ; } @@ -245,7 +254,8 @@ function OnboardingContent({currentProject}: {currentProject: Project}) { /> ) : ( newDocs?.platformOptions && - widgetPlatform && ( + widgetPlatform && + !crashReportOnboarding && ( {tct("I'm using [platformSelect]", { platformSelect: ( @@ -295,6 +305,9 @@ function OnboardingContent({currentProject}: {currentProject: Project}) { } function getConfig() { + if (crashReportOnboarding) { + return 'crashReportOnboarding'; + } if (crashApiPlatform) { return 'feedbackOnboardingCrashApi'; } diff --git a/static/app/components/feedback/feedbackSetupPanel.tsx b/static/app/components/feedback/feedbackSetupPanel.tsx index 9c9f1530c5b1d8..9c7bda62c43014 100644 --- a/static/app/components/feedback/feedbackSetupPanel.tsx +++ b/static/app/components/feedback/feedbackSetupPanel.tsx @@ -37,7 +37,13 @@ export default function FeedbackSetupPanel() { )}

{hasNewOnboarding ? ( - ) : ( diff --git a/static/app/components/feedback/useFeedbackOnboarding.tsx b/static/app/components/feedback/useFeedbackOnboarding.tsx index 024c0fec28c936..cfe1e0f5ddb573 100644 --- a/static/app/components/feedback/useFeedbackOnboarding.tsx +++ b/static/app/components/feedback/useFeedbackOnboarding.tsx @@ -7,6 +7,9 @@ import useSelectedProjectsHaveField from 'sentry/utils/project/useSelectedProjec import useOrganization from 'sentry/utils/useOrganization'; import {useRouteContext} from 'sentry/utils/useRouteContext'; +export const CRASH_REPORT_HASH = '#crashreport-sidequest'; +export const FEEDBACK_HASH = '#feedback-sidequest'; + export default function useHaveSelectedProjectsSetupFeedback() { const {hasField: hasSetupOneFeedback, fetching} = useSelectedProjectsHaveField('hasFeedbacks'); @@ -24,8 +27,9 @@ export function useFeedbackOnboardingSidebarPanel() { const organization = useOrganization(); useEffect(() => { - if (location.hash === '#feedback-sidequest') { + if (location.hash === FEEDBACK_HASH || location.hash === CRASH_REPORT_HASH) { SidebarPanelStore.activatePanel(SidebarPanelKey.FEEDBACK_ONBOARDING); + // this tracks clicks from both feedback index and issue details feedback tab trackAnalytics('feedback.list-view-setup-sidebar', { organization, }); @@ -34,9 +38,18 @@ export function useFeedbackOnboardingSidebarPanel() { const activateSidebar = useCallback((event: {preventDefault: () => void}) => { event.preventDefault(); - window.location.hash = 'feedback-sidequest'; + window.location.hash = FEEDBACK_HASH; SidebarPanelStore.activatePanel(SidebarPanelKey.FEEDBACK_ONBOARDING); }, []); - return {activateSidebar}; + const activateSidebarIssueDetails = useCallback( + (event: {preventDefault: () => void}) => { + event.preventDefault(); + window.location.hash = CRASH_REPORT_HASH; + SidebarPanelStore.activatePanel(SidebarPanelKey.FEEDBACK_ONBOARDING); + }, + [] + ); + + return {activateSidebar, activateSidebarIssueDetails}; } diff --git 
a/static/app/components/forms/fields/accessibility.spec.tsx b/static/app/components/forms/fields/accessibility.spec.tsx index d82303b4b13ffd..c923346fb20049 100644 --- a/static/app/components/forms/fields/accessibility.spec.tsx +++ b/static/app/components/forms/fields/accessibility.spec.tsx @@ -1,5 +1,3 @@ -import selectEvent from 'react-select-event'; - import { fireEvent, render, @@ -7,6 +5,7 @@ import { userEvent, within, } from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import Form from 'sentry/components/forms/form'; diff --git a/static/app/components/forms/fields/projectMapperField.spec.tsx b/static/app/components/forms/fields/projectMapperField.spec.tsx index 93713e2d6cd85f..8ad7136b8b4857 100644 --- a/static/app/components/forms/fields/projectMapperField.spec.tsx +++ b/static/app/components/forms/fields/projectMapperField.spec.tsx @@ -1,7 +1,7 @@ import type {ComponentProps} from 'react'; -import selectEvent from 'react-select-event'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import FormModel from 'sentry/components/forms/model'; @@ -79,15 +79,15 @@ describe('ProjectMapperField', () => { expect(defaultProps.onChange).toHaveBeenCalledWith([[24, 1]], []); }); - it('allows a single Sentry project to map to multiple items but not the value', () => { + it('allows a single Sentry project to map to multiple items but not the value', async () => { render(); // can find the same project again - selectEvent.openMenu(screen.getByText(/Sentry project/)); + await selectEvent.openMenu(screen.getByText(/Sentry project/)); expect(screen.getAllByText('beans')).toHaveLength(2); // but not the value - selectEvent.openMenu(screen.getByText('mapped-dropdown-placeholder')); + await selectEvent.openMenu(screen.getByText('mapped-dropdown-placeholder')); expect(screen.getByText('label 1')).toBeInTheDocument(); // validate we can still find 2 diff --git 
a/static/app/components/forms/fields/sentryMemberTeamSelectorField.spec.tsx b/static/app/components/forms/fields/sentryMemberTeamSelectorField.spec.tsx index 0d28766262aa03..7e0c70912d944e 100644 --- a/static/app/components/forms/fields/sentryMemberTeamSelectorField.spec.tsx +++ b/static/app/components/forms/fields/sentryMemberTeamSelectorField.spec.tsx @@ -1,10 +1,10 @@ -import selectEvent from 'react-select-event'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; import {TeamFixture} from 'sentry-fixture/team'; import {UserFixture} from 'sentry-fixture/user'; import {render, screen} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import MemberListStore from 'sentry/stores/memberListStore'; import OrganizationStore from 'sentry/stores/organizationStore'; diff --git a/static/app/components/forms/fields/sentryProjectSelectorField.spec.tsx b/static/app/components/forms/fields/sentryProjectSelectorField.spec.tsx index 51a286beb78749..60446b05358511 100644 --- a/static/app/components/forms/fields/sentryProjectSelectorField.spec.tsx +++ b/static/app/components/forms/fields/sentryProjectSelectorField.spec.tsx @@ -1,7 +1,7 @@ -import selectEvent from 'react-select-event'; import {ProjectFixture} from 'sentry-fixture/project'; import {render, screen} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import SentryProjectSelectorField from './sentryProjectSelectorField'; diff --git a/static/app/components/group/groupPriority.spec.tsx b/static/app/components/group/groupPriority.spec.tsx new file mode 100644 index 00000000000000..06ac034c6a8a84 --- /dev/null +++ b/static/app/components/group/groupPriority.spec.tsx @@ -0,0 +1,54 @@ +import {ActivityFeedFixture} from 'sentry-fixture/activityFeed'; +import {UserFixture} from 'sentry-fixture/user'; + +import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; 
+import {textWithMarkupMatcher} from 'sentry-test/utils'; + +import {GroupPriorityDropdown} from 'sentry/components/group/groupPriority'; +import {GroupActivityType, PriorityLevel} from 'sentry/types'; + +describe('GroupPriority', function () { + describe('GroupPriorityDropdown', function () { + const defaultProps = { + groupId: '1', + onChange: jest.fn(), + value: PriorityLevel.HIGH, + }; + + it('skips request when sent lastEditedBy', async function () { + render(); + + await userEvent.click(screen.getByRole('button', {name: 'Modify issue priority'})); + + expect( + screen.getByText(textWithMarkupMatcher('Last edited by Sentry')) + ).toBeInTheDocument(); + }); + + it('fetches the last priority edit when not passed in', async function () { + MockApiClient.addMockResponse({ + url: '/issues/1/activities/', + body: { + activity: [ + ActivityFeedFixture({ + type: GroupActivityType.SET_PRIORITY, + user: UserFixture({name: 'John Doe'}), + }), + ActivityFeedFixture({ + type: GroupActivityType.SET_PRIORITY, + user: UserFixture({name: 'Other User'}), + }), + ], + }, + }); + + render(); + + await userEvent.click(screen.getByRole('button', {name: 'Modify issue priority'})); + + expect( + await screen.findByText(textWithMarkupMatcher('Last edited by John Doe')) + ).toBeInTheDocument(); + }); + }); +}); diff --git a/static/app/components/group/groupPriority.stories.tsx b/static/app/components/group/groupPriority.stories.tsx index 990a8f5b47d092..6cd95561a725a1 100644 --- a/static/app/components/group/groupPriority.stories.tsx +++ b/static/app/components/group/groupPriority.stories.tsx @@ -24,6 +24,13 @@ export const Dropdown = storyBook(GroupPriorityDropdown, story => { story('Default', () => { const [value, setValue] = useState(PriorityLevel.MEDIUM); - return ; + return ( + + ); }); }); diff --git a/static/app/components/group/groupPriority.tsx b/static/app/components/group/groupPriority.tsx index ffdf5f834ca7bd..3b686aa9b6bbf8 100644 --- 
a/static/app/components/group/groupPriority.tsx +++ b/static/app/components/group/groupPriority.tsx @@ -1,18 +1,31 @@ -import {useMemo} from 'react'; +import {useMemo, useRef} from 'react'; import type {Theme} from '@emotion/react'; import styled from '@emotion/styled'; import {Button} from 'sentry/components/button'; -import {DropdownMenu, type MenuItemProps} from 'sentry/components/dropdownMenu'; +import type {MenuItemProps} from 'sentry/components/dropdownMenu'; +import {DropdownMenu} from 'sentry/components/dropdownMenu'; +import {DropdownMenuFooter} from 'sentry/components/dropdownMenu/footer'; +import useFeedbackWidget from 'sentry/components/feedback/widget/useFeedbackWidget'; +import Placeholder from 'sentry/components/placeholder'; import Tag from 'sentry/components/tag'; import {IconChevron} from 'sentry/icons'; -import {t} from 'sentry/locale'; +import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import {PriorityLevel} from 'sentry/types'; +import { + type Activity, + type AvatarUser, + GroupActivityType, + PriorityLevel, +} from 'sentry/types'; +import {defined} from 'sentry/utils'; +import {useApiQuery} from 'sentry/utils/queryClient'; type GroupPriorityDropdownProps = { + groupId: string; onChange: (value: PriorityLevel) => void; value: PriorityLevel; + lastEditedBy?: 'system' | AvatarUser; }; type GroupPriorityBadgeProps = { @@ -22,7 +35,7 @@ type GroupPriorityBadgeProps = { const PRIORITY_KEY_TO_LABEL: Record = { [PriorityLevel.HIGH]: t('High'), - [PriorityLevel.MEDIUM]: t('Medium'), + [PriorityLevel.MEDIUM]: t('Med'), [PriorityLevel.LOW]: t('Low'), }; @@ -40,6 +53,33 @@ function getTagTypeForPriority(priority: string): keyof Theme['tag'] { } } +function useLastEditedBy({ + groupId, + lastEditedBy: incomingLastEditedBy, +}: Pick) { + const {data} = useApiQuery<{activity: Activity[]}>([`/issues/${groupId}/activities/`], { + enabled: !defined(incomingLastEditedBy), + staleTime: 0, + }); + + const lastEditedBy = useMemo(() 
=> { + if (incomingLastEditedBy) { + return incomingLastEditedBy; + } + + if (!data) { + return null; + } + + return ( + data?.activity?.find(activity => activity.type === GroupActivityType.SET_PRIORITY) + ?.user ?? 'system' + ); + }, [data, incomingLastEditedBy]); + + return lastEditedBy; +} + export function GroupPriorityBadge({priority, children}: GroupPriorityBadgeProps) { return ( @@ -49,7 +89,49 @@ export function GroupPriorityBadge({priority, children}: GroupPriorityBadgeProps ); } -export function GroupPriorityDropdown({value, onChange}: GroupPriorityDropdownProps) { +function PriorityChangeActor({ + groupId, + lastEditedBy, +}: Pick) { + const resolvedLastEditedBy = useLastEditedBy({groupId, lastEditedBy}); + + if (!resolvedLastEditedBy) { + return ; + } + + if (resolvedLastEditedBy === 'system') { + return Sentry; + } + + return {resolvedLastEditedBy.name}; +} + +function GroupPriorityFeedback() { + const buttonRef = useRef(null); + const feedback = useFeedbackWidget({buttonRef}); + + if (!feedback) { + return null; + } + + return ( + e.stopPropagation()} + > + {t('Give Feedback')} + + ); +} + +export function GroupPriorityDropdown({ + groupId, + value, + onChange, + lastEditedBy, +}: GroupPriorityDropdownProps) { const options: MenuItemProps[] = useMemo(() => { return PRIORITY_OPTIONS.map(priority => ({ textValue: PRIORITY_KEY_TO_LABEL[priority], @@ -62,8 +144,13 @@ export function GroupPriorityDropdown({value, onChange}: GroupPriorityDropdownPr return ( +
{t('Set Priority')}
+ + + } + minMenuWidth={210} trigger={triggerProps => ( )} items={options} + menuFooter={ + +
+ {tct('Last edited by [name]', { + name: , + })} +
+
+ } + position="bottom-end" /> ); } @@ -95,3 +192,26 @@ const StyledTag = styled(Tag)` gap: ${space(0.5)}; } `; + +const InlinePlaceholder = styled(Placeholder)` + display: inline-block; + vertical-align: middle; +`; + +const MenuTitleContainer = styled('div')` + display: flex; + align-items: flex-end; + justify-content: space-between; +`; + +const StyledButton = styled(Button)` + font-size: ${p => p.theme.fontSizeSmall}; + color: ${p => p.theme.subText}; + font-weight: normal; + padding: 0; + border: none; + + &:hover { + color: ${p => p.theme.subText}; + } +`; diff --git a/static/app/components/group/sentryAppExternalIssueForm.spec.tsx b/static/app/components/group/sentryAppExternalIssueForm.spec.tsx index 224ff73e68a7f9..20508687d82868 100644 --- a/static/app/components/group/sentryAppExternalIssueForm.spec.tsx +++ b/static/app/components/group/sentryAppExternalIssueForm.spec.tsx @@ -1,4 +1,3 @@ -import selectEvent from 'react-select-event'; import {EventFixture} from 'sentry-fixture/event'; import {GroupFixture} from 'sentry-fixture/group'; import {SentryAppFixture} from 'sentry-fixture/sentryApp'; @@ -10,6 +9,7 @@ import { import {SentryAppInstallationFixture} from 'sentry-fixture/sentryAppInstallation'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import SentryAppExternalIssueForm from 'sentry/components/group/sentryAppExternalIssueForm'; import {addQueryParamsToExistingUrl} from 'sentry/utils/queryString'; @@ -63,7 +63,7 @@ describe('SentryAppExternalIssueForm', () => { await userEvent.click(screen.getByRole('button', {name: 'Save Changes'})); expect(externalIssueRequest).not.toHaveBeenCalled(); - selectEvent.openMenu(screen.getByRole('textbox', {name: 'Numbers'})); + await selectEvent.openMenu(screen.getByRole('textbox', {name: 'Numbers'})); await userEvent.type(screen.getByRole('textbox', {name: 'Numbers'}), '1'); await userEvent.click(screen.getByText('one')); @@ -191,7 
+191,7 @@ describe('SentryAppExternalIssueForm Async Field', () => { /> ); - selectEvent.openMenu(screen.getByText('Numbers')); + await selectEvent.openMenu(screen.getByText('Numbers')); await userEvent.type(screen.getByRole('textbox'), 'I'); expect(mockGetOptions).toHaveBeenCalled(); diff --git a/static/app/components/idBadge/projectBadge.tsx b/static/app/components/idBadge/projectBadge.tsx index 11dfe6882d6a93..5d47900e823244 100644 --- a/static/app/components/idBadge/projectBadge.tsx +++ b/static/app/components/idBadge/projectBadge.tsx @@ -5,9 +5,8 @@ import BadgeDisplayName from 'sentry/components/idBadge/badgeDisplayName'; import BaseBadge from 'sentry/components/idBadge/baseBadge'; import type {LinkProps} from 'sentry/components/links/link'; import Link from 'sentry/components/links/link'; -import type {Organization} from 'sentry/types'; import getPlatformName from 'sentry/utils/getPlatformName'; -import withOrganization from 'sentry/utils/withOrganization'; +import useOrganization from 'sentry/utils/useOrganization'; type BaseBadgeProps = React.ComponentProps; type Project = NonNullable; @@ -25,7 +24,6 @@ export interface ProjectBadgeProps * If true, will use default max-width, or specify one as a string */ hideOverflow?: boolean | string; - organization?: Organization; /** * Overrides where the project badge links */ @@ -34,7 +32,6 @@ export interface ProjectBadgeProps function ProjectBadge({ project, - organization, to, hideOverflow = true, disableLink = false, @@ -42,6 +39,7 @@ function ProjectBadge({ className, ...props }: ProjectBadgeProps) { + const organization = useOrganization(); const {slug, id} = project; const badge = ( @@ -81,4 +79,4 @@ const StyledLink = styled(Link)` } `; -export default withOrganization(ProjectBadge); +export default ProjectBadge; diff --git a/static/app/components/issues/groupListHeader.tsx b/static/app/components/issues/groupListHeader.tsx index b338d84d8667fc..784e5b79f0d1dc 100644 --- 
a/static/app/components/issues/groupListHeader.tsx +++ b/static/app/components/issues/groupListHeader.tsx @@ -84,7 +84,7 @@ const ChartWrapper = styled(Heading)<{narrowGroups: boolean}>` const PriorityWrapper = styled(Heading)<{narrowGroups: boolean}>` justify-content: flex-end; - width: 85px; + width: 70px; @media (max-width: ${p => p.narrowGroups ? p.theme.breakpoints.large : p.theme.breakpoints.medium}) { diff --git a/static/app/components/modals/inviteMembersModal/index.spec.tsx b/static/app/components/modals/inviteMembersModal/index.spec.tsx index c2e7220e8c19da..177ce2343d5bb2 100644 --- a/static/app/components/modals/inviteMembersModal/index.spec.tsx +++ b/static/app/components/modals/inviteMembersModal/index.spec.tsx @@ -1,10 +1,10 @@ import type {ComponentProps} from 'react'; -import selectEvent from 'react-select-event'; import styled from '@emotion/styled'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {TeamFixture} from 'sentry-fixture/team'; import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import {textWithMarkupMatcher} from 'sentry-test/utils'; import {makeCloseButton} from 'sentry/components/globalModal/components'; diff --git a/static/app/components/modals/inviteMissingMembersModal/index.spec.tsx b/static/app/components/modals/inviteMissingMembersModal/index.spec.tsx index 2aa8b8b38e8558..5202ce13fdc02d 100644 --- a/static/app/components/modals/inviteMissingMembersModal/index.spec.tsx +++ b/static/app/components/modals/inviteMissingMembersModal/index.spec.tsx @@ -1,10 +1,10 @@ -import selectEvent from 'react-select-event'; import styled from '@emotion/styled'; import {MissingMembersFixture} from 'sentry-fixture/missingMembers'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {TeamFixture} from 'sentry-fixture/team'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 
'sentry-test/selectEvent'; import {makeCloseButton} from 'sentry/components/globalModal/components'; import type {InviteMissingMembersModalProps} from 'sentry/components/modals/inviteMissingMembersModal'; diff --git a/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx b/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx index dd9044c40aa03f..2743bc6d0ef0bf 100644 --- a/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx +++ b/static/app/components/modals/savedSearchModal/createSavedSearchModal.spec.tsx @@ -1,7 +1,7 @@ -import selectEvent from 'react-select-event'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import { makeClosableHeader, diff --git a/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx b/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx index 4e8d2f79f2793b..7def9eca14e077 100644 --- a/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx +++ b/static/app/components/modals/savedSearchModal/editSavedSearchModal.spec.tsx @@ -1,7 +1,7 @@ -import selectEvent from 'react-select-event'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import { makeClosableHeader, diff --git a/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx b/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx index 08a089767952f6..b7926541bf3d63 100644 --- a/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx +++ b/static/app/components/modals/widgetBuilder/addToDashboardModal.spec.tsx @@ -1,8 +1,8 @@ -import selectEvent from 'react-select-event'; import {LocationFixture} 
from 'sentry-fixture/locationFixture'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import type {ModalRenderProps} from 'sentry/actionCreators/modal'; import AddToDashboardModal from 'sentry/components/modals/widgetBuilder/addToDashboardModal'; @@ -183,7 +183,7 @@ describe('add to dashboard modal', () => { expect(screen.getByText('Select Dashboard')).toBeEnabled(); }); - selectEvent.openMenu(screen.getByText('Select Dashboard')); + await selectEvent.openMenu(screen.getByText('Select Dashboard')); expect(screen.getByText('+ Create New Dashboard')).toBeInTheDocument(); expect(screen.getByText('Test Dashboard')).toBeInTheDocument(); }); diff --git a/static/app/components/modals/widgetViewerModal.spec.tsx b/static/app/components/modals/widgetViewerModal.spec.tsx index 867fbcf35706f9..5438574fdb5c90 100644 --- a/static/app/components/modals/widgetViewerModal.spec.tsx +++ b/static/app/components/modals/widgetViewerModal.spec.tsx @@ -4,6 +4,7 @@ import {ProjectFixture} from 'sentry-fixture/project'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {act, render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import {resetMockDate, setMockDate} from 'sentry-test/utils'; import type {ModalRenderProps} from 'sentry/actionCreators/modal'; import WidgetViewerModal from 'sentry/components/modals/widgetViewerModal'; @@ -1269,7 +1270,7 @@ describe('Modals -> WidgetViewerModal', function () { widgetType: WidgetType.RELEASE, }; beforeEach(function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); metricsMock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/metrics/data/', body: MetricsTotalCountByReleaseIn24h(), @@ -1281,7 +1282,7 @@ describe('Modals -> WidgetViewerModal', function () { }); }); afterEach(() => { - 
jest.useRealTimers(); + resetMockDate(); }); it('does a sessions query', async function () { diff --git a/static/app/components/noProjectMessage.spec.tsx b/static/app/components/noProjectMessage.spec.tsx index ffc898c5be33b2..9f640b9a0366b5 100644 --- a/static/app/components/noProjectMessage.spec.tsx +++ b/static/app/components/noProjectMessage.spec.tsx @@ -18,15 +18,16 @@ describe('NoProjectMessage', function () { it('renders', function () { const organization = OrganizationFixture({slug: 'org-slug'}); - const childrenMock = jest.fn().mockReturnValue(null); ProjectsStore.loadInitialData([]); render( - {childrenMock} + +
Test
+
); - expect(childrenMock).not.toHaveBeenCalled(); expect(screen.getByText('Remain Calm')).toBeInTheDocument(); + expect(screen.queryByTestId('child')).not.toBeInTheDocument(); }); it('shows "Create Project" button when there are no projects', function () { diff --git a/static/app/components/onboarding/gettingStartedDoc/feedback/widgetCallout.tsx b/static/app/components/onboarding/gettingStartedDoc/feedback/widgetCallout.tsx new file mode 100644 index 00000000000000..0d5bad506b7c14 --- /dev/null +++ b/static/app/components/onboarding/gettingStartedDoc/feedback/widgetCallout.tsx @@ -0,0 +1,16 @@ +import Alert from 'sentry/components/alert'; +import ExternalLink from 'sentry/components/links/externalLink'; +import {tct} from 'sentry/locale'; + +export default function widgetCallout({link}: {link: string}) { + return ( + + {tct( + `Want to receive user feedback at any time, not just when an error happens? [link:Read the docs] to learn how to set up our customizable widget.`, + { + link: , + } + )} + + ); +} diff --git a/static/app/components/onboarding/gettingStartedDoc/types.ts b/static/app/components/onboarding/gettingStartedDoc/types.ts index a1be3b5c67f279..5c0eb1ed0363bf 100644 --- a/static/app/components/onboarding/gettingStartedDoc/types.ts +++ b/static/app/components/onboarding/gettingStartedDoc/types.ts @@ -80,6 +80,7 @@ export interface OnboardingConfig< export interface Docs { onboarding: OnboardingConfig; + crashReportOnboarding?: OnboardingConfig; customMetricsOnboarding?: OnboardingConfig; feedbackOnboardingCrashApi?: OnboardingConfig; feedbackOnboardingNpm?: OnboardingConfig; @@ -92,6 +93,7 @@ export type ConfigType = | 'onboarding' | 'feedbackOnboardingNpm' | 'feedbackOnboardingCrashApi' + | 'crashReportOnboarding' | 'replayOnboardingNpm' | 'replayOnboardingJsLoader' | 'customMetricsOnboarding'; diff --git a/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx 
b/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx index 2bfd5be647abd7..c5ec3080a2ba2a 100644 --- a/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx +++ b/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx @@ -2,13 +2,20 @@ import Alert from 'sentry/components/alert'; import ExternalLink from 'sentry/components/links/externalLink'; import {t, tct} from 'sentry/locale'; -export const getFeedbackConfigureDescription = ({link}: {link: string}) => +export const getFeedbackConfigureDescription = ({ + linkConfig, + linkButton, +}: { + linkButton: string; + linkConfig: string; +}) => tct( - 'To set up the integration, add the following to your Sentry initialization. There are many options you can pass to the [code:integrations] constructor to customize your form. [break] [break] You can even link the widget to a custom button if you don’t want to use our autoinjected floating button. Learn more about configuring User Feedback by reading the [link:configuration docs].', + 'To set up the integration, add the following to your Sentry initialization. There are many options you can pass to the [code:integrations] constructor to customize your form. [break] [break] You can even [linkButton:link the widget to a custom button] if you don’t want to use our auto-injected floating button. Learn more about configuring User Feedback by reading the [linkConfig:configuration docs].', { code: , break:
, - link: , + linkConfig: , + linkButton: , } ); @@ -28,7 +35,7 @@ export const getFeedbackSDKSetupSnippet = ({ integrations: [ Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(feedbackOptions)}}), ], });`; @@ -75,3 +82,44 @@ export const getFeedbackConfigOptions = ({ } return options.join('\n'); }; + +export const getCrashReportModalIntroduction = () => + t( + 'Collect feedback on your errors by installing our crash-report modal. This allows users to submit feedback after they experience an error via an automatic modal that pops up after an error occurs. The default modal will prompt the user for their name, email address, and description of what occurred.' + ); + +export const getCrashReportModalInstallDescriptionJavaScript = () => + tct( + 'You can collect feedback at the time the event is sent, using [code:beforeSend].', + {code: } + ); + +export const getCrashReportModalConfigDescription = ({link}: {link: string}) => + tct( + 'There are many options you can pass to the [code:Sentry.showReportDialog] call to customize your form. 
Learn more about configuring the modal by reading the [link:configuration docs].', + {code: , link: } + ); + +export const getCrashReportModalSnippetJavaScript = params => [ + { + code: [ + { + label: 'HTML', + value: 'html', + language: 'html', + code: ``, + }, + ], + }, +]; diff --git a/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx new file mode 100644 index 00000000000000..f6417896ae768f --- /dev/null +++ b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx @@ -0,0 +1,138 @@ +import {ProjectFixture} from 'sentry-fixture/project'; + +import {reactHooks} from 'sentry-test/reactTestingLibrary'; + +import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; +import {SidebarPanelKey} from 'sentry/components/sidebar/types'; +import { + customMetricOnboardingPlatforms, + customMetricPlatforms, + feedbackOnboardingPlatforms, + replayOnboardingPlatforms, + replayPlatforms, +} from 'sentry/data/platformCategories'; +import PageFiltersStore from 'sentry/stores/pageFiltersStore'; +import ProjectsStore from 'sentry/stores/projectsStore'; +import type {Project} from 'sentry/types'; + +function mockPageFilterStore(projects: Project[]) { + PageFiltersStore.init(); + PageFiltersStore.onInitializeUrlState( + { + projects: projects.map(p => parseInt(p.id, 10)), + environments: [], + datetime: { + period: '7d', + start: null, + end: null, + utc: null, + }, + }, + new Set() + ); +} + +describe('useCurrentProjectState', () => { + const rust_1 = ProjectFixture({id: '1', platform: 'rust'}); + const rust_2 = ProjectFixture({id: '2', platform: 'rust'}); + const javascript = ProjectFixture({id: '3', platform: 'javascript'}); + const angular = ProjectFixture({id: '4', platform: 'javascript-angular'}); + + it('should return currentProject=undefined when currentPanel != 
targetPanel', () => { + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, + }, + }); + expect(result.current.currentProject).toBe(undefined); + }); + + it('should return the currentProject when currentPanel = targetPanel', () => { + ProjectsStore.loadInitialData([javascript]); + mockPageFilterStore([javascript]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.METRICS_ONBOARDING, + targetPanel: SidebarPanelKey.METRICS_ONBOARDING, + onboardingPlatforms: customMetricOnboardingPlatforms, + allPlatforms: customMetricPlatforms, + }, + }); + expect(result.current.currentProject).toBe(javascript); + }); + + it('should return the first project if global selection does not have onboarding', () => { + ProjectsStore.loadInitialData([rust_1, rust_2]); + mockPageFilterStore([rust_1, rust_2]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + targetPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + onboardingPlatforms: replayOnboardingPlatforms, + allPlatforms: replayPlatforms, + }, + }); + expect(result.current.currentProject).toBe(rust_1); + }); + + it('should return the first onboarding project', () => { + ProjectsStore.loadInitialData([rust_1, javascript]); + mockPageFilterStore([rust_1, javascript]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, + }, + }); + expect(result.current.currentProject).toBe(rust_1); + }); + + it('should return the first project if 
no selection', () => { + ProjectsStore.loadInitialData([rust_1, javascript]); + mockPageFilterStore([]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + targetPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + onboardingPlatforms: replayOnboardingPlatforms, + allPlatforms: replayPlatforms, + }, + }); + expect(result.current.currentProject).toBe(javascript); + }); + + it('should return undefined if no selection and no projects have onboarding', () => { + ProjectsStore.loadInitialData([rust_1, rust_2]); + mockPageFilterStore([]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + targetPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + onboardingPlatforms: replayOnboardingPlatforms, + allPlatforms: replayPlatforms, + }, + }); + expect(result.current.currentProject).toBe(undefined); + }); + + it('should override current project if setCurrentProjects is called', () => { + ProjectsStore.loadInitialData([javascript, angular]); + mockPageFilterStore([javascript, angular]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, + }, + }); + expect(result.current.currentProject).toBe(javascript); + reactHooks.act(() => result.current.setCurrentProject(angular)); + expect(result.current.currentProject).toBe(angular); + }); +}); diff --git a/static/app/components/feedback/feedbackOnboarding/useCurrentProjectState.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.tsx similarity index 53% rename from static/app/components/feedback/feedbackOnboarding/useCurrentProjectState.tsx rename to 
static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.tsx index b00fa5c0f6e9e0..e1ad3d0522a36e 100644 --- a/static/app/components/feedback/feedbackOnboarding/useCurrentProjectState.tsx +++ b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.tsx @@ -1,24 +1,40 @@ -import {useEffect, useState} from 'react'; +import {useEffect, useMemo, useState} from 'react'; +import partition from 'lodash/partition'; -import {SidebarPanelKey} from 'sentry/components/sidebar/types'; -import {feedbackOnboardingPlatforms} from 'sentry/data/platformCategories'; +import type {SidebarPanelKey} from 'sentry/components/sidebar/types'; import PageFiltersStore from 'sentry/stores/pageFiltersStore'; import {useLegacyStore} from 'sentry/stores/useLegacyStore'; -import type {Project} from 'sentry/types'; +import type {PlatformKey, Project} from 'sentry/types'; import useProjects from 'sentry/utils/useProjects'; -function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanelKey}) { +type Props = { + allPlatforms: readonly PlatformKey[]; + currentPanel: '' | SidebarPanelKey; + onboardingPlatforms: readonly PlatformKey[]; + targetPanel: SidebarPanelKey; +}; + +function useCurrentProjectState({ + currentPanel, + targetPanel, + onboardingPlatforms, + allPlatforms, +}: Props) { const [currentProject, setCurrentProject] = useState(undefined); const {projects, initiallyLoaded: projectsLoaded} = useProjects(); const {selection, isReady} = useLegacyStore(PageFiltersStore); - const isActive = currentPanel === SidebarPanelKey.FEEDBACK_ONBOARDING; + const isActive = currentPanel === targetPanel; // Projects with onboarding instructions const projectsWithOnboarding = projects.filter( - p => p.platform && feedbackOnboardingPlatforms.includes(p.platform) + p => p.platform && onboardingPlatforms.includes(p.platform) ); + const [supportedProjects, unsupportedProjects] = useMemo(() => { + return partition(projects, p => p.platform && 
allPlatforms.includes(p.platform)); + }, [projects, allPlatforms]); + useEffect(() => { if (!isActive) { setCurrentProject(undefined); @@ -30,7 +46,8 @@ function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanel !projectsLoaded || !projects.length || !isReady || - !projectsWithOnboarding + !projectsWithOnboarding || + !supportedProjects ) { return; } @@ -48,13 +65,23 @@ function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanel return; } + // If we selected something that supports the product pick that + const projectSupportsProduct = supportedProjects.find(p => + selectedProjectIds.includes(p.id) + ); + + if (projectSupportsProduct) { + setCurrentProject(projectSupportsProduct); + return; + } + // Otherwise, just pick the first selected project const firstSelectedProject = projects.find(p => selectedProjectIds.includes(p.id)); setCurrentProject(firstSelectedProject); return; } // No selection, so pick the first project with onboarding - setCurrentProject(projectsWithOnboarding.at(0)); + setCurrentProject(projectsWithOnboarding.at(0) || supportedProjects.at(0)); return; }, [ currentProject, @@ -64,13 +91,18 @@ function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanel isActive, selection.projects, projectsWithOnboarding, + supportedProjects, ]); return { - projectsWithOnboarding, - projects, + projects: supportedProjects, + allProjects: projects, currentProject, setCurrentProject, + hasDocs: + !!currentProject?.platform && onboardingPlatforms.includes(currentProject.platform), + supportedProjects, + unsupportedProjects, }; } diff --git a/static/app/components/replays/playerDOMAlert.spec.tsx b/static/app/components/replays/playerDOMAlert.spec.tsx index cd8ed860b59810..6e31c67fde9325 100644 --- a/static/app/components/replays/playerDOMAlert.spec.tsx +++ b/static/app/components/replays/playerDOMAlert.spec.tsx @@ -1,20 +1,23 @@ -import {render, screen} from 'sentry-test/reactTestingLibrary'; +import 
{render, screen, waitFor} from 'sentry-test/reactTestingLibrary'; +import {resetMockDate, setMockDate} from 'sentry-test/utils'; import localStorage from 'sentry/utils/localStorage'; import PlayerDOMAlert from './playerDOMAlert'; jest.mock('sentry/utils/localStorage'); -jest.useFakeTimers(); const mockGetItem = jest.mocked(localStorage.getItem); const now = new Date('2020-01-01'); -jest.setSystemTime(now); describe('PlayerDOMAlert', () => { beforeEach(() => { mockGetItem.mockReset(); + setMockDate(now); + }); + afterEach(() => { + resetMockDate(); }); it('should render the alert when local storage key is not set', () => { @@ -30,18 +33,19 @@ describe('PlayerDOMAlert', () => { expect(screen.queryByTestId('player-dom-alert')).not.toBeInTheDocument(); }); - it('should be dismissable', () => { + it('should be dismissable', async () => { render(); expect(screen.getByTestId('player-dom-alert')).toBeVisible(); screen.getByLabelText('Close Alert').click(); - jest.runAllTicks(); expect(screen.queryByTestId('player-dom-alert')).not.toBeInTheDocument(); - expect(localStorage.setItem).toHaveBeenCalledWith( - 'replay-player-dom-alert-dismissed', - '"1577836800000"' + await waitFor(() => + expect(localStorage.setItem).toHaveBeenCalledWith( + 'replay-player-dom-alert-dismissed', + '"1577836800000"' + ) ); }); }); diff --git a/static/app/components/replaysOnboarding/sidebar.tsx b/static/app/components/replaysOnboarding/sidebar.tsx index 00a7d06f7c905f..44307fe38609b3 100644 --- a/static/app/components/replaysOnboarding/sidebar.tsx +++ b/static/app/components/replaysOnboarding/sidebar.tsx @@ -10,10 +10,10 @@ import {CompactSelect} from 'sentry/components/compactSelect'; import RadioGroup from 'sentry/components/forms/controls/radioGroup'; import IdBadge from 'sentry/components/idBadge'; import LoadingIndicator from 'sentry/components/loadingIndicator'; +import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; import 
useOnboardingDocs from 'sentry/components/onboardingWizard/useOnboardingDocs'; import {PlatformOptionDropdown} from 'sentry/components/replaysOnboarding/platformOptionDropdown'; import {ReplayOnboardingLayout} from 'sentry/components/replaysOnboarding/replayOnboardingLayout'; -import useCurrentProjectState from 'sentry/components/replaysOnboarding/useCurrentProjectState'; import useLoadOnboardingDoc from 'sentry/components/replaysOnboarding/useLoadOnboardingDoc'; import { generateDocKeys, @@ -30,6 +30,7 @@ import { replayBackendPlatforms, replayFrontendPlatforms, replayJsLoaderInstructionsPlatformList, + replayOnboardingPlatforms, replayPlatforms, } from 'sentry/data/platformCategories'; import platforms, {otherPlatform} from 'sentry/data/platforms'; @@ -59,6 +60,9 @@ function ReplaysOnboardingSidebar(props: CommonSidebarProps) { unsupportedProjects, } = useCurrentProjectState({ currentPanel, + targetPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + onboardingPlatforms: replayOnboardingPlatforms, + allPlatforms: replayPlatforms, }); const projectSelectOptions = useMemo(() => { diff --git a/static/app/components/replaysOnboarding/useCurrentProjectState.tsx b/static/app/components/replaysOnboarding/useCurrentProjectState.tsx deleted file mode 100644 index da95b94d335b43..00000000000000 --- a/static/app/components/replaysOnboarding/useCurrentProjectState.tsx +++ /dev/null @@ -1,99 +0,0 @@ -import {useEffect, useMemo, useState} from 'react'; - -import {splitProjectsByReplaySupport} from 'sentry/components/replaysOnboarding/utils'; -import {SidebarPanelKey} from 'sentry/components/sidebar/types'; -import {replayOnboardingPlatforms, replayPlatforms} from 'sentry/data/platformCategories'; -import PageFiltersStore from 'sentry/stores/pageFiltersStore'; -import {useLegacyStore} from 'sentry/stores/useLegacyStore'; -import type {Project} from 'sentry/types'; -import useProjects from 'sentry/utils/useProjects'; - -function useCurrentProjectState({currentPanel}: {currentPanel: '' | 
SidebarPanelKey}) { - const [currentProject, setCurrentProject] = useState(undefined); - const {projects, initiallyLoaded: projectsLoaded} = useProjects(); - const {selection, isReady} = useLegacyStore(PageFiltersStore); - - const isActive = currentPanel === SidebarPanelKey.REPLAYS_ONBOARDING; - - // Projects where we have the onboarding instructions ready: - const projectsWithOnboarding = useMemo( - () => - projects.filter( - p => p.platform && replayOnboardingPlatforms.includes(p.platform) && !p.hasReplays - ), - [projects] - ); - - // Projects that support replays, but we haven't created the onboarding instructions (yet): - const projectWithReplaySupport = useMemo( - () => - projects.filter( - p => p.platform && replayPlatforms.includes(p.platform) && !p.hasReplays - ), - [projects] - ); - - useEffect(() => { - if (!isActive) { - setCurrentProject(undefined); - } - }, [isActive]); - - useEffect(() => { - if (currentProject || !projectsLoaded || !projects.length || !isReady || !isActive) { - return; - } - - if (!projectWithReplaySupport) { - return; - } - - if (selection.projects.length) { - const selectedProjectIds = selection.projects.map(String); - // If we selected something that has onboarding instructions, pick that first - const projectForOnboarding = projectsWithOnboarding.find(p => - selectedProjectIds.includes(p.id) - ); - if (projectForOnboarding) { - setCurrentProject(projectForOnboarding); - } - - // If we selected something that supports replays pick that - const projectSupportsReplay = projectWithReplaySupport.find(p => - selectedProjectIds.includes(p.id) - ); - if (projectSupportsReplay) { - setCurrentProject(projectSupportsReplay); - } - const firstSelectedProject = projects.find(p => selectedProjectIds.includes(p.id)); - setCurrentProject(firstSelectedProject); - } else { - // We have no selection, so pick a project which we've found - setCurrentProject(projectsWithOnboarding.at(0) || projectWithReplaySupport.at(0)); - } - }, [ - currentProject, 
- projectsLoaded, - projects, - isReady, - isActive, - selection.projects, - projectsWithOnboarding, - projectWithReplaySupport, - ]); - - const {supported, unsupported} = useMemo(() => { - return splitProjectsByReplaySupport(projects); - }, [projects]); - - return { - projects: projectWithReplaySupport, - allProjects: projects, - supportedProjects: supported, - unsupportedProjects: unsupported, - currentProject, - setCurrentProject, - }; -} - -export default useCurrentProjectState; diff --git a/static/app/components/replaysOnboarding/utils.tsx b/static/app/components/replaysOnboarding/utils.tsx index 032a0b1575a4d2..53b9ec15ada10f 100644 --- a/static/app/components/replaysOnboarding/utils.tsx +++ b/static/app/components/replaysOnboarding/utils.tsx @@ -1,8 +1,6 @@ -import partition from 'lodash/partition'; - import {replayFrontendPlatforms, replayPlatforms} from 'sentry/data/platformCategories'; import platforms from 'sentry/data/platforms'; -import type {PlatformIntegration, PlatformKey, Project} from 'sentry/types'; +import type {PlatformIntegration, PlatformKey} from 'sentry/types'; export function generateDocKeys(platform: PlatformKey): string[] { const platformKey = platform.startsWith('javascript') @@ -17,16 +15,6 @@ export function isPlatformSupported(platform: undefined | PlatformIntegration) { return platform?.id ? replayPlatforms.includes(platform?.id) : false; } -export function splitProjectsByReplaySupport(projects: Project[]) { - const [supported, unsupported] = partition(projects, project => - replayPlatforms.includes(project.platform!) 
- ); - return { - supported, - unsupported, - }; -} - export const replayJsFrameworkOptions: PlatformIntegration[] = platforms.filter(p => replayFrontendPlatforms.includes(p.id) ); diff --git a/static/app/components/sidebar/broadcastSdkUpdates.tsx b/static/app/components/sidebar/broadcastSdkUpdates.tsx index 1680df0b2a6a5d..7a39ff09330d43 100644 --- a/static/app/components/sidebar/broadcastSdkUpdates.tsx +++ b/static/app/components/sidebar/broadcastSdkUpdates.tsx @@ -75,7 +75,7 @@ function BroadcastSdkUpdates({projects, sdkUpdates, organization}: Props) { return (
- + {isDeprecated && {t('Deprecated')}}
diff --git a/static/app/components/stream/group.spec.tsx b/static/app/components/stream/group.spec.tsx index dd78a5d9831dea..97e53995ac5c91 100644 --- a/static/app/components/stream/group.spec.tsx +++ b/static/app/components/stream/group.spec.tsx @@ -102,7 +102,7 @@ describe('StreamGroup', function () { }); const priorityDropdown = screen.getByRole('button', {name: 'Modify issue priority'}); - expect(within(priorityDropdown).getByText('Medium')).toBeInTheDocument(); + expect(within(priorityDropdown).getByText('Med')).toBeInTheDocument(); await userEvent.click(priorityDropdown); await userEvent.click(screen.getByRole('menuitemradio', {name: 'High'})); expect(within(priorityDropdown).getByText('High')).toBeInTheDocument(); diff --git a/static/app/components/stream/group.tsx b/static/app/components/stream/group.tsx index ce33a92887c0c3..6d0cf995f27967 100644 --- a/static/app/components/stream/group.tsx +++ b/static/app/components/stream/group.tsx @@ -612,7 +612,7 @@ const EventCountsWrapper = styled('div')` `; const PriorityWrapper = styled('div')<{narrowGroups: boolean}>` - width: 85px; + width: 70px; margin: 0 ${space(2)}; align-self: center; display: flex; diff --git a/static/app/components/structuredEventData/collapsibleValue.tsx b/static/app/components/structuredEventData/collapsibleValue.tsx new file mode 100644 index 00000000000000..a979682f878fbc --- /dev/null +++ b/static/app/components/structuredEventData/collapsibleValue.tsx @@ -0,0 +1,112 @@ +import {Children, useState} from 'react'; +import {css} from '@emotion/react'; +import styled from '@emotion/styled'; + +import {Button} from 'sentry/components/button'; +import {IconChevron} from 'sentry/icons'; +import {t, tn} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; + +type CollapsibleValueProps = { + children: React.ReactNode; + closeTag: string; + depth: number; + maxDefaultDepth: number; + openTag: string; + prefix?: React.ReactNode; +}; + +const MAX_ITEMS_BEFORE_AUTOCOLLAPSE = 5; + 
+export function CollapsibleValue({ + children, + openTag, + closeTag, + prefix = null, + depth, + maxDefaultDepth, +}: CollapsibleValueProps) { + const numChildren = Children.count(children); + const [isExpanded, setIsExpanded] = useState( + numChildren <= MAX_ITEMS_BEFORE_AUTOCOLLAPSE && depth < maxDefaultDepth + ); + + const shouldShowToggleButton = numChildren > 0; + const isBaseLevel = depth === 0; + + // Toggle buttons get placed to the left of the open tag, but if this is the + // base level there is no room for it. So we add padding in this case. + const baseLevelPadding = isBaseLevel && shouldShowToggleButton; + + return ( + + {numChildren > 0 ? ( + setIsExpanded(oldValue => !oldValue)} + icon={ + + } + borderless + baseLevelPadding={baseLevelPadding} + /> + ) : null} + {prefix} + {openTag} + {shouldShowToggleButton && !isExpanded ? ( + setIsExpanded(true)}> + {tn('%s item', '%s items', numChildren)} + + ) : null} + {shouldShowToggleButton && isExpanded ? ( + {children} + ) : null} + {closeTag} + + ); +} + +const CollapsibleDataContainer = styled('span')<{baseLevelPadding: boolean}>` + position: relative; + + ${p => + p.baseLevelPadding && + css` + display: block; + padding-left: ${space(3)}; + `} +`; + +const IndentedValues = styled('div')` + padding-left: ${space(1.5)}; +`; + +const NumItemsButton = styled(Button)` + background: none; + border: none; + padding: 0 2px; + border-radius: 2px; + font-weight: normal; + box-shadow: none; + font-size: ${p => p.theme.fontSizeSmall}; + color: ${p => p.theme.subText}; + margin: 0 ${space(0.5)}; +`; + +const ToggleButton = styled(Button)<{baseLevelPadding: boolean}>` + position: absolute; + left: -${space(3)}; + top: 2px; + border-radius: 2px; + align-items: center; + justify-content: center; + background: none; + border: none; + + ${p => + p.baseLevelPadding && + css` + left: 0; + `} +`; diff --git a/static/app/components/structuredEventData/index.spec.tsx b/static/app/components/structuredEventData/index.spec.tsx 
index b86cc8803a5ffd..513a1cabbad853 100644 --- a/static/app/components/structuredEventData/index.spec.tsx +++ b/static/app/components/structuredEventData/index.spec.tsx @@ -1,4 +1,4 @@ -import {render, screen, within} from 'sentry-test/reactTestingLibrary'; +import {render, screen, userEvent, within} from 'sentry-test/reactTestingLibrary'; import StructuredEventData from 'sentry/components/structuredEventData'; @@ -60,4 +60,42 @@ describe('ContextData', function () { ).toBeInTheDocument(); }); }); + + describe('collpasible values', function () { + it('auto-collapses objects/arrays with more than 5 items', async function () { + render( + + ); + + expect(screen.getByText('one_child_value')).toBeInTheDocument(); + expect(screen.queryByText('two_child_value')).not.toBeInTheDocument(); + + // Click the "6 items" button to expand the object + await userEvent.click(screen.getByRole('button', {name: '6 items'})); + expect(screen.getByText('two_child_value')).toBeInTheDocument(); + }); + }); + + it('auto-collapses objects/arrays after max depth', async function () { + render(); + + expect(screen.getByText('1')).toBeInTheDocument(); + expect(screen.queryByText('2')).not.toBeInTheDocument(); + + // Click the "2 items" button to expand the array + await userEvent.click(screen.getByRole('button', {name: '2 items'})); + expect(screen.getByText('3')).toBeInTheDocument(); + }); }); diff --git a/static/app/components/structuredEventData/index.stories.tsx b/static/app/components/structuredEventData/index.stories.tsx index 94278575cd6490..eb74ee93f1b602 100644 --- a/static/app/components/structuredEventData/index.stories.tsx +++ b/static/app/components/structuredEventData/index.stories.tsx @@ -18,8 +18,8 @@ export default storyBook(StructuredEventData, story => { - - + + ); }); diff --git a/static/app/components/structuredEventData/index.tsx b/static/app/components/structuredEventData/index.tsx index a2f826443ef6f3..9420f539c8eecd 100644 --- 
a/static/app/components/structuredEventData/index.tsx +++ b/static/app/components/structuredEventData/index.tsx @@ -3,11 +3,11 @@ import styled from '@emotion/styled'; import {AnnotatedText} from 'sentry/components/events/meta/annotatedText'; import ExternalLink from 'sentry/components/links/externalLink'; +import {CollapsibleValue} from 'sentry/components/structuredEventData/collapsibleValue'; import {IconOpen} from 'sentry/icons'; import {t} from 'sentry/locale'; import {isUrl} from 'sentry/utils'; -import Toggle from './toggle'; import { looksLikeMultiLineString, looksLikeStrippedValue, @@ -75,30 +75,50 @@ function StructuredData({ maxDefaultDepth, withAnnotatedText, meta, + objectKey, }: { config: StructedEventDataConfig | undefined; depth: number; maxDefaultDepth: number; meta: Record | undefined; withAnnotatedText: boolean; + objectKey?: string; // TODO(TS): What possible types can `value` be? value?: any; }) { let i = 0; - const children: React.ReactNode[] = []; + const formattedObjectKey = objectKey ? ( + + + {config?.renderObjectKeys?.(objectKey) ?? objectKey} + + {': '} + + ) : null; + + function Wrapper({children}: {children: React.ReactNode}) { + return ( + + {formattedObjectKey} + {children} + + ); + } if (config?.isNull?.(value) || value === null) { const nullValue = config?.renderNull?.(value) ?? String(value); return ( - - - + + + + + ); } @@ -106,21 +126,29 @@ function StructuredData({ const booleanValue = config?.renderBoolean?.(value) ?? String(value); return ( - - - + + + + + ); } if (typeof value === 'number' || config?.isNumber?.(value)) { return ( - - - + + + + + ); } @@ -129,45 +157,63 @@ function StructuredData({ const stringValue = config.renderString?.(value) ?? 
value; return ( - - {'"'} - - {'"'} - - + + + {'"'} + + {'"'} + + + ); } if (looksLikeStrippedValue(value)) { return ( - + + + + + + ); + } + + if (looksLikeMultiLineString(value)) { + + - - ); - } - - if (looksLikeMultiLineString(value)) { - - - ; + + ; } return ( - - - - + + + + + + ); } + const children: React.ReactNode[] = []; + if (Array.isArray(value)) { for (i = 0; i < value.length; i++) { children.push( @@ -180,21 +226,27 @@ function StructuredData({ meta={meta?.[i]} maxDefaultDepth={maxDefaultDepth} /> - {i < value.length - 1 ? {', '} : null} + {i < value.length - 1 ? {','} : null}
); } return ( - - {'['} - {children} - {']'} - + + {children} + ); } + if (isValidElement(value)) { return value; } + const keys = Object.keys(value); keys.sort(naturalCaseInsensitiveSort); for (i = 0; i < keys.length; i++) { @@ -202,29 +254,30 @@ function StructuredData({ children.push(
- {config?.renderObjectKeys?.(key) ?? key} - {': '} - - - {i < keys.length - 1 ? {', '} : null} - + + {i < keys.length - 1 ? {','} : null}
); } return ( - - {'{'} - {children} - {'}'} - + + {children} + ); } diff --git a/static/app/components/structuredEventData/toggle.tsx b/static/app/components/structuredEventData/toggle.tsx deleted file mode 100644 index 3cff6e17a57a17..00000000000000 --- a/static/app/components/structuredEventData/toggle.tsx +++ /dev/null @@ -1,75 +0,0 @@ -import {Children, useState} from 'react'; -import styled from '@emotion/styled'; - -import {IconAdd, IconSubtract} from 'sentry/icons'; -import {t} from 'sentry/locale'; - -type Props = { - children: React.ReactNode; - highUp: boolean; -}; - -function Toggle({highUp, children}: Props) { - const [isExpanded, setIsExpanded] = useState(false); - - if (Children.count(children) === 0) { - return null; - } - - const wrappedChildren = {children}; - - if (highUp) { - return wrappedChildren; - } - - return ( - - { - setIsExpanded(!isExpanded); - evt.preventDefault(); - }} - > - {isExpanded ? ( - - ) : ( - - )} - - {isExpanded && wrappedChildren} - - ); -} - -export default Toggle; - -const IconWrapper = styled('div')<{isExpanded: boolean}>` - border-radius: 2px; - display: inline-flex; - align-items: center; - justify-content: center; - cursor: pointer; - ${p => - p.isExpanded - ? 
` - background: ${p.theme.gray300}; - border: 1px solid ${p.theme.gray300}; - &:hover { - background: ${p.theme.gray400}; - } - ` - : ` - background: ${p.theme.blue300}; - border: 1px solid ${p.theme.blue300}; - &:hover { - background: ${p.theme.blue200}; - } - `} -`; - -const ValueWrapper = styled('span')` - display: block; - padding: 0 0 0 15px; -`; diff --git a/static/app/components/teamSelector.spec.tsx b/static/app/components/teamSelector.spec.tsx index 363a6a2f04cba3..8766f714e989c7 100644 --- a/static/app/components/teamSelector.spec.tsx +++ b/static/app/components/teamSelector.spec.tsx @@ -1,9 +1,9 @@ -import selectEvent from 'react-select-event'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; import {TeamFixture} from 'sentry-fixture/team'; import {act, render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import {openCreateTeamModal} from 'sentry/actionCreators/modal'; import {addTeamToProject} from 'sentry/actionCreators/projects'; diff --git a/static/app/components/timeRangeSelector/index.tsx b/static/app/components/timeRangeSelector/index.tsx index ee4daf34e06c5e..d3a7299000c053 100644 --- a/static/app/components/timeRangeSelector/index.tsx +++ b/static/app/components/timeRangeSelector/index.tsx @@ -189,7 +189,7 @@ export function TimeRangeSelector({ value: item.value, // Wrap inside OptionLabel to offset custom margins from SelectorItemLabel // TODO: Remove SelectorItemLabel & OptionLabel - label: {item.label as string}, + label: {item.label}, details: start && end ? 
( {getAbsoluteSummary(start, end, utc)} @@ -207,7 +207,7 @@ export function TimeRangeSelector({ return { value: item.value, - label: {item.label as string}, + label: {item.label}, textValue: item.searchKey, }; }); @@ -224,7 +224,7 @@ export function TimeRangeSelector({ return filteredItems.map>(item => ({ value: item.value, - label: item.label as string, + label: item.label, textValue: item.searchKey, })); }, diff --git a/static/app/data/platformCategories.tsx b/static/app/data/platformCategories.tsx index 3da0f1538d171f..bc76b683d4943f 100644 --- a/static/app/data/platformCategories.tsx +++ b/static/app/data/platformCategories.tsx @@ -421,9 +421,9 @@ export const feedbackWebApiPlatforms: readonly PlatformKey[] = [ 'cordova', 'ruby-rack', 'ruby', + 'rust', 'native', 'native-qt', - 'native', 'node-awslambda', 'node-azurefunctions', 'node-connect', @@ -517,17 +517,17 @@ const customMetricFrontendPlatforms: readonly PlatformKey[] = [ ]; // These are all the platforms that can set up custom metrics. -export const customMetricPlatforms: Set = new Set([ +export const customMetricPlatforms: readonly PlatformKey[] = [ ...customMetricFrontendPlatforms, ...customMetricBackendPlatforms, -]); +]; /** * The list of platforms for which we have created onboarding instructions. * Should be a subset of the list of `customMetricPlatforms`. 
*/ -export const customMetricOnboardingPlatforms = new Set( - [...customMetricPlatforms].filter( +export const customMetricOnboardingPlatforms: readonly PlatformKey[] = + customMetricPlatforms.filter( p => // Legacy platforms that do not have in-product docs ![ @@ -537,5 +537,4 @@ export const customMetricOnboardingPlatforms = new Set( 'python-pylons', 'python-tryton', ].includes(p) - ) -); + ); diff --git a/static/app/gettingStartedDocs/capacitor/capacitor.tsx b/static/app/gettingStartedDocs/capacitor/capacitor.tsx index 9c5153648717d3..a83bb1a9185fe7 100644 --- a/static/app/gettingStartedDocs/capacitor/capacitor.tsx +++ b/static/app/gettingStartedDocs/capacitor/capacitor.tsx @@ -83,7 +83,7 @@ const getSentryInitLayout = (params: Params, siblingOption: string): string => { ? ` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' }${ @@ -446,7 +446,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/capacitor/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/capacitor/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/capacitor/user-feedback/configuration/#bring-your-own-button', }), configurations: getSetupConfiguration({ params, diff --git a/static/app/gettingStartedDocs/electron/electron.tsx b/static/app/gettingStartedDocs/electron/electron.tsx index 705805d5b9cc91..bd5d80b1f40190 100644 --- a/static/app/gettingStartedDocs/electron/electron.tsx +++ b/static/app/gettingStartedDocs/electron/electron.tsx @@ -268,7 +268,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 
'https://docs.sentry.io/platforms/javascript/guides/electron/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/electron/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/electron/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/angular.tsx b/static/app/gettingStartedDocs/javascript/angular.tsx index e1fbb1fa140d13..94d681285cda6d 100644 --- a/static/app/gettingStartedDocs/javascript/angular.tsx +++ b/static/app/gettingStartedDocs/javascript/angular.tsx @@ -218,7 +218,7 @@ function getSdkSetupSnippet(params: Params) { ? ` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' }${ @@ -324,7 +324,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/angular/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/angular/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/angular/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/astro.tsx b/static/app/gettingStartedDocs/javascript/astro.tsx index 0a36e3da2ea2b9..8685871cab112b 100644 --- a/static/app/gettingStartedDocs/javascript/astro.tsx +++ b/static/app/gettingStartedDocs/javascript/astro.tsx @@ -250,7 +250,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/astro/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/astro/user-feedback/configuration/', + 
linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/astro/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/ember.tsx b/static/app/gettingStartedDocs/javascript/ember.tsx index 43d2cf1e2afe4d..3f915b31f4e01e 100644 --- a/static/app/gettingStartedDocs/javascript/ember.tsx +++ b/static/app/gettingStartedDocs/javascript/ember.tsx @@ -40,7 +40,7 @@ Sentry.init({ ? ` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' } @@ -214,7 +214,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/ember/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/ember/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/ember/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/gatsby.tsx b/static/app/gettingStartedDocs/javascript/gatsby.tsx index fea36336302ed3..d8a0d19657933b 100644 --- a/static/app/gettingStartedDocs/javascript/gatsby.tsx +++ b/static/app/gettingStartedDocs/javascript/gatsby.tsx @@ -37,7 +37,7 @@ Sentry.init({ ? 
` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' }${ @@ -248,7 +248,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/gatsby/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/gatsby/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/gatsby/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/javascript.tsx b/static/app/gettingStartedDocs/javascript/javascript.tsx index 6b7b24e8a3cf8b..5513972f934aa1 100644 --- a/static/app/gettingStartedDocs/javascript/javascript.tsx +++ b/static/app/gettingStartedDocs/javascript/javascript.tsx @@ -1,4 +1,5 @@ import crashReportCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/crashReportCallout'; +import widgetCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/widgetCallout'; import TracePropagationMessage from 'sentry/components/onboarding/gettingStartedDoc/replay/tracePropagationMessage'; import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step'; import type { @@ -8,6 +9,10 @@ import type { } from 'sentry/components/onboarding/gettingStartedDoc/types'; import {getUploadSourceMapsStep} from 'sentry/components/onboarding/gettingStartedDoc/utils'; import { + getCrashReportModalConfigDescription, + getCrashReportModalInstallDescriptionJavaScript, + getCrashReportModalIntroduction, + getCrashReportModalSnippetJavaScript, getFeedbackConfigOptions, getFeedbackConfigureDescription, } from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding'; @@ -36,7 +41,7 @@ Sentry.init({ ? 
` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' }${ @@ -215,7 +220,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/user-feedback/configuration/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { @@ -238,12 +246,37 @@ const feedbackOnboarding: OnboardingConfig = { nextSteps: () => [], }; +const crashReportOnboarding: OnboardingConfig = { + introduction: () => getCrashReportModalIntroduction(), + install: (params: Params) => [ + { + type: StepType.INSTALL, + description: getCrashReportModalInstallDescriptionJavaScript(), + configurations: getCrashReportModalSnippetJavaScript(params), + }, + ], + configure: () => [ + { + type: StepType.CONFIGURE, + description: getCrashReportModalConfigDescription({ + link: 'https://docs.sentry.io/platforms/javascript/user-feedback/configuration/#crash-report-modal', + }), + additionalInfo: widgetCallout({ + link: 'https://docs.sentry.io/platforms/javascript/user-feedback/#user-feedback-widget', + }), + }, + ], + verify: () => [], + nextSteps: () => [], +}; + const docs: Docs = { onboarding, feedbackOnboardingNpm: feedbackOnboarding, replayOnboardingNpm: replayOnboarding, replayOnboardingJsLoader, customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}), + crashReportOnboarding, }; export default docs; diff --git a/static/app/gettingStartedDocs/javascript/nextjs.tsx b/static/app/gettingStartedDocs/javascript/nextjs.tsx index 1d091ae2182b57..c4db4f65170967 100644 --- a/static/app/gettingStartedDocs/javascript/nextjs.tsx +++ 
b/static/app/gettingStartedDocs/javascript/nextjs.tsx @@ -206,7 +206,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/nextjs/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/nextjs/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/nextjs/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/react.tsx b/static/app/gettingStartedDocs/javascript/react.tsx index d7be7e63f8825e..01a1d131938f20 100644 --- a/static/app/gettingStartedDocs/javascript/react.tsx +++ b/static/app/gettingStartedDocs/javascript/react.tsx @@ -36,7 +36,7 @@ Sentry.init({ ? ` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' }${ @@ -240,7 +240,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/react/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/react/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/react/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/remix.tsx b/static/app/gettingStartedDocs/javascript/remix.tsx index 606340e566b4d7..0a8b0e0e386bc1 100644 --- a/static/app/gettingStartedDocs/javascript/remix.tsx +++ b/static/app/gettingStartedDocs/javascript/remix.tsx @@ -194,7 +194,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 
'https://docs.sentry.io/platforms/javascript/guides/remix/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/remix/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/remix/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/svelte.tsx b/static/app/gettingStartedDocs/javascript/svelte.tsx index 25279a4a1e4996..008ff9ee40e5ef 100644 --- a/static/app/gettingStartedDocs/javascript/svelte.tsx +++ b/static/app/gettingStartedDocs/javascript/svelte.tsx @@ -38,7 +38,7 @@ Sentry.init({ ? ` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' }${ @@ -237,7 +237,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/svelte/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/svelte/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/svelte/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/sveltekit.tsx b/static/app/gettingStartedDocs/javascript/sveltekit.tsx index 55c78fb1762977..a6d8752cd62f33 100644 --- a/static/app/gettingStartedDocs/javascript/sveltekit.tsx +++ b/static/app/gettingStartedDocs/javascript/sveltekit.tsx @@ -153,7 +153,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/sveltekit/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/sveltekit/user-feedback/configuration/', + linkButton: + 
'https://docs.sentry.io/platforms/javascript/guides/sveltekit/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/vue.tsx b/static/app/gettingStartedDocs/javascript/vue.tsx index 7576e055f8d501..a72c0dab216340 100644 --- a/static/app/gettingStartedDocs/javascript/vue.tsx +++ b/static/app/gettingStartedDocs/javascript/vue.tsx @@ -308,7 +308,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/vue/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/vue/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/vue/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/routes.tsx b/static/app/routes.tsx index 27b854b4e31d3e..c2a80e387fd343 100644 --- a/static/app/routes.tsx +++ b/static/app/routes.tsx @@ -1687,6 +1687,12 @@ function buildRoutes() { import('sentry/views/performance/http/httpLandingPage'))} /> + import('sentry/views/performance/http/httpDomainSummaryPage') + )} + /> diff --git a/static/app/stores/guideStore.spec.tsx b/static/app/stores/guideStore.spec.tsx index 4485b88d6ba266..12ce581800d2aa 100644 --- a/static/app/stores/guideStore.spec.tsx +++ b/static/app/stores/guideStore.spec.tsx @@ -120,7 +120,7 @@ describe('GuideStore', function () { it('hides when a modal is open', function () { expect(GuideStore.getState().forceHide).toBe(false); - ModalStore.openModal(() => {}, {}); + ModalStore.openModal(() =>
, {}); expect(GuideStore.getState().forceHide).toBe(true); diff --git a/static/app/styles/prism.tsx b/static/app/styles/prism.tsx index 69373571a7b74c..00d39fbdb7fdc4 100644 --- a/static/app/styles/prism.tsx +++ b/static/app/styles/prism.tsx @@ -123,13 +123,6 @@ export const prismStyles = (theme: Theme) => css` } } - pre[class*='language-']::selection, - code[class*='language-']::selection, - code[class*='language-'] *::selection { - text-shadow: none; - background: var(--prism-selected); - } - pre[data-line] { position: relative; } diff --git a/static/app/styles/text.tsx b/static/app/styles/text.tsx index a054fbff23a7d2..65780a063e38b3 100644 --- a/static/app/styles/text.tsx +++ b/static/app/styles/text.tsx @@ -10,8 +10,8 @@ const textStyles = () => css` h6, p, /* Exclude ol/ul elements inside interactive selectors/menus */ - ul:not([role='listbox'], [role='grid']), - ol:not([role='listbox'], [role='grid']), + ul:not([role='listbox'], [role='grid'], [role='menu']), + ol:not([role='listbox'], [role='grid'], [role='menu']), table, dl, blockquote, diff --git a/static/app/types/group.tsx b/static/app/types/group.tsx index f60d6e5e914571..270acd328dd141 100644 --- a/static/app/types/group.tsx +++ b/static/app/types/group.tsx @@ -784,6 +784,7 @@ export interface BaseGroup { pluginContexts: any[]; // TODO(ts) pluginIssues: TitledPlugin[]; priority: PriorityLevel; + priorityLockedAt: string | null; project: Project; seenBy: User[]; shareId: string; @@ -911,7 +912,7 @@ export type KeyValueListDataItem = { meta?: Meta; subjectDataTestId?: string; subjectIcon?: React.ReactNode; - value?: React.ReactNode; + value?: React.ReactNode | Record; }; export type KeyValueListData = KeyValueListDataItem[]; diff --git a/static/app/types/metrics.tsx b/static/app/types/metrics.tsx index f01db6c9fb076b..42648fb5e5bfb1 100644 --- a/static/app/types/metrics.tsx +++ b/static/app/types/metrics.tsx @@ -62,17 +62,18 @@ export interface MetricsQueryApiResponse { }[][]; end: string; intervals: 
string[]; - meta: [ - {name: string; type: string}, - { - group_bys: string[]; - limit: number | null; - order: string | null; - scaling_factor?: number | null; - unit?: string | null; - unit_family?: 'duration' | 'information' | null; - }, - ][]; + meta: ( + | {name: string; type: string} + // The last entry in meta has a different shape + | { + group_bys: string[]; + limit: number | null; + order: string | null; + scaling_factor?: number | null; + unit?: string | null; + unit_family?: 'duration' | 'information' | null; + } + )[][]; start: string; } @@ -99,6 +100,7 @@ export type MetricMeta = { // name is returned by the API but should not be used, use parseMRI(mri).name instead // name: string; operations: MetricsOperation[]; + projectIds: number[]; type: MetricType; unit: string; }; diff --git a/static/app/utils/analytics/feedbackAnalyticsEvents.tsx b/static/app/utils/analytics/feedbackAnalyticsEvents.tsx index 4807a290ab5bbc..4c35166e47b052 100644 --- a/static/app/utils/analytics/feedbackAnalyticsEvents.tsx +++ b/static/app/utils/analytics/feedbackAnalyticsEvents.tsx @@ -17,7 +17,7 @@ export const feedbackEventMap: Record = { 'feedback.list-item-selected': 'Selected Item in Feedback List', 'feedback.details-integration-issue-clicked': 'Clicked Integration Issue Button in Feedback Details', - 'feedback.whats-new-banner-dismissed': 'Dismissed Feedback Whatss New Banner', + 'feedback.whats-new-banner-dismissed': 'Dismissed Feedback Whats New Banner', 'feedback.whats-new-banner-viewed': 'Viewed Feedback Whats New Banner', 'feedback.mark-spam-clicked': 'Marked Feedback as Spam', 'feedback.list-view-setup-sidebar': 'Viewed Feedback Onboarding Sidebar', diff --git a/static/app/utils/analytics/replayAnalyticsEvents.tsx b/static/app/utils/analytics/replayAnalyticsEvents.tsx index 2d8971f5614e21..45c67bcdb3ff65 100644 --- a/static/app/utils/analytics/replayAnalyticsEvents.tsx +++ b/static/app/utils/analytics/replayAnalyticsEvents.tsx @@ -63,7 +63,7 @@ export type 
ReplayEventParameters = { platform: string | undefined; project_id: string | undefined; referrer: string; - referrer_table: ReferrerTableType; + referrer_table?: ReferrerTableType; }; 'replay.list-paginated': { direction: 'next' | 'prev'; diff --git a/static/app/utils/discover/fields.tsx b/static/app/utils/discover/fields.tsx index c80967aa90c050..2b8bb0495c05aa 100644 --- a/static/app/utils/discover/fields.tsx +++ b/static/app/utils/discover/fields.tsx @@ -119,6 +119,38 @@ export type Column = QueryFieldValue; export type Alignments = 'left' | 'right'; +export type CountUnit = 'count'; + +export enum DurationUnit { + NANOSECOND = 'nanosecond', + MICROSECOND = 'microsecond', + MILLISECOND = 'millisecond', + SECOND = 'second', + MINUTE = 'minute', + HOUR = 'hour', + DAY = 'day', + WEEK = 'week', + MONTH = 'month', + YEAR = 'year', +} + +export enum SizeUnit { + BIT = 'bit', + BYTE = 'byte', + KIBIBYTE = 'kibibyte', + KILOBYTE = 'kilobyte', + MEBIBYTE = 'mebibyte', + MEGABYTE = 'megabyte', + GIBIBYTE = 'gibibyte', + GIGABYTE = 'gigabyte', + TEBIBYTE = 'tebibyte', + TERABYTE = 'terabyte', + PEBIBYTE = 'pebibyte', + PETABYTE = 'petabyte', + EXBIBYTE = 'exbibyte', + EXABYTE = 'exabyte', +} + export enum RateUnit { PER_SECOND = '1/second', PER_MINUTE = '1/minute', diff --git a/static/app/utils/docs.tsx b/static/app/utils/docs.tsx index b88e575a614e11..2e7a0fc70c0adc 100644 --- a/static/app/utils/docs.tsx +++ b/static/app/utils/docs.tsx @@ -61,7 +61,7 @@ export function getDocsPlatform( return null; } -export function getConfigureTracingDocsLink( +export function getConfigurePerformanceDocsLink( project: AvatarProject | undefined ): string | null { const platform = project?.platform ?? 
null; diff --git a/static/app/utils/metrics/constants.tsx b/static/app/utils/metrics/constants.tsx index 45d5844d2bc098..2a7a290166b134 100644 --- a/static/app/utils/metrics/constants.tsx +++ b/static/app/utils/metrics/constants.tsx @@ -40,6 +40,7 @@ export const emptyMetricsQueryWidget: MetricQueryWidgetParams = { groupBy: [], sort: DEFAULT_SORT_STATE, displayType: MetricDisplayType.LINE, + isHidden: false, }; export const emptyMetricsFormulaWidget: MetricFormulaWidgetParams = { @@ -48,4 +49,5 @@ export const emptyMetricsFormulaWidget: MetricFormulaWidgetParams = { formula: '', sort: DEFAULT_SORT_STATE, displayType: MetricDisplayType.LINE, + isHidden: false, }; diff --git a/static/app/utils/metrics/dashboardImport.spec.tsx b/static/app/utils/metrics/dashboardImport.spec.tsx index 41c2012c6ef828..b711c6b8b76fdf 100644 --- a/static/app/utils/metrics/dashboardImport.spec.tsx +++ b/static/app/utils/metrics/dashboardImport.spec.tsx @@ -47,6 +47,7 @@ const mockAvailableMetrics = (mris: MRI[]): MetricMeta[] => { mri, operations: [], blockingStatus: [], + projectIds: [], })) as MetricMeta[]; }; diff --git a/static/app/utils/metrics/index.spec.tsx b/static/app/utils/metrics/index.spec.tsx index 421e3b4abcaef1..d5bce4a368dea5 100644 --- a/static/app/utils/metrics/index.spec.tsx +++ b/static/app/utils/metrics/index.spec.tsx @@ -1,3 +1,5 @@ +import {resetMockDate, setMockDate} from 'sentry-test/utils'; + import type {MetricsOperation} from 'sentry/types'; import { getAbsoluteDateTimeRange, @@ -121,9 +123,11 @@ describe('isFormattedMQL', () => { }); describe('getAbsoluteDateTimeRange', () => { - beforeAll(() => { - jest.useFakeTimers(); - jest.setSystemTime(new Date('2024-01-01T00:00:00Z')); + beforeEach(() => { + setMockDate(new Date('2024-01-01T00:00:00Z')); + }); + afterEach(() => { + resetMockDate(); }); it('should return the correct object with "start" and "end" when period is not provided', () => { @@ -150,8 +154,4 @@ describe('getAbsoluteDateTimeRange', () => { end: 
'2024-01-01T00:00:00.000Z', }); }); - - afterAll(() => { - jest.useRealTimers(); - }); }); diff --git a/static/app/utils/metrics/normalizeMetricValue.spec.tsx b/static/app/utils/metrics/normalizeMetricValue.spec.tsx deleted file mode 100644 index 3c3eacfd20e508..00000000000000 --- a/static/app/utils/metrics/normalizeMetricValue.spec.tsx +++ /dev/null @@ -1,66 +0,0 @@ -import { - getMetricValueNormalizer, - getNormalizedMetricUnit, -} from 'sentry/utils/metrics/normalizeMetricValue'; - -describe('getNormalizedMetricUnit', () => { - it('returns "millisecond" when unit is in timeConversionFactors', () => { - expect(getNormalizedMetricUnit('second')).toBe('millisecond'); - expect(getNormalizedMetricUnit('hour')).toBe('millisecond'); - expect(getNormalizedMetricUnit('hours')).toBe('millisecond'); - expect(getNormalizedMetricUnit('minute')).toBe('millisecond'); - expect(getNormalizedMetricUnit('nanoseconds')).toBe('millisecond'); - }); - - it('returns "byte" when unit is in byte10ConversionFactors', () => { - expect(getNormalizedMetricUnit('kilobyte')).toBe('byte'); - expect(getNormalizedMetricUnit('petabyte')).toBe('byte'); - expect(getNormalizedMetricUnit('petabytes')).toBe('byte'); - }); - - it('returns "byte2" when unit is in byte2ConversionFactors', () => { - expect(getNormalizedMetricUnit('bit')).toBe('byte2'); - expect(getNormalizedMetricUnit('kibibyte')).toBe('byte2'); - expect(getNormalizedMetricUnit('kibibytes')).toBe('byte2'); - }); - - it('returns the unit when it is not in any of the conversion factors', () => { - expect(getNormalizedMetricUnit('foo')).toBe('foo'); - }); - - it('returns none for count operations', () => { - expect(getNormalizedMetricUnit('second', 'count')).toBe('none'); - expect(getNormalizedMetricUnit('seconds', 'count_unique')).toBe('none'); - }); -}); - -describe('getMetricValueNormalizer', () => { - it('returns a function that normalizes the value to milliseconds when the unit is in timeConversionFactors', () => { - 
expect(getMetricValueNormalizer('second')(1)).toBe(1000); - expect(getMetricValueNormalizer('seconds')(2)).toBe(2000); - - expect(getMetricValueNormalizer('hour')(1)).toBe(3600000); - expect(getMetricValueNormalizer('hours')(2)).toBe(7200000); - }); - - it('returns a function that normalizes the value to bytes when the unit is in byte10ConversionFactors', () => { - expect(getMetricValueNormalizer('byte')(1)).toBe(1); - expect(getMetricValueNormalizer('bytes')(2)).toBe(2); - - expect(getMetricValueNormalizer('terabyte')(1)).toBe(1000 ** 4); - expect(getMetricValueNormalizer('terabytes')(2)).toBe(2 * 1000 ** 4); - }); - - it('returns a function that normalizes the value to bytes when the unit is in byte2ConversionFactors', () => { - expect(getMetricValueNormalizer('bit')(1)).toBe(1 / 8); - expect(getMetricValueNormalizer('bits')(1)).toBe(1 / 8); - - expect(getMetricValueNormalizer('tebibyte')(1)).toBe(1024 ** 4); - expect(getMetricValueNormalizer('tebibytes')(2)).toBe(2 * 1024 ** 4); - }); - - it('skips nomalization for count operations', () => { - expect(getMetricValueNormalizer('second', 'count')(1)).toBe(1); - expect(getMetricValueNormalizer('seconds', 'count_unique')(2)).toBe(2); - }); -}); diff --git a/static/app/utils/metrics/normalizeMetricValue.tsx b/static/app/utils/metrics/normalizeMetricValue.tsx deleted file mode 100644 index 9e6a45373c7792..00000000000000 --- a/static/app/utils/metrics/normalizeMetricValue.tsx +++ /dev/null @@ -1,109 +0,0 @@ -import { - DAY, - HOUR, - MICROSECOND, - MILLISECOND, - MINUTE, - NANOSECOND, - SECOND, - WEEK, -} from 'sentry/utils/formatters'; - -const timeConversionFactors = { - week: WEEK, - weeks: WEEK, - day: DAY, - days: DAY, - hour: HOUR, - hours: HOUR, - minute: MINUTE, - minutes: MINUTE, - second: SECOND, - seconds: SECOND, - millisecond: MILLISECOND, - milliseconds: MILLISECOND, - microsecond: MICROSECOND, - microseconds: MICROSECOND, - nanosecond: NANOSECOND, - nanoseconds: NANOSECOND, -}; - -const 
byte10ConversionFactors = { - byte: 1, - bytes: 1, - kilobyte: 1000, - kilobytes: 1000, - megabyte: 1000 ** 2, - megabytes: 1000 ** 2, - gigabyte: 1000 ** 3, - gigabytes: 1000 ** 3, - terabyte: 1000 ** 4, - terabytes: 1000 ** 4, - petabyte: 1000 ** 5, - petabytes: 1000 ** 5, - exabyte: 1000 ** 6, - exabytes: 1000 ** 6, -}; - -const byte2ConversionFactors = { - bit: 1 / 8, - bits: 1 / 8, - byte2: 1, - kibibyte: 1024, - kibibytes: 1024, - mebibyte: 1024 ** 2, - mebibytes: 1024 ** 2, - gibibyte: 1024 ** 3, - gibibytes: 1024 ** 3, - tebibyte: 1024 ** 4, - tebibytes: 1024 ** 4, - pebibyte: 1024 ** 5, - pebibytes: 1024 ** 5, - exbibyte: 1024 ** 6, - exbibytes: 1024 ** 6, -}; - -export function getMetricConversionFunction(fromUnit: string, toUnit: string) { - let conversionFactors: Record | null = null; - - if (fromUnit in timeConversionFactors && toUnit in timeConversionFactors) { - conversionFactors = timeConversionFactors; - } else if (fromUnit in byte10ConversionFactors && toUnit in byte10ConversionFactors) { - conversionFactors = byte10ConversionFactors; - } else if (fromUnit in byte2ConversionFactors && toUnit in byte2ConversionFactors) { - conversionFactors = byte2ConversionFactors; - } - - return (value: T): T => { - if (!value || !conversionFactors) { - return value; - } - - return (value * (conversionFactors[fromUnit] / conversionFactors[toUnit])) as T; - }; -} - -export function getNormalizedMetricUnit(unit: string, operation?: string) { - if (!unit || operation === 'count' || operation === 'count_unique') { - return 'none'; - } - - if (unit in timeConversionFactors) { - return 'millisecond'; - } - - if (unit in byte10ConversionFactors) { - return 'byte'; - } - - if (unit in byte2ConversionFactors) { - return 'byte2'; - } - - return unit; -} - -export function getMetricValueNormalizer(unit: string, operation?: string) { - const normalizedMetricUnit = getNormalizedMetricUnit(unit, operation); - return getMetricConversionFunction(unit, normalizedMetricUnit); -} 
diff --git a/static/app/utils/metrics/types.tsx b/static/app/utils/metrics/types.tsx index 962928a3c52fd8..8b0b561739c481 100644 --- a/static/app/utils/metrics/types.tsx +++ b/static/app/utils/metrics/types.tsx @@ -35,6 +35,7 @@ export enum MetricQueryType { export interface BaseWidgetParams { displayType: MetricDisplayType; id: number; + isHidden: boolean; type: MetricQueryType; focusedSeries?: FocusedMetricsSeries[]; sort?: SortState; diff --git a/static/app/utils/metrics/useBlockMetric.tsx b/static/app/utils/metrics/useBlockMetric.tsx index 2ea24fe1ca2db5..72cbe8a31f591d 100644 --- a/static/app/utils/metrics/useBlockMetric.tsx +++ b/static/app/utils/metrics/useBlockMetric.tsx @@ -1,7 +1,6 @@ import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator'; import {t} from 'sentry/locale'; import type {MetricMeta, MRI, Project} from 'sentry/types'; -import {getUseCaseFromMRI} from 'sentry/utils/metrics/mri'; import {useMutation, useQueryClient} from 'sentry/utils/queryClient'; import useApi from 'sentry/utils/useApi'; import useOrganization from 'sentry/utils/useOrganization'; @@ -46,21 +45,22 @@ export const useBlockMetric = (project: Project) => { }); }, onSuccess: data => { - const useCase = getUseCaseFromMRI(data.metricMri); - const metaQueryKey = getMetricsMetaQueryKey( - slug, - {projects: [parseInt(project.id, 10)]}, - useCase ?? 
'custom' - ); - queryClient.setQueryData( - metaQueryKey, + const metaQueryKey = getMetricsMetaQueryKey(slug, {}); + + // Only match the endpoint, to search in all insances of the query + const queryKeyFilter = {queryKey: [metaQueryKey[0]]}; + + queryClient.setQueriesData( + queryKeyFilter, (oldData: BlockMetricResponse): BlockMetricResponse => { if (!oldData) { return undefined; } const oldMeta = oldData[0]; const index = oldMeta.findIndex( - (metric: {mri: MRI}) => metric.mri === data.metricMri + metric => + metric.mri === data.metricMri && + metric.projectIds.includes(Number(project.id)) ); if (index !== undefined && index !== -1) { @@ -78,7 +78,7 @@ export const useBlockMetric = (project: Project) => { addSuccessMessage(t('Metric updated')); - queryClient.invalidateQueries(metaQueryKey); + queryClient.invalidateQueries(queryKeyFilter); }, onError: () => { addErrorMessage(t('An error occurred while updating the metric')); diff --git a/static/app/utils/metrics/useMetricsMeta.tsx b/static/app/utils/metrics/useMetricsMeta.tsx index aa8655444aed94..dd473684600f69 100644 --- a/static/app/utils/metrics/useMetricsMeta.tsx +++ b/static/app/utils/metrics/useMetricsMeta.tsx @@ -1,6 +1,6 @@ import type {PageFilters} from 'sentry/types'; import {formatMRI, getUseCaseFromMRI} from 'sentry/utils/metrics/mri'; -import type {ApiQueryKey, UseApiQueryOptions} from 'sentry/utils/queryClient'; +import type {ApiQueryKey} from 'sentry/utils/queryClient'; import {useApiQuery} from 'sentry/utils/queryClient'; import useOrganization from 'sentry/utils/useOrganization'; @@ -9,22 +9,12 @@ import type {MetricMeta, MRI, UseCase} from '../../types/metrics'; import {getMetaDateTimeParams} from './index'; const EMPTY_ARRAY: MetricMeta[] = []; -const DEFAULT_USE_CASES = ['sessions', 'transactions', 'custom', 'spans']; - -export function getMetricsMetaQueryKeys( - orgSlug: string, - projects: PageFilters['projects'], - useCases?: UseCase[] -): ApiQueryKey[] { - return ( - useCases?.map(useCase 
=> getMetricsMetaQueryKey(orgSlug, {projects}, useCase)) ?? [] - ); -} +const DEFAULT_USE_CASES: UseCase[] = ['sessions', 'transactions', 'custom', 'spans']; export function getMetricsMetaQueryKey( orgSlug: string, {projects, datetime}: Partial, - useCase: UseCase + useCase?: UseCase[] ): ApiQueryKey { const queryParams = projects?.length ? {useCase, project: projects, ...getMetaDateTimeParams(datetime)} @@ -32,72 +22,36 @@ export function getMetricsMetaQueryKey( return [`/organizations/${orgSlug}/metrics/meta/`, {query: queryParams}]; } -function useMetaUseCase( - useCase: UseCase, - pageFilters: Partial, - options: Omit, 'staleTime'> -) { - const {slug} = useOrganization(); - - const apiQueryResult = useApiQuery( - getMetricsMetaQueryKey(slug, pageFilters, useCase), - { - ...options, - staleTime: 2000, // 2 seconds to cover page load - } - ); - - return apiQueryResult; -} - export function useMetricsMeta( pageFilters: Partial, - useCases?: UseCase[], + useCases: UseCase[] = DEFAULT_USE_CASES, filterBlockedMetrics = true, enabled: boolean = true ): {data: MetricMeta[]; isLoading: boolean} { - const enabledUseCases = useCases ?? 
DEFAULT_USE_CASES; + const {slug} = useOrganization(); - const {data: sessionMeta = [], ...sessionsReq} = useMetaUseCase( - 'sessions', - pageFilters, + const {data, isLoading} = useApiQuery( + getMetricsMetaQueryKey(slug, pageFilters, useCases), { - enabled: enabled && enabledUseCases.includes('sessions'), + enabled, + staleTime: 2000, // 2 seconds to cover page load } ); - const {data: txnsMeta = [], ...txnsReq} = useMetaUseCase('transactions', pageFilters, { - enabled: enabled && enabledUseCases.includes('transactions'), - }); - const {data: customMeta = [], ...customReq} = useMetaUseCase('custom', pageFilters, { - enabled: enabled && enabledUseCases.includes('custom'), - }); - const {data: spansMeta = [], ...spansReq} = useMetaUseCase('spans', pageFilters, { - enabled: enabled && enabledUseCases.includes('spans'), - }); - const isLoading = - (sessionsReq.isLoading && sessionsReq.fetchStatus !== 'idle') || - (txnsReq.isLoading && txnsReq.fetchStatus !== 'idle') || - (customReq.isLoading && customReq.fetchStatus !== 'idle') || - (spansReq.isLoading && spansReq.fetchStatus !== 'idle'); + if (!data) { + return {data: EMPTY_ARRAY, isLoading}; + } - const data = [ - ...(enabledUseCases.includes('sessions') ? sessionMeta : []), - ...(enabledUseCases.includes('transactions') ? txnsMeta : []), - ...(enabledUseCases.includes('custom') ? customMeta : []), - ...(enabledUseCases.includes('spans') ? spansMeta : []), - ].sort((a, b) => formatMRI(a.mri).localeCompare(formatMRI(b.mri))); + const meta = data.sort((a, b) => formatMRI(a.mri).localeCompare(formatMRI(b.mri))); if (!filterBlockedMetrics) { - return {data, isLoading}; + return {data: meta, isLoading}; } return { - data: isLoading - ? EMPTY_ARRAY - : data.filter(meta => { - return meta.blockingStatus?.every(({isBlocked}) => !isBlocked) ?? true; - }), + data: data.filter(entry => { + return entry.blockingStatus?.every(({isBlocked}) => !isBlocked) ?? 
true; + }), isLoading, }; } diff --git a/static/app/utils/profiling/hooks/useProfileFunctionTrends.spec.tsx b/static/app/utils/profiling/hooks/useProfileFunctionTrends.spec.tsx index 6a45e38f49f56d..e6390be9cf15d4 100644 --- a/static/app/utils/profiling/hooks/useProfileFunctionTrends.spec.tsx +++ b/static/app/utils/profiling/hooks/useProfileFunctionTrends.spec.tsx @@ -1,4 +1,3 @@ -import type {ReactElement} from 'react'; import {useMemo} from 'react'; import {initializeOrg} from 'sentry-test/initializeOrg'; @@ -9,7 +8,7 @@ import {useProfileFunctionTrends} from 'sentry/utils/profiling/hooks/useProfileF import {QueryClientProvider} from 'sentry/utils/queryClient'; import {OrganizationContext} from 'sentry/views/organizationContext'; -function TestContext({children}: {children: ReactElement}) { +function TestContext({children}: {children: React.ReactNode}) { const {organization} = useMemo(() => initializeOrg(), []); return ( diff --git a/static/app/utils/profiling/hooks/useProfileFunctions.spec.tsx b/static/app/utils/profiling/hooks/useProfileFunctions.spec.tsx index a69732625e5c8f..4a170146c00db1 100644 --- a/static/app/utils/profiling/hooks/useProfileFunctions.spec.tsx +++ b/static/app/utils/profiling/hooks/useProfileFunctions.spec.tsx @@ -1,4 +1,3 @@ -import type {ReactElement} from 'react'; import {useMemo} from 'react'; import {initializeOrg} from 'sentry-test/initializeOrg'; @@ -9,7 +8,7 @@ import {useProfileFunctions} from 'sentry/utils/profiling/hooks/useProfileFuncti import {QueryClientProvider} from 'sentry/utils/queryClient'; import {OrganizationContext} from 'sentry/views/organizationContext'; -function TestContext({children}: {children: ReactElement}) { +function TestContext({children}: {children: React.ReactNode}) { const {organization} = useMemo(() => initializeOrg(), []); return ( diff --git a/static/app/utils/theme.tsx b/static/app/utils/theme.tsx index 508e0700ebc7aa..9c781967b0f132 100644 --- a/static/app/utils/theme.tsx +++ 
b/static/app/utils/theme.tsx @@ -141,7 +141,6 @@ export const darkColors = { const prismLight = { '--prism-base': '#332B3B', - '--prism-selected': '#F5F3F7', '--prism-inline-code': '#332B3B', '--prism-inline-code-background': '#F5F3F7', '--prism-highlight-background': '#5C78A31C', @@ -158,7 +157,6 @@ const prismLight = { const prismDark = { '--prism-base': '#D6D0DC', - '--prism-selected': '#393041', '--prism-inline-code': '#D6D0DC', '--prism-inline-code-background': '#18121C', '--prism-highlight-background': '#A8A2C31C', diff --git a/static/app/utils/useDismissAlert.spec.tsx b/static/app/utils/useDismissAlert.spec.tsx index 00757ab374adea..ee63c3fc5ba1bf 100644 --- a/static/app/utils/useDismissAlert.spec.tsx +++ b/static/app/utils/useDismissAlert.spec.tsx @@ -1,10 +1,10 @@ import {reactHooks} from 'sentry-test/reactTestingLibrary'; +import {setMockDate} from 'sentry-test/utils'; import localStorage from 'sentry/utils/localStorage'; import useDismissAlert from 'sentry/utils/useDismissAlert'; jest.mock('sentry/utils/localStorage'); -jest.useFakeTimers(); const mockSetItem = jest.mocked(localStorage.setItem); const mockGetItem = jest.mocked(localStorage.getItem); @@ -14,7 +14,7 @@ const now = new Date('2020-01-01'); describe('useDismissAlert', () => { beforeEach(() => { - jest.setSystemTime(now); + setMockDate(now); mockSetItem.mockReset(); mockGetItem.mockReset(); @@ -53,25 +53,26 @@ describe('useDismissAlert', () => { expect(result.current.isDismissed).toBeTruthy(); }); - it('should set the current timestamp into localstorage when an alert is dismissed', () => { - const {result} = reactHooks.renderHook(useDismissAlert, { + it('should set the current timestamp into localstorage when an alert is dismissed', async () => { + const {result, waitFor} = reactHooks.renderHook(useDismissAlert, { initialProps: {key}, }); reactHooks.act(() => { result.current.dismiss(); - jest.runAllTicks(); }); - expect(mockSetItem).toHaveBeenCalledWith( - key, - 
JSON.stringify(now.getTime().toString()) + await waitFor(() => + expect(mockSetItem).toHaveBeenCalledWith( + key, + JSON.stringify(now.getTime().toString()) + ) ); }); it('should be dismissed if the timestamp in localStorage is older than the expiration', () => { const today = new Date('2020-01-01'); - jest.setSystemTime(today); + setMockDate(today); // Dismissed on christmas const christmas = new Date('2019-12-25').getTime(); diff --git a/static/app/utils/useOrganization.stories.tsx b/static/app/utils/useOrganization.stories.tsx new file mode 100644 index 00000000000000..ce58ee699f9700 --- /dev/null +++ b/static/app/utils/useOrganization.stories.tsx @@ -0,0 +1,16 @@ +import ObjectInspector from 'sentry/components/objectInspector'; +import StructuredEventData from 'sentry/components/structuredEventData'; +import storyBook from 'sentry/stories/storyBook'; +import useOrganization from 'sentry/utils/useOrganization'; + +export default storyBook('useOrganization', story => { + story('useOrganization - via ObjectInspector', () => { + const org = useOrganization(); + return ; + }); + + story('useOrganization - via StructuredEventData', () => { + const org = useOrganization(); + return ; + }); +}); diff --git a/static/app/views/alerts/create.spec.tsx b/static/app/views/alerts/create.spec.tsx index 0877dd7ee13528..a8e59f6ff26804 100644 --- a/static/app/views/alerts/create.spec.tsx +++ b/static/app/views/alerts/create.spec.tsx @@ -1,4 +1,3 @@ -import selectEvent from 'react-select-event'; import {EnvironmentsFixture} from 'sentry-fixture/environments'; import {GroupsFixture} from 'sentry-fixture/groups'; import {LocationFixture} from 'sentry-fixture/locationFixture'; @@ -9,6 +8,7 @@ import {RouteComponentPropsFixture} from 'sentry-fixture/routeComponentPropsFixt import {initializeOrg} from 'sentry-test/initializeOrg'; import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import ProjectsStore 
from 'sentry/stores/projectsStore'; import TeamStore from 'sentry/stores/teamStore'; diff --git a/static/app/views/alerts/list/incidents/index.spec.tsx b/static/app/views/alerts/list/incidents/index.spec.tsx index 6599c1eeadd3a6..af8f39832c942a 100644 --- a/static/app/views/alerts/list/incidents/index.spec.tsx +++ b/static/app/views/alerts/list/incidents/index.spec.tsx @@ -1,4 +1,3 @@ -import selectEvent from 'react-select-event'; import {IncidentFixture} from 'sentry-fixture/incident'; import {IncidentStatsFixture} from 'sentry-fixture/incidentStats'; import {MetricRuleFixture} from 'sentry-fixture/metricRule'; @@ -7,6 +6,7 @@ import {TeamFixture} from 'sentry-fixture/team'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {act, render, screen, userEvent, within} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import ProjectsStore from 'sentry/stores/projectsStore'; import TeamStore from 'sentry/stores/teamStore'; diff --git a/static/app/views/alerts/rules/issue/index.spec.tsx b/static/app/views/alerts/rules/issue/index.spec.tsx index 34d531dbed638b..78d24bd2c26b44 100644 --- a/static/app/views/alerts/rules/issue/index.spec.tsx +++ b/static/app/views/alerts/rules/issue/index.spec.tsx @@ -1,6 +1,5 @@ import type {PlainRoute} from 'react-router'; import {browserHistory} from 'react-router'; -import selectEvent from 'react-select-event'; import moment from 'moment'; import {EnvironmentsFixture} from 'sentry-fixture/environments'; import {ProjectFixture} from 'sentry-fixture/project'; @@ -17,6 +16,7 @@ import { waitFor, within, } from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import { addErrorMessage, diff --git a/static/app/views/alerts/rules/issue/ruleNode.spec.tsx b/static/app/views/alerts/rules/issue/ruleNode.spec.tsx index 320f3c0f8892ac..9a9316573d7554 100644 --- a/static/app/views/alerts/rules/issue/ruleNode.spec.tsx +++ 
b/static/app/views/alerts/rules/issue/ruleNode.spec.tsx @@ -1,8 +1,8 @@ -import selectEvent from 'react-select-event'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import ModalStore from 'sentry/stores/modalStore'; import RuleNode from 'sentry/views/alerts/rules/issue/ruleNode'; @@ -182,7 +182,7 @@ describe('RuleNode', () => { screen.getByText('Here is a number choice field').parentElement ).toHaveTextContent(labelReplacer(label, {[`{${fieldName}}`]: 'label2'})); - selectEvent.openMenu(screen.getByText('label2')); + await selectEvent.openMenu(screen.getByText('label2')); await userEvent.click(screen.getByText('label3')); expect(onPropertyChange).toHaveBeenCalledWith(index, fieldName, '3'); diff --git a/static/app/views/alerts/rules/issue/ticketRuleModal.spec.tsx b/static/app/views/alerts/rules/issue/ticketRuleModal.spec.tsx index e2b152724f09a5..51cb2e9d604e10 100644 --- a/static/app/views/alerts/rules/issue/ticketRuleModal.spec.tsx +++ b/static/app/views/alerts/rules/issue/ticketRuleModal.spec.tsx @@ -1,8 +1,8 @@ -import selectEvent from 'react-select-event'; import styled from '@emotion/styled'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import {addSuccessMessage} from 'sentry/actionCreators/indicator'; import {makeCloseButton} from 'sentry/components/globalModal/components'; @@ -240,7 +240,7 @@ describe('ProjectAlerts -> TicketRuleModal', function () { } const menu = screen.getByRole('textbox', {name: 'Assignee'}); - selectEvent.openMenu(menu); + await selectEvent.openMenu(menu); await userEvent.type(menu, 'Joe{Escape}'); await selectEvent.select(menu, 'Joe'); diff --git 
a/static/app/views/alerts/rules/metric/details/utils.spec.tsx b/static/app/views/alerts/rules/metric/details/utils.spec.tsx index f013d088220bf3..f4a71c1e6850e0 100644 --- a/static/app/views/alerts/rules/metric/details/utils.spec.tsx +++ b/static/app/views/alerts/rules/metric/details/utils.spec.tsx @@ -1,18 +1,18 @@ -import MockDate from 'mockdate'; import moment from 'moment'; import {IncidentFixture} from 'sentry-fixture/incident'; import {MetricRuleFixture} from 'sentry-fixture/metricRule'; +import {resetMockDate, setMockDate} from 'sentry-test/utils'; + import {buildMetricGraphDateRange} from 'sentry/views/alerts/rules/metric/details/utils'; describe('buildMetricGraphDateRange', () => { const now = '2022-05-16T20:00:00'; - beforeAll(() => { - MockDate.set(`${now}Z`); + beforeEach(() => { + setMockDate(new Date(`${now}Z`)); }); - afterAll(() => { - // reset mock date - MockDate.set(new Date(1508208080000)); + afterEach(() => { + resetMockDate(); }); it('should use current date for an active alert', () => { diff --git a/static/app/views/alerts/rules/metric/ruleForm.spec.tsx b/static/app/views/alerts/rules/metric/ruleForm.spec.tsx index e93f640530388d..05a3089d7d6da0 100644 --- a/static/app/views/alerts/rules/metric/ruleForm.spec.tsx +++ b/static/app/views/alerts/rules/metric/ruleForm.spec.tsx @@ -1,10 +1,10 @@ -import selectEvent from 'react-select-event'; import {EventsStatsFixture} from 'sentry-fixture/events'; import {IncidentTriggerFixture} from 'sentry-fixture/incidentTrigger'; import {MetricRuleFixture} from 'sentry-fixture/metricRule'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {act, render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import {addErrorMessage} from 'sentry/actionCreators/indicator'; import type FormModel from 'sentry/components/forms/model'; diff --git a/static/app/views/dashboards/manage/index.spec.tsx 
b/static/app/views/dashboards/manage/index.spec.tsx index 951000b832bb7b..5aaa4cb9b95fd0 100644 --- a/static/app/views/dashboards/manage/index.spec.tsx +++ b/static/app/views/dashboards/manage/index.spec.tsx @@ -1,10 +1,10 @@ import {browserHistory} from 'react-router'; -import selectEvent from 'react-select-event'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; import {RouteComponentPropsFixture} from 'sentry-fixture/routeComponentPropsFixture'; import {act, render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import ProjectsStore from 'sentry/stores/projectsStore'; import ManageDashboards from 'sentry/views/dashboards/manage'; diff --git a/static/app/views/dashboards/metrics/table.tsx b/static/app/views/dashboards/metrics/table.tsx index 386d60463047e8..27fce59fcf6b38 100644 --- a/static/app/views/dashboards/metrics/table.tsx +++ b/static/app/views/dashboards/metrics/table.tsx @@ -138,7 +138,10 @@ export function getTableData( const normalizedResults = filteredQueries.map((query, index) => { const queryResults = data.data[index]; - const metaUnit = data.meta[index]?.[1]?.unit; + const meta = data.meta[index]; + const lastMetaEntry = data.meta[index]?.[meta.length - 1]; + const metaUnit = + (lastMetaEntry && 'unit' in lastMetaEntry && lastMetaEntry.unit) || 'none'; const normalizedGroupResults = queryResults.map(group => { return { by: {...getEmptyGroup(tags), ...group.by}, diff --git a/static/app/views/dashboards/widgetBuilder/widgetBuilder.spec.tsx b/static/app/views/dashboards/widgetBuilder/widgetBuilder.spec.tsx index 50085ff9ce73ce..f19c43db1e91e6 100644 --- a/static/app/views/dashboards/widgetBuilder/widgetBuilder.spec.tsx +++ b/static/app/views/dashboards/widgetBuilder/widgetBuilder.spec.tsx @@ -1,4 +1,3 @@ -import selectEvent from 'react-select-event'; import {urlEncode} from '@sentry/utils'; import 
{MetricsFieldFixture} from 'sentry-fixture/metrics'; import {ReleaseFixture} from 'sentry-fixture/release'; @@ -15,6 +14,7 @@ import { userEvent, waitFor, } from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import * as modals from 'sentry/actionCreators/modal'; import ProjectsStore from 'sentry/stores/projectsStore'; @@ -1154,8 +1154,6 @@ describe('WidgetBuilder', function () { }); it('does not wipe column changes when filters are modified', async function () { - jest.useFakeTimers(); - // widgetIndex: undefined means creating a new widget renderTestComponent({params: {widgetIndex: undefined}}); diff --git a/static/app/views/dashboards/widgetBuilder/widgetBuilderDataset.spec.tsx b/static/app/views/dashboards/widgetBuilder/widgetBuilderDataset.spec.tsx index bf345373c17f65..2394ac70e152e6 100644 --- a/static/app/views/dashboards/widgetBuilder/widgetBuilderDataset.spec.tsx +++ b/static/app/views/dashboards/widgetBuilder/widgetBuilderDataset.spec.tsx @@ -1,4 +1,3 @@ -import selectEvent from 'react-select-event'; import {urlEncode} from '@sentry/utils'; import {MetricsFieldFixture} from 'sentry-fixture/metrics'; import {SessionsFieldFixture} from 'sentry-fixture/sessions'; @@ -12,6 +11,8 @@ import { waitFor, within, } from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; +import {resetMockDate, setMockDate} from 'sentry-test/utils'; import ProjectsStore from 'sentry/stores/projectsStore'; import TagStore from 'sentry/stores/tagStore'; @@ -270,7 +271,7 @@ describe('WidgetBuilder', function () { afterEach(function () { MockApiClient.clearMockResponses(); jest.clearAllMocks(); - jest.useRealTimers(); + resetMockDate(); }); describe('Release Widgets', function () { @@ -362,7 +363,7 @@ describe('WidgetBuilder', function () { }); it('does not allow sort on tags except release', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); 
renderTestComponent(); expect( @@ -403,7 +404,7 @@ describe('WidgetBuilder', function () { }); it('makes the appropriate sessions call', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); renderTestComponent(); expect( @@ -435,7 +436,7 @@ describe('WidgetBuilder', function () { }); it('calls the session endpoint with the right limit', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); renderTestComponent(); expect( @@ -473,7 +474,7 @@ describe('WidgetBuilder', function () { }); it('calls sessions api when session.status is selected as a groupby', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); renderTestComponent(); expect( @@ -532,7 +533,7 @@ describe('WidgetBuilder', function () { }); it('sets widgetType to release', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); renderTestComponent(); await userEvent.click(await screen.findByText('Releases (Sessions, Crash rates)'), { @@ -604,7 +605,7 @@ describe('WidgetBuilder', function () { }); it('adds a function when the only column chosen in a table is a tag', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); renderTestComponent(); await userEvent.click(await screen.findByText('Releases (Sessions, Crash rates)'), { diff --git a/static/app/views/dashboards/widgetBuilder/widgetBuilderSortBy.spec.tsx b/static/app/views/dashboards/widgetBuilder/widgetBuilderSortBy.spec.tsx index b928e03b0944cf..f399240a10dc75 100644 --- a/static/app/views/dashboards/widgetBuilder/widgetBuilderSortBy.spec.tsx +++ b/static/app/views/dashboards/widgetBuilder/widgetBuilderSortBy.spec.tsx @@ -1,4 +1,3 @@ -import selectEvent from 'react-select-event'; import {urlEncode} from '@sentry/utils'; import 
{MetricsFieldFixture} from 'sentry-fixture/metrics'; import {SessionsFieldFixture} from 'sentry-fixture/sessions'; @@ -6,6 +5,7 @@ import {TagsFixture} from 'sentry-fixture/tags'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import ProjectsStore from 'sentry/stores/projectsStore'; import TagStore from 'sentry/stores/tagStore'; @@ -553,7 +553,7 @@ describe('WidgetBuilder', function () { await selectEvent.select(await screen.findByText('Select group'), 'project'); expect(screen.getAllByText('count()')).toHaveLength(2); await selectEvent.select(screen.getAllByText('count()')[1], 'Custom Equation'); - selectEvent.openMenu(screen.getByPlaceholderText('Enter Equation')); + await selectEvent.openMenu(screen.getByPlaceholderText('Enter Equation')); await userEvent.click(screen.getByPlaceholderText('Enter Equation')); diff --git a/static/app/views/dashboards/widgetCard/releaseWidgetQueries.spec.tsx b/static/app/views/dashboards/widgetCard/releaseWidgetQueries.spec.tsx index cb6fcd8ad8fc02..f3aeac2860e8f6 100644 --- a/static/app/views/dashboards/widgetCard/releaseWidgetQueries.spec.tsx +++ b/static/app/views/dashboards/widgetCard/releaseWidgetQueries.spec.tsx @@ -6,6 +6,7 @@ import {SessionsFieldFixture} from 'sentry-fixture/sessions'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {render, screen, waitFor} from 'sentry-test/reactTestingLibrary'; +import {resetMockDate, setMockDate} from 'sentry-test/utils'; import { DashboardFilterKeys, @@ -72,12 +73,15 @@ describe('Dashboards > ReleaseWidgetQueries', function () { const api = new MockApiClient(); + beforeEach(function () { + setMockDate(new Date('2022-08-02')); + }); afterEach(function () { MockApiClient.clearMockResponses(); + resetMockDate(); }); it('can send chart requests', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); 
const mock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/metrics/data/', body: MetricsFieldFixture(`session.all`), @@ -459,7 +463,6 @@ describe('Dashboards > ReleaseWidgetQueries', function () { }); it('can send table requests', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); const mock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/metrics/data/', body: MetricsSessionUserCountByStatusByReleaseFixture(), @@ -559,7 +562,6 @@ describe('Dashboards > ReleaseWidgetQueries', function () { }); it('can send big number requests', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); const mock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/metrics/data/', body: MetricsFieldFixture(`count_unique(sentry.sessions.user)`), @@ -605,7 +607,6 @@ describe('Dashboards > ReleaseWidgetQueries', function () { }); it('can send multiple API requests', function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); const metricsMock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/metrics/data/', body: SessionsFieldFixture(`session.all`), @@ -700,7 +701,6 @@ describe('Dashboards > ReleaseWidgetQueries', function () { }); it('adjusts interval based on date window', function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); const mock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/metrics/data/', body: SessionsFieldFixture(`session.all`), diff --git a/static/app/views/ddm/chart/chart.tsx b/static/app/views/ddm/chart/chart.tsx index e347ea42000ae2..4fb1ce03ca7f29 100644 --- a/static/app/views/ddm/chart/chart.tsx +++ b/static/app/views/ddm/chart/chart.tsx @@ -17,7 +17,7 @@ import {isChartHovered} from 'sentry/components/charts/utils'; import {t} from 'sentry/locale'; import type {ReactEchartsRef} from 'sentry/types/echarts'; import mergeRefs from 'sentry/utils/mergeRefs'; -import {formatMetricsUsingUnitAndOp} from 
'sentry/utils/metrics/formatters'; +import {formatMetricUsingUnit} from 'sentry/utils/metrics/formatters'; import {MetricDisplayType} from 'sentry/utils/metrics/types'; import type {CombinedMetricChartProps, Series} from 'sentry/views/ddm/chart/types'; import type {UseFocusAreaResult} from 'sentry/views/ddm/chart/useFocusArea'; @@ -70,9 +70,7 @@ export const MetricChart = forwardRef( ({series, displayType, height, group, samples, focusArea}, forwardedRef) => { const chartRef = useRef(null); - const firstUnit = series.find(s => !s.hidden)?.unit || series[0]?.unit || 'none'; - const firstOperation = - series.find(s => !s.hidden)?.operation || series[0]?.operation || ''; + const firstUnit = series.find(s => !s.hidden)?.unit || 'none'; useEffect(() => { if (!group) { @@ -112,23 +110,18 @@ export const MetricChart = forwardRef( const chartProps = useMemo(() => { const hasMultipleUnits = new Set(seriesToShow.map(s => s.unit)).size > 1; - const seriesMeta = seriesToShow.reduce( + const seriesUnits = seriesToShow.reduce( (acc, s) => { - acc[s.seriesName] = { - unit: s.unit, - operation: s.operation, - }; + acc[s.seriesName] = s.unit; return acc; }, - {} as Record + {} as Record ); const timeseriesFormatters = { valueFormatter: (value: number, seriesName?: string) => { - const meta = seriesName - ? seriesMeta[seriesName] - : {unit: firstUnit, operation: undefined}; - return formatMetricsUsingUnitAndOp(value, meta.unit, meta.operation); + const unit = (seriesName && seriesUnits[seriesName]) ?? 'none'; + return formatMetricUsingUnit(value, unit); }, isGroupedByDate: true, bucketSize, @@ -223,10 +216,9 @@ export const MetricChart = forwardRef( id: MAIN_Y_AXIS_ID, axisLabel: { formatter: (value: number) => { - return formatMetricsUsingUnitAndOp( + return formatMetricUsingUnit( value, - hasMultipleUnits ? 'none' : firstUnit, - firstOperation + hasMultipleUnits ? 
'none' : firstUnit ); }, }, @@ -262,7 +254,6 @@ export const MetricChart = forwardRef( samples, focusArea, firstUnit, - firstOperation, ]); return ( diff --git a/static/app/views/ddm/chart/types.tsx b/static/app/views/ddm/chart/types.tsx index 411dcaa8a98d36..346fb1e6b389df 100644 --- a/static/app/views/ddm/chart/types.tsx +++ b/static/app/views/ddm/chart/types.tsx @@ -6,13 +6,13 @@ export type Series = { color: string; data: {name: number; value: number}[]; id: string; - operation: string; seriesName: string; unit: string; groupBy?: Record; hidden?: boolean; paddingIndices?: Set; release?: string; + scalingFactor?: number; transaction?: string; }; diff --git a/static/app/views/ddm/chart/useFocusArea.tsx b/static/app/views/ddm/chart/useFocusArea.tsx index ae9c63985d0749..f8bae653729589 100644 --- a/static/app/views/ddm/chart/useFocusArea.tsx +++ b/static/app/views/ddm/chart/useFocusArea.tsx @@ -14,7 +14,6 @@ import {space} from 'sentry/styles/space'; import type {DateString} from 'sentry/types'; import type {EChartBrushEndHandler, ReactEchartsRef} from 'sentry/types/echarts'; import mergeRefs from 'sentry/utils/mergeRefs'; -import {getMetricConversionFunction} from 'sentry/utils/metrics/normalizeMetricValue'; import {MAIN_X_AXIS_ID, MAIN_Y_AXIS_ID} from 'sentry/views/ddm/chart/chart'; import type {ValueRect} from 'sentry/views/ddm/chart/chartUtils'; import {getValueRect} from 'sentry/views/ddm/chart/chartUtils'; @@ -42,6 +41,7 @@ interface UseFocusAreaOptions { export interface UseFocusAreaProps extends FocusAreaProps { chartRef: RefObject; opts: UseFocusAreaOptions; + scalingFactor: number; chartUnit?: string; onZoom?: (range: DateTimeObject) => void; sampleUnit?: string; @@ -52,8 +52,7 @@ type BrushEndResult = Parameters[0]; export function useFocusArea({ selection: selection, opts: {widgetIndex, isDisabled, useFullYAxis}, - sampleUnit = 'none', - chartUnit = 'none', + scalingFactor, onAdd, onDraw, onRemove, @@ -111,13 +110,11 @@ export function useFocusArea({ 
return; } - const valueConverter = getMetricConversionFunction(chartUnit, sampleUnit); - const range = getSelectionRange( brushEnd, !!useFullYAxis, getValueRect(chartRef), - valueConverter + scalingFactor ); onAdd?.({ widgetIndex, @@ -134,7 +131,7 @@ export function useFocusArea({ }); isDrawingRef.current = false; }, - [isDisabled, sampleUnit, chartUnit, useFullYAxis, chartRef, onAdd, widgetIndex] + [isDisabled, useFullYAxis, scalingFactor, onAdd, widgetIndex] ); const handleRemove = useCallback(() => { @@ -210,18 +207,16 @@ export function useFocusArea({ onZoom={handleZoomIn} chartRef={chartRef} useFullYAxis={!!useFullYAxis} - sampleUnit={sampleUnit} - chartUnit={chartUnit} + scalingFactor={scalingFactor} /> ) : null, }), [ applyChartProps, - chartUnit, handleRemove, handleZoomIn, hasFocusArea, - sampleUnit, + scalingFactor, selection, useFullYAxis, ] @@ -232,11 +227,10 @@ export type UseFocusAreaResult = ReturnType; type FocusAreaOverlayProps = { chartRef: RefObject; - chartUnit: string; onRemove: () => void; onZoom: () => void; rect: FocusAreaSelection | null; - sampleUnit: string; + scalingFactor: number; useFullYAxis: boolean; }; @@ -246,8 +240,7 @@ function FocusAreaOverlay({ onRemove, useFullYAxis, chartRef, - sampleUnit, - chartUnit, + scalingFactor, }: FocusAreaOverlayProps) { const [position, setPosition] = useState(null); const wrapperRef = useRef(null); @@ -275,9 +268,8 @@ function FocusAreaOverlay({ } const finder = {xAxisId: MAIN_X_AXIS_ID, yAxisId: MAIN_Y_AXIS_ID}; - const valueConverter = getMetricConversionFunction(sampleUnit, chartUnit); - const max = valueConverter(rect.range.max); - const min = valueConverter(rect.range.min); + const max = rect.range.max * scalingFactor; + const min = rect.range.min * scalingFactor; const topLeft = chartInstance.convertToPixel(finder, [ getTimestamp(rect.range.start), @@ -314,7 +306,7 @@ function FocusAreaOverlay({ if (!isEqual(newPosition, position)) { setPosition(newPosition); } - }, [chartRef, rect, 
sampleUnit, chartUnit, useFullYAxis, position]); + }, [chartRef, rect, scalingFactor, useFullYAxis, position]); useEffect(() => { updatePosition(); @@ -352,7 +344,7 @@ const getSelectionRange = ( params: BrushEndResult, useFullYAxis: boolean, boundingRect: ValueRect, - valueConverter: (value: number) => number + scalingFactor: number ): SelectionRange => { const rect = params.areas[0]; @@ -362,8 +354,8 @@ const getSelectionRange = ( const startDate = getDateString(Math.max(startTimestamp, boundingRect.xMin)); const endDate = getDateString(Math.min(endTimestamp, boundingRect.xMax)); - const min = useFullYAxis ? NaN : valueConverter(Math.min(...rect.coordRange[1])); - const max = useFullYAxis ? NaN : valueConverter(Math.max(...rect.coordRange[1])); + const min = useFullYAxis ? NaN : Math.min(...rect.coordRange[1]) / scalingFactor; + const max = useFullYAxis ? NaN : Math.max(...rect.coordRange[1]) / scalingFactor; return { start: startDate, diff --git a/static/app/views/ddm/chart/useMetricChartSamples.tsx b/static/app/views/ddm/chart/useMetricChartSamples.tsx index 9bb6e068a28709..9f88707de5c1d6 100644 --- a/static/app/views/ddm/chart/useMetricChartSamples.tsx +++ b/static/app/views/ddm/chart/useMetricChartSamples.tsx @@ -8,19 +8,18 @@ import {getFormatter} from 'sentry/components/charts/components/tooltip'; import {isChartHovered} from 'sentry/components/charts/utils'; import type {Field} from 'sentry/components/ddm/metricSamplesTable'; import {t} from 'sentry/locale'; -import type {EChartClickHandler, ReactEchartsRef, Series} from 'sentry/types/echarts'; +import type {EChartClickHandler, ReactEchartsRef} from 'sentry/types/echarts'; import {defined} from 'sentry/utils'; import mergeRefs from 'sentry/utils/mergeRefs'; import {isCumulativeOp} from 'sentry/utils/metrics'; import {formatMetricsUsingUnitAndOp} from 'sentry/utils/metrics/formatters'; -import {getMetricValueNormalizer} from 'sentry/utils/metrics/normalizeMetricValue'; import type {MetricCorrelation, 
MetricSummary} from 'sentry/utils/metrics/types'; import { getSummaryValueForOp, type MetricsSamplesResults, } from 'sentry/utils/metrics/useMetricsSamples'; import {fitToValueRect, getValueRect} from 'sentry/views/ddm/chart/chartUtils'; -import type {CombinedMetricChartProps} from 'sentry/views/ddm/chart/types'; +import type {CombinedMetricChartProps, Series} from 'sentry/views/ddm/chart/types'; import type {Sample} from 'sentry/views/ddm/widget'; type UseChartSamplesProps = { @@ -60,6 +59,7 @@ export function useMetricChartSamples({ }: UseChartSamplesProps) { const theme = useTheme(); const chartRef = useRef(null); + const scalingFactor = timeseries?.[0]?.scalingFactor ?? 1; const [valueRect, setValueRect] = useState(getValueRect(chartRef)); @@ -144,13 +144,11 @@ export function useMetricChartSamples({ return []; } - const normalizeMetric = getMetricValueNormalizer(unit ?? ''); - return Object.values(samples).map(sample => { const isHighlighted = highlightedSampleId === sample.transactionId; const xValue = moment(sample.timestamp).valueOf(); - const yValue = normalizeMetric(((sample.min ?? 0) + (sample.max ?? 0)) / 2) ?? 0; + const yValue = (((sample.min ?? 0) + (sample.max ?? 0)) / 2) * scalingFactor; const [xPosition, yPosition] = fitToValueRect(xValue, yValue, valueRect); @@ -159,7 +157,7 @@ export function useMetricChartSamples({ return { seriesName: sample.transactionId, - id: sample.transactionId, + id: sample.spanId, operation: '', unit: '', symbolSize: isHighlighted ? 
20 : 10, @@ -191,7 +189,14 @@ export function useMetricChartSamples({ z: 10, }; }); - }, [operation, unit, samples, highlightedSampleId, valueRect, theme.purple400]); + }, [ + operation, + samples, + highlightedSampleId, + scalingFactor, + valueRect, + theme.purple400, + ]); const formatterOptions = useMemo(() => { return { @@ -283,6 +288,7 @@ export function useMetricChartSamplesV2({ }: UseMetricChartSamplesV2Options) { const theme = useTheme(); const chartRef = useRef(null); + const timeseriesScalingFactor = timeseries?.[0]?.scalingFactor ?? 1; const [valueRect, setValueRect] = useState(getValueRect(chartRef)); @@ -342,14 +348,12 @@ export function useMetricChartSamplesV2({ return []; } - const normalizeMetric = getMetricValueNormalizer(unit); - return (samples ?? []).map(sample => { const isHighlighted = highlightedSampleId === sample.id; const xValue = moment(sample.timestamp).valueOf(); const value = getSummaryValueForOp(sample.summary, operation); - const yValue = normalizeMetric(value) ?? 
0; + const yValue = value * timeseriesScalingFactor; const [xPosition, yPosition] = fitToValueRect(xValue, yValue, valueRect); @@ -385,7 +389,14 @@ export function useMetricChartSamplesV2({ z: 10, }; }); - }, [highlightedSampleId, operation, samples, theme, unit, valueRect]); + }, [ + highlightedSampleId, + operation, + samples, + theme.purple400, + timeseriesScalingFactor, + valueRect, + ]); const formatterOptions = useMemo(() => { return { diff --git a/static/app/views/ddm/context.tsx b/static/app/views/ddm/context.tsx index a74218c1d3c68d..615db3a0af07ee 100644 --- a/static/app/views/ddm/context.tsx +++ b/static/app/views/ddm/context.tsx @@ -52,6 +52,7 @@ interface DDMContextValue { >; setSelectedWidgetIndex: (index: number) => void; showQuerySymbols: boolean; + toggleWidgetVisibility: (index: number) => void; updateWidget: ( index: number, data: Partial> @@ -80,6 +81,7 @@ export const DDMContext = createContext({ showQuerySymbols: false, updateWidget: () => {}, widgets: [], + toggleWidgetVisibility: () => {}, }); export function useDDMContext() { @@ -159,8 +161,13 @@ export function useMetricWidgets() { const removeWidget = useCallback( (index: number) => { setWidgets(currentWidgets => { - const newWidgets = [...currentWidgets]; + let newWidgets = [...currentWidgets]; newWidgets.splice(index, 1); + + // Ensure that a visible widget remains + if (!newWidgets.find(w => !w.isHidden)) { + newWidgets = newWidgets.map(w => ({...w, isHidden: false})); + } return newWidgets; }); }, @@ -186,6 +193,7 @@ export function useMetricWidgets() { addWidget, removeWidget, duplicateWidget, + setWidgets, }; } @@ -334,16 +342,32 @@ export function DDMContextProvider({children}: {children: React.ReactNode}) { (value: boolean) => { updateQuery({multiChartMode: value ? 
1 : 0}, {replace: true}); updateWidget(0, {focusedSeries: undefined}); - setSelectedWidgetIndex(0); + const firstVisibleWidgetIndex = widgets.findIndex(w => !w.isHidden); + setSelectedWidgetIndex(firstVisibleWidgetIndex); }, - [updateQuery, updateWidget] + [updateQuery, updateWidget, widgets] ); + const toggleWidgetVisibility = useCallback( + (index: number) => { + if (index === selectedWidgetIndex) { + const firstVisibleWidgetIndex = widgets.findIndex(w => !w.isHidden); + setSelectedWidgetIndex(firstVisibleWidgetIndex); + } + updateWidget(index, {isHidden: !widgets[index].isHidden}); + }, + [selectedWidgetIndex, updateWidget, widgets] + ); + + const selectedWidget = widgets[selectedWidgetIndex]; + const isSelectionValid = selectedWidget && !selectedWidget.isHidden; + const contextValue = useMemo( () => ({ addWidget: handleAddWidget, - selectedWidgetIndex: - selectedWidgetIndex > widgets.length - 1 ? 0 : selectedWidgetIndex, + selectedWidgetIndex: isSelectionValid + ? selectedWidgetIndex + : widgets.findIndex(w => !w.isHidden), setSelectedWidgetIndex: handleSetSelectedWidgetIndex, updateWidget: handleUpdateWidget, removeWidget, @@ -360,9 +384,11 @@ export function DDMContextProvider({children}: {children: React.ReactNode}) { setIsMultiChartMode: handleSetIsMultiChartMode, metricsSamples, setMetricsSamples, + toggleWidgetVisibility, }), [ handleAddWidget, + isSelectionValid, selectedWidgetIndex, widgets, handleSetSelectedWidgetIndex, @@ -377,6 +403,7 @@ export function DDMContextProvider({children}: {children: React.ReactNode}) { isMultiChartMode, handleSetIsMultiChartMode, metricsSamples, + toggleWidgetVisibility, ] ); diff --git a/static/app/views/ddm/ddmOnboarding/sidebar.tsx b/static/app/views/ddm/ddmOnboarding/sidebar.tsx index 46dcacf2714b38..fbdcc26ec8d99a 100644 --- a/static/app/views/ddm/ddmOnboarding/sidebar.tsx +++ b/static/app/views/ddm/ddmOnboarding/sidebar.tsx @@ -7,10 +7,14 @@ import {LinkButton} from 'sentry/components/button'; import {CompactSelect} 
from 'sentry/components/compactSelect'; import IdBadge from 'sentry/components/idBadge'; import {SdkDocumentation} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; +import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; import SidebarPanel from 'sentry/components/sidebar/sidebarPanel'; import type {CommonSidebarProps} from 'sentry/components/sidebar/types'; import {SidebarPanelKey} from 'sentry/components/sidebar/types'; -import {customMetricPlatforms} from 'sentry/data/platformCategories'; +import { + customMetricOnboardingPlatforms, + customMetricPlatforms, +} from 'sentry/data/platformCategories'; import platforms from 'sentry/data/platforms'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; @@ -18,8 +22,6 @@ import type {Project, SelectValue} from 'sentry/types'; import {METRICS_DOCS_URL} from 'sentry/utils/metrics/constants'; import useOrganization from 'sentry/utils/useOrganization'; -import {useCurrentProjectState} from './useCurrentProjectState'; - function MetricsOnboardingSidebar(props: CommonSidebarProps) { const {currentPanel, collapsed, hidePanel, orientation} = props; const organization = useOrganization(); @@ -36,7 +38,10 @@ function MetricsOnboardingSidebar(props: CommonSidebarProps) { unsupportedProjects, hasDocs, } = useCurrentProjectState({ - isActive, + currentPanel, + targetPanel: SidebarPanelKey.METRICS_ONBOARDING, + onboardingPlatforms: customMetricOnboardingPlatforms, + allPlatforms: customMetricPlatforms, }); const projectSelectOptions = useMemo(() => { @@ -150,7 +155,7 @@ function OnboardingContent({ : undefined; const supportsCustomMetrics = - currentProject.platform && customMetricPlatforms.has(currentProject.platform); + currentProject.platform && customMetricPlatforms.includes(currentProject.platform); if (!supportsCustomMetrics) { return ( diff --git a/static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx 
b/static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx deleted file mode 100644 index 3202ba7e8ea738..00000000000000 --- a/static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx +++ /dev/null @@ -1,93 +0,0 @@ -import {useEffect, useMemo, useState} from 'react'; -import partition from 'lodash/partition'; - -import { - customMetricOnboardingPlatforms, - customMetricPlatforms, -} from 'sentry/data/platformCategories'; -import PageFiltersStore from 'sentry/stores/pageFiltersStore'; -import {useLegacyStore} from 'sentry/stores/useLegacyStore'; -import type {Project} from 'sentry/types'; -import useProjects from 'sentry/utils/useProjects'; - -export function useCurrentProjectState({isActive}: {isActive: boolean}) { - const [currentProject, setCurrentProject] = useState(undefined); - const {projects, initiallyLoaded: projectsLoaded} = useProjects(); - const {selection, isReady} = useLegacyStore(PageFiltersStore); - - const [supportedProjects, unsupportedProjects] = useMemo(() => { - return partition(projects, p => p.platform && customMetricPlatforms.has(p.platform)); - }, [projects]); - - // Projects where we have the onboarding instructions ready: - const projectsWithOnboarding = useMemo( - () => - supportedProjects.filter( - p => p.platform && customMetricOnboardingPlatforms.has(p.platform) - ), - [supportedProjects] - ); - - useEffect(() => { - if (!isActive) { - setCurrentProject(undefined); - } - }, [isActive]); - - useEffect(() => { - if (currentProject || !projectsLoaded || !projects.length || !isReady || !isActive) { - return; - } - - if (!supportedProjects) { - return; - } - - if (selection.projects.length) { - const selectedProjectIds = selection.projects.map(String); - // If we selected something that has onboarding instructions, pick that first - const projectWithOnboarding = projectsWithOnboarding.find(p => - selectedProjectIds.includes(p.id) - ); - if (projectWithOnboarding) { - setCurrentProject(projectWithOnboarding); - return; - } - - // 
If we selected something that supports custom metrics pick that - const projectSupportsMetrics = supportedProjects.find(p => - selectedProjectIds.includes(p.id) - ); - if (projectSupportsMetrics) { - setCurrentProject(projectSupportsMetrics); - return; - } - // Else pick the first selected project - const firstSelectedProject = projects.find(p => selectedProjectIds.includes(p.id)); - setCurrentProject(firstSelectedProject); - } else { - setCurrentProject(projectsWithOnboarding.at(0) || supportedProjects.at(0)); - } - }, [ - currentProject, - projectsLoaded, - projects, - isReady, - isActive, - selection.projects, - projectsWithOnboarding, - supportedProjects, - ]); - - return { - projects: supportedProjects, - hasDocs: - !!currentProject?.platform && - customMetricOnboardingPlatforms.has(currentProject.platform), - allProjects: projects, - supportedProjects, - unsupportedProjects, - currentProject, - setCurrentProject, - }; -} diff --git a/static/app/views/ddm/pageHeaderActions.tsx b/static/app/views/ddm/pageHeaderActions.tsx index a99eb74aa5cbb9..2d5298ef72e51a 100644 --- a/static/app/views/ddm/pageHeaderActions.tsx +++ b/static/app/views/ddm/pageHeaderActions.tsx @@ -119,6 +119,7 @@ export function PageHeaderActions({showCustomMetricButton, addCustomMetric}: Pro , ] diff --git a/static/app/views/ddm/queries.tsx b/static/app/views/ddm/queries.tsx index e31570e41b8265..b891e3ca6a47c5 100644 --- a/static/app/views/ddm/queries.tsx +++ b/static/app/views/ddm/queries.tsx @@ -4,6 +4,7 @@ import * as echarts from 'echarts/core'; import {Button} from 'sentry/components/button'; import SwitchButton from 'sentry/components/switchButton'; +import {Tooltip} from 'sentry/components/tooltip'; import {IconAdd} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; @@ -32,6 +33,7 @@ export function Queries() { isMultiChartMode, setIsMultiChartMode, addWidget, + toggleWidgetVisibility, } = useDDMContext(); const {selection} = 
usePageFilters(); @@ -62,6 +64,8 @@ export function Queries() { return [querySymbolSet, formulaSymbolSet]; }, [widgets]); + const visibleWidgets = widgets.filter(widget => !widget.isHidden); + return ( @@ -71,53 +75,24 @@ export function Queries() { setSelectedWidgetIndex(index)} - role={isMultiChartMode ? 'button' : undefined} - aria-label={t('Select query')} - /> - ) - } - contextMenu={ - - } + showQuerySymbols={showQuerySymbols} + isSelected={index === selectedWidgetIndex} + canBeHidden={visibleWidgets.length > 1} /> ) : ( setSelectedWidgetIndex(index)} - role={isMultiChartMode ? 'button' : undefined} - aria-label={t('Select query')} - /> - ) - } - contextMenu={} + showQuerySymbols={showQuerySymbols} + isSelected={index === selectedWidgetIndex} + canBeHidden={visibleWidgets.length > 1} /> )} @@ -151,21 +126,25 @@ export function Queries() { } interface QueryProps { + canBeHidden: boolean; index: number; + isSelected: boolean; onChange: (index: number, data: Partial) => void; + onToggleVisibility: (index: number) => void; projects: number[]; + showQuerySymbols: boolean; widget: MetricQueryWidgetParams; - contextMenu?: React.ReactNode; - symbol?: React.ReactNode; } -export function Query({ +function Query({ widget, projects, onChange, - contextMenu, - symbol, + onToggleVisibility, index, + isSelected, + showQuerySymbols, + canBeHidden, }: QueryProps) { const metricsQuery = useMemo( () => ({ @@ -177,6 +156,10 @@ export function Query({ [widget.groupBy, widget.mri, widget.op, widget.query] ); + const handleToggle = useCallback(() => { + onToggleVisibility(index); + }, [index, onToggleVisibility]); + const handleChange = useCallback( (data: Partial) => { onChange(index, data); @@ -184,9 +167,19 @@ export function Query({ [index, onChange] ); + const isToggleDisabled = !canBeHidden && !widget.isHidden; + return ( - - {symbol} + + {showQuerySymbols && ( + + )} - {contextMenu} + ); } interface FormulaProps { availableVariables: Set; + canBeHidden: boolean; 
formulaVariables: Set; index: number; + isSelected: boolean; onChange: (index: number, data: Partial) => void; + onToggleVisibility: (index: number) => void; + showQuerySymbols: boolean; widget: MetricFormulaWidgetParams; - contextMenu?: React.ReactNode; - symbol?: React.ReactNode; } -export function Formula({ +function Formula({ availableVariables, formulaVariables, index, widget, onChange, - contextMenu, - symbol, + onToggleVisibility, + canBeHidden, + isSelected, + showQuerySymbols, }: FormulaProps) { + const handleToggle = useCallback(() => { + onToggleVisibility(index); + }, [index, onToggleVisibility]); + const handleChange = useCallback( - (formula: string) => { - onChange(index, {formula}); + (data: Partial) => { + onChange(index, data); }, [index, onChange] ); + + const isToggleDisabled = !canBeHidden && !widget.isHidden; + return ( - - {symbol} + + {showQuerySymbols && ( + + )} handleChange({formula})} /> - {contextMenu} + ); } +interface QueryToggleProps { + disabled: boolean; + isHidden: boolean; + isSelected: boolean; + onChange: (isHidden: boolean) => void; + queryId: number; +} + +function QueryToggle({ + isHidden, + queryId, + disabled, + onChange, + isSelected, +}: QueryToggleProps) { + let tooltipTitle = isHidden ? t('Show query') : t('Hide query'); + if (disabled) { + tooltipTitle = t('At least one query must be visible'); + } + + return ( + + onChange(!isHidden)} + role="button" + aria-label={isHidden ? 
t('Show query') : t('Hide query')} + /> + + ); +} + const QueryWrapper = styled('div')<{hasSymbol: boolean}>` display: grid; gap: ${space(1)}; @@ -248,6 +305,7 @@ const QueryWrapper = styled('div')<{hasSymbol: boolean}>` const StyledQuerySymbol = styled(QuerySymbol)<{isClickable: boolean}>` margin-top: 10px; + cursor: not-allowed; ${p => p.isClickable && `cursor: pointer;`} `; diff --git a/static/app/views/ddm/querySymbol.tsx b/static/app/views/ddm/querySymbol.tsx index b09b5fcc22cecf..b8c4f81d7434ab 100644 --- a/static/app/views/ddm/querySymbol.tsx +++ b/static/app/views/ddm/querySymbol.tsx @@ -1,3 +1,4 @@ +import {forwardRef} from 'react'; import styled from '@emotion/styled'; import {space} from 'sentry/styles/space'; @@ -15,7 +16,7 @@ export const getQuerySymbol = (index: number) => { return result; }; -const Symbol = styled('div')<{isSelected: boolean}>` +const Symbol = styled('span')<{isSelected: boolean; isHidden?: boolean}>` display: flex; width: 16px; height: 16px; @@ -32,24 +33,34 @@ const Symbol = styled('div')<{isSelected: boolean}>` ${p => p.isSelected && + !p.isHidden && ` background: ${p.theme.purple300}; color: ${p.theme.white}; `} + + ${p => + p.isHidden && + ` + background: ${p.theme.gray300}; + color: ${p.theme.white}; + `} `; -export function QuerySymbol({ - queryId, - isSelected, - ...props -}: React.ComponentProps & {isSelected: boolean; queryId: number}) { - const {showQuerySymbols, isMultiChartMode} = useDDMContext(); - if (!showQuerySymbols || queryId < 0) { - return null; - } - return ( - - {getQuerySymbol(queryId)} - - ); +interface QuerySymbolProps extends React.ComponentProps { + queryId: number; } + +export const QuerySymbol = forwardRef( + function QuerySymbol({queryId, isSelected, ...props}, ref) { + const {showQuerySymbols, isMultiChartMode} = useDDMContext(); + if (!showQuerySymbols || queryId < 0) { + return null; + } + return ( + + {getQuerySymbol(queryId)} + + ); + } +); diff --git a/static/app/views/ddm/scratchpad.tsx 
b/static/app/views/ddm/scratchpad.tsx index 6b273b5eb48657..fccbad9255271d 100644 --- a/static/app/views/ddm/scratchpad.tsx +++ b/static/app/views/ddm/scratchpad.tsx @@ -42,7 +42,7 @@ function widgetToQuery( op: widget.op, groupBy: widget.groupBy, query: widget.query, - isQueryOnly: isQueryOnly, + isQueryOnly: isQueryOnly || widget.isHidden, }; } @@ -172,40 +172,42 @@ export function MetricScratchpad() { return ( {isMultiChartMode ? ( - filteredWidgets.map((widget, index) => ( - - {queries => ( - 1} - onChange={handleChange} - filters={selection} - focusAreaProps={focusArea} - showQuerySymbols={showQuerySymbols} - onSampleClick={handleSampleClick} - onSampleClickV2={handleSampleClickV2} - chartHeight={200} - highlightedSampleId={ - selectedWidgetIndex === index ? highlightedSampleId : undefined - } - metricsSamples={metricsSamples} - context="ddm" - /> - )} - - )) + filteredWidgets.map((widget, index) => + widget.isHidden ? null : ( + + {queries => ( + 1} + onChange={handleChange} + filters={selection} + focusAreaProps={focusArea} + showQuerySymbols={showQuerySymbols} + onSampleClick={handleSampleClick} + onSampleClickV2={handleSampleClickV2} + chartHeight={200} + highlightedSampleId={ + selectedWidgetIndex === index ? 
highlightedSampleId : undefined + } + metricsSamples={metricsSamples} + context="ddm" + /> + )} + + ) + ) ) : ( widgetToQuery(w))} + queries={filteredWidgets + .filter(w => !(w.type === MetricQueryType.FORMULA && w.isHidden)) + .map(w => widgetToQuery(w))} isSelected hasSiblings={false} onChange={handleChange} diff --git a/static/app/views/ddm/summaryTable.tsx b/static/app/views/ddm/summaryTable.tsx index 5e95e7729ee7b4..11f96699acbf0a 100644 --- a/static/app/views/ddm/summaryTable.tsx +++ b/static/app/views/ddm/summaryTable.tsx @@ -13,7 +13,7 @@ import {space} from 'sentry/styles/space'; import {trackAnalytics} from 'sentry/utils/analytics'; import {getUtcDateString} from 'sentry/utils/dates'; import {DEFAULT_SORT_STATE} from 'sentry/utils/metrics/constants'; -import {formatMetricsUsingUnitAndOp} from 'sentry/utils/metrics/formatters'; +import {formatMetricUsingUnit} from 'sentry/utils/metrics/formatters'; import type {FocusedMetricsSeries, SortState} from 'sentry/utils/metrics/types'; import useOrganization from 'sentry/utils/useOrganization'; import usePageFilters from 'sentry/utils/usePageFilters'; @@ -118,6 +118,8 @@ export const SummaryTable = memo(function SummaryTable({ ...getValues(s.data), }; }) + // Filter series with no data + .filter(s => s.min !== Infinity) .sort((a, b) => { const {name, order} = sort; if (!name) { @@ -172,7 +174,6 @@ export const SummaryTable = memo(function SummaryTable({ color, hidden, unit, - operation, transaction, release, avg, @@ -229,18 +230,10 @@ export const SummaryTable = memo(function SummaryTable({ {/* TODO(ddm): Add a tooltip with the full value, don't add on click in case users want to copy the value */} - - {formatMetricsUsingUnitAndOp(avg, unit, operation)} - - - {formatMetricsUsingUnitAndOp(min, unit, operation)} - - - {formatMetricsUsingUnitAndOp(max, unit, operation)} - - - {formatMetricsUsingUnitAndOp(sum, unit, operation)} - + {formatMetricUsingUnit(avg, unit)} + {formatMetricUsingUnit(min, unit)} + 
{formatMetricUsingUnit(max, unit)} + {formatMetricUsingUnit(sum, unit)} {hasActions && ( diff --git a/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.spec.tsx b/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.spec.tsx index 79700ff8ad3e4c..7286dfd2714ee0 100644 --- a/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.spec.tsx +++ b/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.spec.tsx @@ -1,358 +1,398 @@ import {emptyMetricsQueryWidget} from 'sentry/utils/metrics/constants'; -import {MetricQueryType} from 'sentry/utils/metrics/types'; +import { + MetricDisplayType, + MetricQueryType, + type MetricWidgetQueryParams, +} from 'sentry/utils/metrics/types'; import {parseMetricWidgetsQueryParam} from 'sentry/views/ddm/utils/parseMetricWidgetsQueryParam'; +function testParsing(input: any, result: MetricWidgetQueryParams[]) { + expect(parseMetricWidgetsQueryParam(JSON.stringify(input))).toStrictEqual(result); +} + describe('parseMetricWidgetQueryParam', () => { const defaultState = [{...emptyMetricsQueryWidget, id: 0}]; it('returns default widget for invalid param', () => { - expect(parseMetricWidgetsQueryParam(undefined)).toStrictEqual(defaultState); - expect(parseMetricWidgetsQueryParam('')).toStrictEqual(defaultState); - expect(parseMetricWidgetsQueryParam('{}')).toStrictEqual(defaultState); - expect(parseMetricWidgetsQueryParam('true')).toStrictEqual(defaultState); - expect(parseMetricWidgetsQueryParam('2')).toStrictEqual(defaultState); - expect(parseMetricWidgetsQueryParam('"test"')).toStrictEqual(defaultState); + testParsing(undefined, defaultState); + testParsing({}, defaultState); + testParsing(true, defaultState); + testParsing(2, defaultState); + testParsing('', defaultState); + testParsing('test', defaultState); // empty array is not valid - expect(parseMetricWidgetsQueryParam('[]')).toStrictEqual(defaultState); + testParsing([], defaultState); }); it('returns a single widget', () => { - expect( - parseMetricWidgetsQueryParam( - 
JSON.stringify([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {order: 'asc'}, - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {name: undefined, order: 'asc'}, - }, - ]); + testParsing( + [ + // INPUT + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: ['dist'], + displayType: 'line', + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {order: 'asc'}, + isHidden: true, + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: ['dist'], + displayType: MetricDisplayType.LINE, + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {name: undefined, order: 'asc'}, + isHidden: true, + }, + ] + ); }); it('returns multiple widgets', () => { - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {name: 'avg', order: 'desc'}, - }, - { - id: 1, - type: MetricQueryType.QUERY, - mri: 'd:custom/sentry.event_manager.save@second', - op: 'avg', - query: '', - groupBy: ['event_type'], - displayType: 'line', - powerUserMode: false, - focusedSeries: [{id: 'default', groupBy: {event_type: 
'default'}}], - sort: {name: 'sum', order: 'asc'}, - }, - { - id: 2, - type: MetricQueryType.FORMULA, - formula: 'a + b', - displayType: 'line', - sort: {name: 'avg', order: 'desc'}, - focusedSeries: [], - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {name: 'avg', order: 'desc'}, - }, - { - id: 1, - type: MetricQueryType.QUERY, - mri: 'd:custom/sentry.event_manager.save@second', - op: 'avg', - query: '', - groupBy: ['event_type'], - displayType: 'line', - powerUserMode: false, - focusedSeries: [{id: 'default', groupBy: {event_type: 'default'}}], - sort: {name: 'sum', order: 'asc'}, - }, - { - id: 2, - type: MetricQueryType.FORMULA, - formula: 'a + b', - displayType: 'line', - sort: {name: 'avg', order: 'desc'}, - focusedSeries: [], - }, - ]); + testParsing( + // INPUT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: ['dist'], + displayType: 'line', + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {name: 'avg', order: 'desc'}, + isHidden: true, + }, + { + id: 1, + type: MetricQueryType.QUERY, + mri: 'd:custom/sentry.event_manager.save@second', + op: 'avg', + query: '', + groupBy: ['event_type'], + displayType: 'line', + powerUserMode: false, + focusedSeries: [{id: 'default', groupBy: {event_type: 'default'}}], + sort: {name: 'sum', order: 'asc'}, + isHidden: false, + }, + { + id: 2, + type: MetricQueryType.FORMULA, + formula: 'a + b', + displayType: 'line', + sort: {name: 'avg', order: 'desc'}, + focusedSeries: [], + isHidden: true, + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + 
groupBy: ['dist'], + displayType: MetricDisplayType.LINE, + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {name: 'avg', order: 'desc'}, + isHidden: true, + }, + { + id: 1, + type: MetricQueryType.QUERY, + mri: 'd:custom/sentry.event_manager.save@second', + op: 'avg', + query: '', + groupBy: ['event_type'], + displayType: MetricDisplayType.LINE, + powerUserMode: false, + focusedSeries: [{id: 'default', groupBy: {event_type: 'default'}}], + sort: {name: 'sum', order: 'asc'}, + isHidden: false, + }, + { + id: 2, + type: MetricQueryType.FORMULA, + formula: 'a + b', + displayType: MetricDisplayType.LINE, + sort: {name: 'avg', order: 'desc'}, + focusedSeries: [], + isHidden: true, + }, + ] + ); }); it('falls back to defaults', () => { // Missing values - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - mri: 'd:transactions/duration@millisecond', - }, - { - type: MetricQueryType.FORMULA, - formula: 'a * 2', - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'avg', - query: '', - groupBy: [], - displayType: 'line', - focusedSeries: [], - powerUserMode: false, - sort: {name: undefined, order: 'asc'}, - }, - { - id: 1, - type: MetricQueryType.FORMULA, - formula: 'a * 2', - displayType: 'line', - focusedSeries: [], - sort: {name: undefined, order: 'asc'}, - }, - ]); + testParsing( + // INPUT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + }, + { + type: MetricQueryType.FORMULA, + formula: 'a * 2', + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'avg', + query: '', + groupBy: [], + displayType: MetricDisplayType.LINE, + focusedSeries: [], + powerUserMode: false, + sort: {name: undefined, order: 'asc'}, + isHidden: false, + }, + { + id: 1, + type: MetricQueryType.FORMULA, + formula: 'a * 2', + displayType: 
MetricDisplayType.LINE, + focusedSeries: [], + sort: {name: undefined, order: 'asc'}, + isHidden: false, + }, + ] + ); // Invalid values - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - id: 'invalid', - type: 123, - mri: 'd:transactions/duration@millisecond', - op: 1, - query: 12, - groupBy: true, - displayType: 'aasfcsdf', - focusedSeries: {}, - powerUserMode: 1, - sort: {name: 1, order: 'invalid'}, - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'avg', - query: '', - groupBy: [], - displayType: 'line', - focusedSeries: [], - powerUserMode: false, - sort: {name: undefined, order: 'asc'}, - }, - ]); + testParsing( + // INPUT + [ + { + id: 'invalid', + type: 123, + mri: 'd:transactions/duration@millisecond', + op: 1, + query: 12, + groupBy: true, + displayType: 'aasfcsdf', + focusedSeries: {}, + powerUserMode: 1, + sort: {name: 1, order: 'invalid'}, + isHidden: 'foo', + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'avg', + query: '', + groupBy: [], + displayType: MetricDisplayType.LINE, + focusedSeries: [], + powerUserMode: false, + sort: {name: undefined, order: 'asc'}, + isHidden: false, + }, + ] + ); }); it('ignores invalid widgets', () => { - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - id: 0, - mri: 'd:transactions/duration@millisecond', - }, - { - // Missing MRI - }, - { - // Mallformed MRI - mri: 'transactions/duration@millisecond', - }, - { - // Duplicate id - id: 0, - mri: 'd:transactions/duration@second', - }, - { - // Missing formula - type: MetricQueryType.FORMULA, - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'avg', - query: '', - groupBy: [], - displayType: 'line', - focusedSeries: [], - powerUserMode: false, - sort: {name: undefined, order: 'asc'}, - }, - ]); + testParsing( + 
// INPUT + [ + { + id: 0, + mri: 'd:transactions/duration@millisecond', + }, + { + // Missing MRI + }, + { + // Mallformed MRI + mri: 'transactions/duration@millisecond', + }, + { + // Duplicate id + id: 0, + mri: 'd:transactions/duration@second', + }, + { + // Missing formula + type: MetricQueryType.FORMULA, + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'avg', + query: '', + groupBy: [], + displayType: MetricDisplayType.LINE, + focusedSeries: [], + powerUserMode: false, + sort: {name: undefined, order: 'asc'}, + isHidden: false, + }, + ] + ); }); it('returns default widget if there is no valid widget', () => { - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - // Missing MRI - }, - { - // Missing formula - type: MetricQueryType.FORMULA, - }, - ]) - ) - ).toStrictEqual(defaultState); + testParsing( + // INPUT + [ + { + // Missing MRI + }, + { + // Missing formula + type: MetricQueryType.FORMULA, + }, + ], + // RESULT + defaultState + ); }); it('handles missing array in array params', () => { - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: 'dist', - displayType: 'line', - focusedSeries: {id: 'default', groupBy: {dist: 'default'}}, - powerUserMode: true, - sort: {order: 'asc'}, - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', + testParsing( + // INPUT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: 'dist', + displayType: 'line', + focusedSeries: {id: 'default', groupBy: {dist: 'default'}}, + powerUserMode: true, + sort: {order: 'asc'}, + isHidden: false, + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 
'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: ['dist'], + displayType: MetricDisplayType.LINE, + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {name: undefined, order: 'asc'}, + isHidden: false, + }, + ] + ); + }); + + it('adds missing ids', () => { + function widgetWithId(id: T) { + return { + id, + type: MetricQueryType.QUERY as const, + mri: 'd:transactions/duration@millisecond' as const, + op: 'sum' as const, query: 'test:query', groupBy: ['dist'], - displayType: 'line', + displayType: MetricDisplayType.LINE, focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], powerUserMode: true, - sort: {name: undefined, order: 'asc'}, - }, - ]); - }); + sort: {name: 'avg' as const, order: 'desc' as const}, + isHidden: false, + }; + } - it('adds missing ids', () => { - const widgetWithId = (id: number | undefined) => ({ - id, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {name: 'avg', order: 'desc'}, - }); - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - widgetWithId(0), - widgetWithId(undefined), - widgetWithId(2), - { - // Invalid widget - }, - widgetWithId(undefined), - widgetWithId(3), - ]) - ) - ).toStrictEqual([ - widgetWithId(0), - widgetWithId(1), - widgetWithId(2), - widgetWithId(4), - widgetWithId(3), - ]); + testParsing( + // INPUT + [ + widgetWithId(0), + widgetWithId(undefined), + widgetWithId(2), + { + // Invalid widget + }, + widgetWithId(undefined), + widgetWithId(3), + ], + // RESULT + [ + widgetWithId(0), + widgetWithId(1), + widgetWithId(2), + widgetWithId(4), + widgetWithId(3), + ] + ); }); it('resets the id of a single widget to 0', () => { - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - id: 5, - type: MetricQueryType.QUERY, 
- mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {name: 'avg', order: 'desc'}, - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {name: 'avg', order: 'desc'}, - }, - ]); + testParsing( + // INPUT + [ + { + id: 5, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: ['dist'], + displayType: 'line', + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {name: 'avg', order: 'desc'}, + isHidden: false, + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: ['dist'], + displayType: MetricDisplayType.LINE, + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {name: 'avg', order: 'desc'}, + isHidden: false, + }, + ] + ); }); }); diff --git a/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.tsx b/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.tsx index a6db62f6a7829a..5246e600c0c8a7 100644 --- a/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.tsx +++ b/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.tsx @@ -214,6 +214,7 @@ export function parseMetricWidgetsQueryParam( : MetricDisplayType.LINE, focusedSeries: parseArrayParam(widget, 'focusedSeries', parseFocusedSeries), sort: parseSortParam(widget, 'sort'), + isHidden: parseBooleanParam(widget, 'isHidden') ?? 
false, }; switch (type) { diff --git a/static/app/views/ddm/utils/useStructuralSharing.tsx b/static/app/views/ddm/utils/useStructuralSharing.tsx index 4f6deb0323b74e..e052fc6d37a52d 100644 --- a/static/app/views/ddm/utils/useStructuralSharing.tsx +++ b/static/app/views/ddm/utils/useStructuralSharing.tsx @@ -56,11 +56,11 @@ export function structuralSharing(oldValue: T, newValue: T): T { return newValue; } -export const useStructuralSharing = (value: any) => { - const previeousValue = useRef(value); +export function useStructuralSharing(value: T): T { + const previousValue = useRef(value); return useMemo(() => { - const newValue = structuralSharing(previeousValue.current, value); - previeousValue.current = newValue; + const newValue = structuralSharing(previousValue.current, value); + previousValue.current = newValue; return newValue; }, [value]); -}; +} diff --git a/static/app/views/ddm/widget.tsx b/static/app/views/ddm/widget.tsx index 39aa8fe8ccfa39..ceed27e153459f 100644 --- a/static/app/views/ddm/widget.tsx +++ b/static/app/views/ddm/widget.tsx @@ -12,6 +12,7 @@ import type {SelectOption} from 'sentry/components/compactSelect'; import {CompactSelect} from 'sentry/components/compactSelect'; import type {Field} from 'sentry/components/ddm/metricSamplesTable'; import EmptyMessage from 'sentry/components/emptyMessage'; +import ErrorBoundary from 'sentry/components/errorBoundary'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import Panel from 'sentry/components/panels/panel'; import PanelBody from 'sentry/components/panels/panelBody'; @@ -31,10 +32,6 @@ import { } from 'sentry/utils/metrics'; import {metricDisplayTypeOptions} from 'sentry/utils/metrics/constants'; import {formatMRIField, MRIToField, parseMRI} from 'sentry/utils/metrics/mri'; -import { - getMetricValueNormalizer, - getNormalizedMetricUnit, -} from 'sentry/utils/metrics/normalizeMetricValue'; import type { FocusedMetricsSeries, MetricCorrelation, @@ -244,22 +241,24 @@ export 
const MetricWidget = memo( {queriesAreComplete ? ( - + + + ) : ( isCumulativeOp(s.operation)); - const firstUnit = - chartSeries.find(s => !s.hidden)?.unit || chartSeries[0]?.unit || 'none'; + const hasCumulativeOp = queries.some( + q => !isMetricFormula(q) && isCumulativeOp(q.op) + ); + const firstScalingFactor = chartSeries.find(s => !s.hidden)?.scalingFactor || 1; const focusArea = useFocusArea({ ...focusAreaProps, - sampleUnit: samples?.unit, - chartUnit: firstUnit, + scalingFactor: firstScalingFactor, chartRef, opts: { widgetIndex, @@ -519,35 +518,23 @@ export function getChartTimeseries( const series = data.data.flatMap((group, index) => { const query = filteredQueries[index]; - const metaUnit = data.meta[index]?.[1]?.unit; - + const meta = data.meta[index]; + const lastMetaEntry = meta[meta.length - 1]; + const unit = + (lastMetaEntry && 'unit' in lastMetaEntry && lastMetaEntry.unit) || 'none'; + const scalingFactor = + (lastMetaEntry && + 'scaling_factor' in lastMetaEntry && + lastMetaEntry.scaling_factor) || + 1; + const operation = isMetricFormula(query) ? 'count' : query.op; const isMultiQuery = filteredQueries.length > 1; - let unit = ''; - let operation = ''; - if (!isMetricFormula(query)) { - const parsed = parseMRI(query.mri); - unit = parsed?.unit ?? ''; - operation = query.op ?? ''; - } else { - // Treat formulas as if they were a single query with none as the unit and count as the operation - unit = 'none'; - } - - // TODO(arthur): fully switch to using the meta unit once it's available - if (metaUnit) { - unit = metaUnit; - } - - // We normalize metric units to make related units - // (e.g. 
seconds & milliseconds) render in the correct ratio - const normalizedUnit = getNormalizedMetricUnit(unit, operation); - const normalizeValue = getMetricValueNormalizer(unit, operation); - return group.map(entry => ({ - unit: normalizedUnit, + unit: unit, operation: operation, - values: entry.series.map(normalizeValue), + values: entry.series, + scalingFactor: scalingFactor, name: getMetricsSeriesName(query, entry.by, isMultiQuery), id: getMetricsSeriesId(query, entry.by), groupBy: entry.by, @@ -563,6 +550,7 @@ export function getChartTimeseries( seriesName: item.name, groupBy: item.groupBy, unit: item.unit, + scalingFactor: item.scalingFactor, operation: item.operation, color: chartPalette[item.id], hidden: focusedSeries && focusedSeries.size > 0 && !focusedSeries.has(item.id), diff --git a/static/app/views/discover/index.spec.tsx b/static/app/views/discover/index.spec.tsx index 1c9fb11f9b7b38..d4c844107d309e 100644 --- a/static/app/views/discover/index.spec.tsx +++ b/static/app/views/discover/index.spec.tsx @@ -1,10 +1,10 @@ -import selectEvent from 'react-select-event'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; import {RouteComponentPropsFixture} from 'sentry-fixture/routeComponentPropsFixture'; import {RouterContextFixture} from 'sentry-fixture/routerContextFixture'; import {render, screen} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import ProjectsStore from 'sentry/stores/projectsStore'; import {DiscoverLanding} from 'sentry/views/discover/landing'; @@ -84,7 +84,7 @@ describe('Discover > Landing', function () { expect(screen.getByText("You don't have access to this feature")).toBeInTheDocument(); }); - it('has the right sorts', function () { + it('has the right sorts', async function () { const org = OrganizationFixture({features}); render(); @@ -101,7 +101,7 @@ describe('Discover > Landing', function () { ]; // Open menu - 
selectEvent.openMenu(screen.getByRole('button', {name: 'Sort By My Queries'})); + await selectEvent.openMenu(screen.getByRole('button', {name: 'Sort By My Queries'})); // Check that all sorts are there expectedSorts.forEach(sort => diff --git a/static/app/views/discover/results.spec.tsx b/static/app/views/discover/results.spec.tsx index 5a88db2ee42522..3538147e21c4ce 100644 --- a/static/app/views/discover/results.spec.tsx +++ b/static/app/views/discover/results.spec.tsx @@ -1,11 +1,11 @@ import {browserHistory} from 'react-router'; -import selectEvent from 'react-select-event'; import {LocationFixture} from 'sentry-fixture/locationFixture'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {ProjectFixture} from 'sentry-fixture/project'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import * as PageFilterPersistence from 'sentry/components/organizations/pageFilters/persistence'; import ProjectsStore from 'sentry/stores/projectsStore'; diff --git a/static/app/views/discover/results.tsx b/static/app/views/discover/results.tsx index bcdcf016179d0f..cf8df209643f6c 100644 --- a/static/app/views/discover/results.tsx +++ b/static/app/views/discover/results.tsx @@ -299,14 +299,8 @@ export class Results extends Component { // If the view is not valid, redirect to a known valid state. const {location, organization, selection, isHomepage, savedQuery} = this.props; - const isReplayEnabled = organization.features.includes('session-replay'); - const defaultEventView = Object.assign({}, DEFAULT_EVENT_VIEW, { - fields: isReplayEnabled - ? DEFAULT_EVENT_VIEW.fields.concat(['replayId']) - : DEFAULT_EVENT_VIEW.fields, - }); - const query = isHomepage && savedQuery ? omit(savedQuery, 'id') : defaultEventView; + const query = isHomepage && savedQuery ? 
omit(savedQuery, 'id') : DEFAULT_EVENT_VIEW; const nextEventView = EventView.fromNewQueryWithLocation(query, location); if (nextEventView.project.length === 0 && selection.projects) { nextEventView.project = selection.projects; diff --git a/static/app/views/integrationOrganizationLink/index.spec.tsx b/static/app/views/integrationOrganizationLink/index.spec.tsx index 98597e5602e9c0..e25fa836a48d14 100644 --- a/static/app/views/integrationOrganizationLink/index.spec.tsx +++ b/static/app/views/integrationOrganizationLink/index.spec.tsx @@ -1,4 +1,3 @@ -import selectEvent from 'react-select-event'; import pick from 'lodash/pick'; import {ConfigFixture} from 'sentry-fixture/config'; import {OrganizationFixture} from 'sentry-fixture/organization'; @@ -6,6 +5,7 @@ import {VercelProviderFixture} from 'sentry-fixture/vercelIntegration'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {render, screen} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import ConfigStore from 'sentry/stores/configStore'; import {generateOrgSlugUrl} from 'sentry/utils'; diff --git a/static/app/views/integrationPipeline/awsLambdaCloudformation.spec.tsx b/static/app/views/integrationPipeline/awsLambdaCloudformation.spec.tsx index 010206179951b1..02c17fddb82167 100644 --- a/static/app/views/integrationPipeline/awsLambdaCloudformation.spec.tsx +++ b/static/app/views/integrationPipeline/awsLambdaCloudformation.spec.tsx @@ -1,8 +1,8 @@ -import selectEvent from 'react-select-event'; import * as qs from 'query-string'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import AwsLambdaCloudformation from 'sentry/views/integrationPipeline/awsLambdaCloudformation'; diff --git a/static/app/views/integrationPipeline/awsLambdaProjectSelect.spec.tsx 
b/static/app/views/integrationPipeline/awsLambdaProjectSelect.spec.tsx index 3fe0fffbf72de9..34e1f36a010921 100644 --- a/static/app/views/integrationPipeline/awsLambdaProjectSelect.spec.tsx +++ b/static/app/views/integrationPipeline/awsLambdaProjectSelect.spec.tsx @@ -1,7 +1,7 @@ -import selectEvent from 'react-select-event'; import {ProjectFixture} from 'sentry-fixture/project'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import AwsLambdaProjectSelect from 'sentry/views/integrationPipeline/awsLambdaProjectSelect'; diff --git a/static/app/views/issueDetails/groupPriority.tsx b/static/app/views/issueDetails/groupPriority.tsx index 4242620c252221..18c5af1ec7756c 100644 --- a/static/app/views/issueDetails/groupPriority.tsx +++ b/static/app/views/issueDetails/groupPriority.tsx @@ -44,10 +44,16 @@ function GroupPriority({group}: GroupDetailsPriorityProps) { ); }; + // We can assume that when there is not `priorityLockedAt`, there were no + // user edits to the priority. + const lastEditedBy = !group.priorityLockedAt ? 
'system' : undefined; + return ( ); } diff --git a/static/app/views/issueDetails/groupReplays/groupReplays.spec.tsx b/static/app/views/issueDetails/groupReplays/groupReplays.spec.tsx index 4d157dedc3f8dc..85e489eca47bfc 100644 --- a/static/app/views/issueDetails/groupReplays/groupReplays.spec.tsx +++ b/static/app/views/issueDetails/groupReplays/groupReplays.spec.tsx @@ -22,6 +22,8 @@ type InitializeOrgProps = { }; import {ReplayListFixture} from 'sentry-fixture/replayList'; +import {resetMockDate, setMockDate} from 'sentry-test/utils'; + const REPLAY_ID_1 = '346789a703f6454384f1de473b8b9fcc'; const REPLAY_ID_2 = 'b05dae9b6be54d21a4d5ad9f8f02b780'; @@ -61,6 +63,9 @@ describe('GroupReplays', () => { body: [], }); }); + afterEach(() => { + resetMockDate(); + }); describe('Replay Feature Disabled', () => { const mockGroup = GroupFixture(); @@ -330,7 +335,7 @@ describe('GroupReplays', () => { }); // Mock the system date to be 2022-09-28 - jest.useFakeTimers().setSystemTime(new Date('Sep 28, 2022 11:29:13 PM UTC')); + setMockDate(new Date('Sep 28, 2022 11:29:13 PM UTC')); render(, { context: routerContext, diff --git a/static/app/views/issueDetails/quickTrace/index.spec.tsx b/static/app/views/issueDetails/quickTrace/index.spec.tsx deleted file mode 100644 index 952476bda059ff..00000000000000 --- a/static/app/views/issueDetails/quickTrace/index.spec.tsx +++ /dev/null @@ -1,33 +0,0 @@ -import {EventFixture} from 'sentry-fixture/event'; -import {GroupFixture} from 'sentry-fixture/group'; -import {LocationFixture} from 'sentry-fixture/locationFixture'; -import {OrganizationFixture} from 'sentry-fixture/organization'; - -import {render} from 'sentry-test/reactTestingLibrary'; - -import QuickTrace from 'sentry/views/issueDetails/quickTrace'; - -describe('IssueQuickTrace', () => { - const defaultProps = { - organization: OrganizationFixture({features: ['performance-view']}), - event: EventFixture({contexts: {trace: {trace_id: 100}}}), - group: GroupFixture(), - location: 
LocationFixture(), - }; - - it('renders nothing without performance-view flag', () => { - const {container} = render( - - ); - - expect(container).toBeEmptyDOMElement(); - }); - - it('renders nothing if event does not have a trace context', () => { - const {container} = render( - - ); - - expect(container).toBeEmptyDOMElement(); - }); -}); diff --git a/static/app/views/issueDetails/quickTrace/index.tsx b/static/app/views/issueDetails/quickTrace/index.tsx deleted file mode 100644 index 11902fcdc9ee1f..00000000000000 --- a/static/app/views/issueDetails/quickTrace/index.tsx +++ /dev/null @@ -1,53 +0,0 @@ -import {useContext} from 'react'; -import {useTheme} from '@emotion/react'; -import styled from '@emotion/styled'; -import type {Location} from 'history'; - -import Placeholder from 'sentry/components/placeholder'; -import {space} from 'sentry/styles/space'; -import type {Organization} from 'sentry/types'; -import type {Event} from 'sentry/types/event'; -import {QuickTraceContext} from 'sentry/utils/performance/quickTrace/quickTraceContext'; -import useMedia from 'sentry/utils/useMedia'; - -import IssueQuickTrace from './issueQuickTrace'; - -type Props = { - event: Event; - location: Location; - organization: Organization; -}; - -function QuickTrace({event, organization, location}: Props) { - const theme = useTheme(); - const hasPerformanceView = organization.features.includes('performance-view'); - const hasTraceContext = Boolean(event.contexts?.trace?.trace_id); - const quickTrace = useContext(QuickTraceContext); - - const isSmallViewport = useMedia(`(max-width: ${theme.breakpoints.small})`); - - if (isSmallViewport || !hasPerformanceView || !hasTraceContext) { - return null; - } - - if (quickTrace?.isLoading) { - return ; - } - - return ( - - ); -} - -const TracePlaceholder = styled(Placeholder)` - width: auto; - max-width: 300px; - margin-top: ${space(0.75)}; -`; - -export default QuickTrace; diff --git 
a/static/app/views/issueDetails/quickTrace/issueQuickTrace.tsx b/static/app/views/issueDetails/quickTrace/issueQuickTrace.tsx deleted file mode 100644 index 70b5eee8f81fa5..00000000000000 --- a/static/app/views/issueDetails/quickTrace/issueQuickTrace.tsx +++ /dev/null @@ -1,98 +0,0 @@ -import {Fragment} from 'react'; -import styled from '@emotion/styled'; -import type {Location} from 'history'; - -import ErrorBoundary from 'sentry/components/errorBoundary'; -import QuickTrace from 'sentry/components/quickTrace'; -import {space} from 'sentry/styles/space'; -import type {Organization} from 'sentry/types'; -import type {Event} from 'sentry/types/event'; -import {defined} from 'sentry/utils'; -import TraceMetaQuery from 'sentry/utils/performance/quickTrace/traceMetaQuery'; -import type {QuickTraceQueryChildrenProps} from 'sentry/utils/performance/quickTrace/types'; -import {getTraceTimeRangeFromEvent} from 'sentry/utils/performance/quickTrace/utils'; -import useRouteAnalyticsParams from 'sentry/utils/routeAnalytics/useRouteAnalyticsParams'; -import {TraceLink} from 'sentry/views/issueDetails/quickTrace/traceLink'; - -type Props = { - event: Event; - location: Location; - organization: Organization; - quickTrace: undefined | QuickTraceQueryChildrenProps; -}; - -function IssueQuickTrace({event, location, organization, quickTrace}: Props) { - const isTraceMissing = - !quickTrace || - quickTrace.error || - ((!defined(quickTrace.trace) || quickTrace.trace.length === 0) && - (!quickTrace.orphanErrors || quickTrace.orphanErrors?.length === 0)); - const traceId = event.contexts?.trace?.trace_id ?? ''; - const {start, end} = getTraceTimeRangeFromEvent(event); - - useRouteAnalyticsParams({ - trace_status: isTraceMissing - ? quickTrace?.type === 'missing' - ? 
'transaction missing' - : 'trace missing' - : 'success', - }); - - if (isTraceMissing) { - return ( - - - - ); - } - - return ( - - - - {metaResults => ( - - - - - )} - - - - ); -} - -const QuickTraceWrapper = styled('div')` - display: flex; - align-items: center; - gap: ${space(0.75)}; - flex-wrap: wrap; - margin-top: ${space(0.75)}; - height: 20px; -`; - -export default IssueQuickTrace; diff --git a/static/app/views/issueDetails/quickTrace/usePromptCheck.tsx b/static/app/views/issueDetails/quickTrace/usePromptCheck.tsx deleted file mode 100644 index af6e67d8a44b7e..00000000000000 --- a/static/app/views/issueDetails/quickTrace/usePromptCheck.tsx +++ /dev/null @@ -1,46 +0,0 @@ -import {useCallback, useEffect, useState} from 'react'; - -import {promptsCheck, promptsUpdate} from 'sentry/actionCreators/prompts'; -import type {Organization} from 'sentry/types'; -import {promptIsDismissed} from 'sentry/utils/promptIsDismissed'; -import useApi from 'sentry/utils/useApi'; - -type Opts = { - feature: string; - organization: Organization; - projectId: string; -}; - -function usePromptCheck({feature, organization, projectId}: Opts) { - const api = useApi(); - - const [shouldShowPrompt, setShouldShow] = useState(null); - - useEffect(() => { - promptsCheck(api, { - organization, - projectId, - feature, - }).then(data => { - setShouldShow(!promptIsDismissed(data ?? 
{}, 30)); - }); - }, [api, feature, organization, projectId]); - - const snoozePrompt = useCallback(async () => { - const data = { - projectId, - organization, - feature, - status: 'snoozed' as const, - }; - await promptsUpdate(api, data); - setShouldShow(false); - }, [api, feature, organization, projectId]); - - return { - shouldShowPrompt, - snoozePrompt, - }; -} - -export default usePromptCheck; diff --git a/static/app/views/issueList/actions/headers.tsx b/static/app/views/issueList/actions/headers.tsx index ec71f79985c11e..cd7005686860ad 100644 --- a/static/app/views/issueList/actions/headers.tsx +++ b/static/app/views/issueList/actions/headers.tsx @@ -129,7 +129,7 @@ const EventsOrUsersLabel = styled(ToolbarHeader)` const PriorityLabel = styled('div')<{isSavedSearchesOpen?: boolean}>` justify-content: flex-end; text-align: right; - width: 85px; + width: 70px; margin: 0 ${space(2)}; /* prettier-ignore */ diff --git a/static/app/views/issueList/utils.spec.tsx b/static/app/views/issueList/utils.spec.tsx index e1cfd8e8bf40ac..54578d700ec065 100644 --- a/static/app/views/issueList/utils.spec.tsx +++ b/static/app/views/issueList/utils.spec.tsx @@ -16,10 +16,12 @@ describe('getTabs', () => { ]); }); - it('should add inbox tab for issue-priority-ui feature', () => { - expect(getTabs(OrganizationFixture({features: ['issue-priority-ui']}))[0]).toEqual([ + it('should replace "unresolved" with "prioritized" for issue-priority-ui feature', () => { + const tabs = getTabs(OrganizationFixture({features: ['issue-priority-ui']})); + + expect(tabs[0]).toEqual([ 'is:unresolved issue.priority:[high, medium]', - expect.objectContaining({name: 'Inbox'}), + expect.objectContaining({name: 'Prioritized'}), ]); }); }); diff --git a/static/app/views/issueList/utils.tsx b/static/app/views/issueList/utils.tsx index 677c930c5be476..3ae084a8cfea86 100644 --- a/static/app/views/issueList/utils.tsx +++ b/static/app/views/issueList/utils.tsx @@ -5,7 +5,7 @@ import type {Organization} from 
'sentry/types'; export enum Query { FOR_REVIEW = 'is:unresolved is:for_review assigned_or_suggested:[me, my_teams, none]', - INBOX = NEW_DEFAULT_QUERY, + PRIORITIZED = NEW_DEFAULT_QUERY, UNRESOLVED = 'is:unresolved', IGNORED = 'is:ignored', NEW = 'is:new', @@ -46,14 +46,16 @@ type OverviewTab = { * Get a list of currently active tabs */ export function getTabs(organization: Organization) { + const hasIssuePriority = organization.features.includes('issue-priority-ui'); + const tabs: Array<[string, OverviewTab]> = [ [ - Query.INBOX, + Query.PRIORITIZED, { - name: t('Inbox'), - analyticsName: 'inbox', + name: t('Prioritized'), + analyticsName: 'prioritized', count: true, - enabled: organization.features.includes('issue-priority-ui'), + enabled: hasIssuePriority, }, ], [ @@ -62,7 +64,7 @@ export function getTabs(organization: Organization) { name: t('Unresolved'), analyticsName: 'unresolved', count: true, - enabled: true, + enabled: !hasIssuePriority, }, ], [ diff --git a/static/app/views/monitors/components/monitorForm.spec.tsx b/static/app/views/monitors/components/monitorForm.spec.tsx index 042ec6e04dbab1..efb188c0812939 100644 --- a/static/app/views/monitors/components/monitorForm.spec.tsx +++ b/static/app/views/monitors/components/monitorForm.spec.tsx @@ -1,4 +1,3 @@ -import selectEvent from 'react-select-event'; import {MemberFixture} from 'sentry-fixture/member'; import {MonitorFixture} from 'sentry-fixture/monitor'; import {OrganizationFixture} from 'sentry-fixture/organization'; @@ -7,6 +6,7 @@ import {UserFixture} from 'sentry-fixture/user'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import {useMembers} from 'sentry/utils/useMembers'; import useProjects from 'sentry/utils/useProjects'; @@ -118,7 +118,7 @@ describe('MonitorForm', function () { const notifySelect = screen.getByRole('textbox', {name: 'Notify'}); - 
selectEvent.openMenu(notifySelect); + await selectEvent.openMenu(notifySelect); expect( screen.getByRole('menuitemcheckbox', {name: 'John Smith'}) ).toBeInTheDocument(); @@ -195,7 +195,7 @@ describe('MonitorForm', function () { expect(screen.getByText(project.slug)).toBeInTheDocument(); // Schedule type - selectEvent.openMenu(screen.getByRole('textbox', {name: 'Schedule Type'})); + await selectEvent.openMenu(screen.getByRole('textbox', {name: 'Schedule Type'})); const crontabOption = screen.getByRole('menuitemradio', {name: 'Crontab'}); expect(crontabOption).toBeChecked(); await userEvent.click(crontabOption); @@ -206,7 +206,7 @@ describe('MonitorForm', function () { ); // Schedule timezone - selectEvent.openMenu(screen.getByRole('textbox', {name: 'Timezone'})); + await selectEvent.openMenu(screen.getByRole('textbox', {name: 'Timezone'})); const losAngelesOption = screen.getByRole('menuitemradio', {name: 'Los Angeles'}); expect(losAngelesOption).toBeChecked(); await userEvent.click(losAngelesOption); @@ -220,7 +220,7 @@ describe('MonitorForm', function () { expect(screen.getByRole('spinbutton', {name: 'Recovery Tolerance'})).toHaveValue(2); // Alert rule configuration - selectEvent.openMenu(screen.getByRole('textbox', {name: 'Notify'})); + await selectEvent.openMenu(screen.getByRole('textbox', {name: 'Notify'})); const memberOption = screen.getByRole('menuitemcheckbox', {name: member.user?.name}); expect(memberOption).toBeChecked(); await userEvent.keyboard('{Escape}'); diff --git a/static/app/views/monitors/components/overviewTimeline/timelineTableRow.tsx b/static/app/views/monitors/components/overviewTimeline/timelineTableRow.tsx index 42b865b7738262..558ace40f12466 100644 --- a/static/app/views/monitors/components/overviewTimeline/timelineTableRow.tsx +++ b/static/app/views/monitors/components/overviewTimeline/timelineTableRow.tsx @@ -6,6 +6,7 @@ import styled from '@emotion/styled'; import {Button} from 'sentry/components/button'; import {openConfirmModal} from 
'sentry/components/confirm'; import {DropdownMenu} from 'sentry/components/dropdownMenu'; +import ProjectBadge from 'sentry/components/idBadge/projectBadge'; import Tag from 'sentry/components/tag'; import {Tooltip} from 'sentry/components/tooltip'; import {IconEllipsis} from 'sentry/icons'; @@ -13,6 +14,7 @@ import {t, tct} from 'sentry/locale'; import {fadeIn} from 'sentry/styles/animations'; import {space} from 'sentry/styles/space'; import type {ObjectStatus} from 'sentry/types'; +import {trimSlug} from 'sentry/utils/trimSlug'; import useOrganization from 'sentry/utils/useOrganization'; import {StatusToggleButton} from 'sentry/views/monitors/components/statusToggleButton'; import type {Monitor} from 'sentry/views/monitors/types'; @@ -68,7 +70,18 @@ export function TimelineTableRow({ {monitor.name} {isDisabled && {t('Disabled')}} - {scheduleAsText(monitor.config)} + + {scheduleAsText(monitor.config)} + + + {trimSlug(monitor.project.slug)} + + {onToggleStatus && ( @@ -219,13 +232,24 @@ const DetailsHeadline = styled('div')` grid-template-columns: 1fr minmax(30px, max-content); `; +const ProjectScheduleDetails = styled('div')` + display: flex; + gap: ${space(1)}; + flex-wrap: wrap; +`; + +const ProjectDetails = styled('div')` + display: flex; + gap: ${space(0.5)}; +`; + const Name = styled('h3')` font-size: ${p => p.theme.fontSizeLarge}; margin-bottom: ${space(0.25)}; word-break: break-word; `; -const Schedule = styled('small')` +const DetailsText = styled('small')` color: ${p => p.theme.subText}; font-size: ${p => p.theme.fontSizeSmall}; `; diff --git a/static/app/views/organizationCreate/index.spec.tsx b/static/app/views/organizationCreate/index.spec.tsx index 23f2122db85357..fc36d1be42113d 100644 --- a/static/app/views/organizationCreate/index.spec.tsx +++ b/static/app/views/organizationCreate/index.spec.tsx @@ -1,7 +1,7 @@ -import selectEvent from 'react-select-event'; import {OrganizationFixture} from 'sentry-fixture/organization'; import {render, screen, 
userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import selectEvent from 'sentry-test/selectEvent'; import ConfigStore from 'sentry/stores/configStore'; import OrganizationCreate, { diff --git a/static/app/views/performance/browser/resources/resourceView/index.tsx b/static/app/views/performance/browser/resources/resourceView/index.tsx index 4903e815725115..72e8631dc57111 100644 --- a/static/app/views/performance/browser/resources/resourceView/index.tsx +++ b/static/app/views/performance/browser/resources/resourceView/index.tsx @@ -61,12 +61,14 @@ function ResourceView() { return ( - + + + @@ -200,6 +202,10 @@ export function TransactionSelector({ ); } +export const SpanTimeChartsContainer = styled('div')` + margin-bottom: ${space(2)}; +`; + export const FilterOptionsContainer = styled('div')<{columnCount: number}>` display: grid; grid-template-columns: repeat(${props => props.columnCount}, 1fr); diff --git a/static/app/views/performance/browser/webVitals/components/performanceScoreBreakdownChart.spec.tsx b/static/app/views/performance/browser/webVitals/components/performanceScoreBreakdownChart.spec.tsx index f10d504b497817..5cf103b1d645ea 100644 --- a/static/app/views/performance/browser/webVitals/components/performanceScoreBreakdownChart.spec.tsx +++ b/static/app/views/performance/browser/webVitals/components/performanceScoreBreakdownChart.spec.tsx @@ -137,11 +137,13 @@ describe('PerformanceScoreBreakdownChart', function () { 'weighted_performance_score(measurements.score.fcp)', 'weighted_performance_score(measurements.score.cls)', 'weighted_performance_score(measurements.score.fid)', + 'weighted_performance_score(measurements.score.inp)', 'weighted_performance_score(measurements.score.ttfb)', 'performance_score(measurements.score.lcp)', 'performance_score(measurements.score.fcp)', 'performance_score(measurements.score.cls)', 'performance_score(measurements.score.fid)', + 'performance_score(measurements.score.inp)', 
'performance_score(measurements.score.ttfb)', 'count()', ], @@ -208,11 +210,13 @@ describe('PerformanceScoreBreakdownChart', function () { 'weighted_performance_score(measurements.score.fcp)', 'weighted_performance_score(measurements.score.cls)', 'weighted_performance_score(measurements.score.fid)', + 'weighted_performance_score(measurements.score.inp)', 'weighted_performance_score(measurements.score.ttfb)', 'performance_score(measurements.score.lcp)', 'performance_score(measurements.score.fcp)', 'performance_score(measurements.score.cls)', 'performance_score(measurements.score.fid)', + 'performance_score(measurements.score.inp)', 'performance_score(measurements.score.ttfb)', 'count()', ], diff --git a/static/app/views/performance/browser/webVitals/components/performanceScoreBreakdownChart.tsx b/static/app/views/performance/browser/webVitals/components/performanceScoreBreakdownChart.tsx index 4a02cec2764bdd..22c52a4f238118 100644 --- a/static/app/views/performance/browser/webVitals/components/performanceScoreBreakdownChart.tsx +++ b/static/app/views/performance/browser/webVitals/components/performanceScoreBreakdownChart.tsx @@ -56,7 +56,7 @@ export function PerformanceScoreBreakdownChart({transaction}: Props) { const shouldUseStoredScores = useStoredScoresSetting(); const shouldReplaceFidWithInp = useReplaceFidWithInpSetting(); const theme = useTheme(); - const segmentColors = theme.charts.getColorPalette(3); + const segmentColors = [...theme.charts.getColorPalette(3).slice(0, 5), theme.gray200]; const pageFilters = usePageFilters(); @@ -234,6 +234,9 @@ export function PerformanceScoreBreakdownChart({transaction}: Props) { preserveIncompletePoints tooltipFormatterOptions={{ nameFormatter: (name, seriesParams: any) => { + if (shouldReplaceFidWithInp && name === 'FID') { + return `${name} Score (${t('Deprecated')})`; + } const timestamp = seriesParams?.data[0]; const weights = weightsSeries.find( series => series.name === timestamp diff --git 
a/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx b/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx index 4f5ee23439d647..2f6d75ec4b5ce6 100644 --- a/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx +++ b/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx @@ -102,7 +102,7 @@ describe('PageOverview', function () { }); render(); await screen.findAllByText('Interactions'); - userEvent.click(screen.getAllByText('Interactions')[0]); + await userEvent.click(screen.getAllByText('Interactions')[0]); await waitFor(() => expect(eventsMock).toHaveBeenLastCalledWith( '/organizations/org-slug/events/', diff --git a/static/app/views/performance/browser/webVitals/performanceScoreChart.tsx b/static/app/views/performance/browser/webVitals/performanceScoreChart.tsx index 1ba08c2fe9a8d3..fa626c1141263e 100644 --- a/static/app/views/performance/browser/webVitals/performanceScoreChart.tsx +++ b/static/app/views/performance/browser/webVitals/performanceScoreChart.tsx @@ -25,7 +25,7 @@ type Props = { }; export const ORDER = ['lcp', 'fcp', 'fid', 'cls', 'ttfb']; -export const ORDER_WITH_INP = ['lcp', 'fcp', 'inp', 'cls', 'ttfb']; +export const ORDER_WITH_INP = ['lcp', 'fcp', 'inp', 'cls', 'ttfb', 'fid']; export function PerformanceScoreChart({ projectScore, diff --git a/static/app/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/useProjectRawWebVitalsQuery.tsx b/static/app/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/useProjectRawWebVitalsQuery.tsx index c2558235d63223..f940d391711971 100644 --- a/static/app/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/useProjectRawWebVitalsQuery.tsx +++ b/static/app/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/useProjectRawWebVitalsQuery.tsx @@ -25,6 +25,7 @@ export const useProjectRawWebVitalsQuery = ({transaction, tag, dataset}: Props = 'p75(measurements.cls)', 
'p75(measurements.ttfb)', 'p75(measurements.fid)', + 'p75(measurements.inp)', 'p75(transaction.duration)', 'count_web_vitals(measurements.lcp, any)', 'count_web_vitals(measurements.fcp, any)', @@ -35,7 +36,9 @@ export const useProjectRawWebVitalsQuery = ({transaction, tag, dataset}: Props = ], name: 'Web Vitals', query: [ - 'transaction.op:pageload', + // TODO: inp spans don't have a transaction.op. + // Plan to update this filter to also check span.op:ui.interaction.click once we have the ability. + 'transaction.op:[pageload,""]', ...(transaction ? [`transaction:"${transaction}"`] : []), ...(tag ? [`{tag.key}:"${tag.name}"`] : []), ].join(' '), @@ -57,13 +60,5 @@ export const useProjectRawWebVitalsQuery = ({transaction, tag, dataset}: Props = skipAbort: true, referrer: 'api.performance.browser.web-vitals.project', }); - // Fake INP data with FID data - // TODO(edwardgou): Remove this once INP is queryable in discover - if (result.data?.data[0]) { - result.data.data[0]['count_web_vitals(measurements.inp, any)'] = - result.data.data[0]['count_web_vitals(measurements.fid, any)']; - result.data.data[0]['p75(measurements.inp)'] = - result.data.data[0]['p75(measurements.fid)']; - } return result; }; diff --git a/static/app/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/useProjectRawWebVitalsValuesTimeseriesQuery.tsx b/static/app/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/useProjectRawWebVitalsValuesTimeseriesQuery.tsx index a6baedcfbe5094..9e2b1d0c31e0a6 100644 --- a/static/app/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/useProjectRawWebVitalsValuesTimeseriesQuery.tsx +++ b/static/app/views/performance/browser/webVitals/utils/queries/rawWebVitalsQueries/useProjectRawWebVitalsValuesTimeseriesQuery.tsx @@ -30,14 +30,15 @@ export const useProjectRawWebVitalsValuesTimeseriesQuery = ({ 'p75(measurements.cls)', 'p75(measurements.ttfb)', 'p75(measurements.fid)', + 'p75(measurements.inp)', 'count()', - 
// TODO: Remove this once we can query for INP. - // Currently using this to fake INP count data. - 'count_web_vitals(measurements.fid,any)', + 'count_scores(measurements.score.inp)', ], name: 'Web Vitals', query: [ - 'transaction.op:pageload', + // TODO: inp spans don't have a transaction.op. + // Plan to update this filter to also check span.op:ui.interaction.click once we have the ability. + 'transaction.op:[pageload,""]', ...(transaction ? [`transaction:"${transaction}"`] : []), ].join(' '), version: 2, @@ -104,8 +105,9 @@ export const useProjectRawWebVitalsValuesTimeseriesQuery = ({ {key: 'p75(measurements.fcp)', series: data.fcp}, {key: 'p75(measurements.ttfb)', series: data.ttfb}, {key: 'p75(measurements.fid)', series: data.fid}, + {key: 'p75(measurements.inp)', series: data.inp}, {key: 'count()', series: data.count}, - {key: 'count_web_vitals(measurements.fid,any)', series: data.countInp}, + {key: 'count_scores(measurements.score.inp)', series: data.countInp}, ]; map.forEach(({key, series}) => { if (result?.data?.[key].data[index][1][0].count !== null) { @@ -117,9 +119,5 @@ export const useProjectRawWebVitalsValuesTimeseriesQuery = ({ }); }); - // Fake INP data with FID data - // TODO(edwardgou): Remove this once INP is queryable in discover - data.inp = data.fid; - return {data, isLoading: result.isLoading}; }; diff --git a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/calculatePerformanceScoreFromStored.tsx b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/calculatePerformanceScoreFromStored.tsx index 8cd80bae803297..9a027a67c14af2 100644 --- a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/calculatePerformanceScoreFromStored.tsx +++ b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/calculatePerformanceScoreFromStored.tsx @@ -31,9 +31,16 @@ function getTotalScore(data: TableDataRow): number { return 
data[`avg(measurements.score.total)`] as number; } +function getWebVitalScoreCount( + data: TableDataRow, + webVital: WebVitals | 'total' +): number { + return data[`count_scores(measurements.score.${webVital})`] as number; +} + function hasWebVitalScore(data: TableDataRow, webVital: WebVitals): boolean { if (data.hasOwnProperty(`count_scores(measurements.score.${webVital})`)) { - return (data[`count_scores(measurements.score.${webVital})`] as number) > 0; + return getWebVitalScoreCount(data, webVital) > 0; } return false; } @@ -65,12 +72,36 @@ export function getWebVitalScores(data?: TableDataRow): ProjectScore { fidScore: hasFid ? Math.round(getWebVitalScore(data, 'fid') * 100) : undefined, inpScore: hasInp ? Math.round(getWebVitalScore(data, 'inp') * 100) : undefined, totalScore: Math.round(getTotalScore(data) * 100), - lcpWeight: Math.round(getWebVitalWeight(data, 'lcp') * 100), - fcpWeight: Math.round(getWebVitalWeight(data, 'fcp') * 100), - clsWeight: Math.round(getWebVitalWeight(data, 'cls') * 100), - ttfbWeight: Math.round(getWebVitalWeight(data, 'ttfb') * 100), - fidWeight: Math.round(getWebVitalWeight(data, 'fid') * 100), - inpWeight: Math.round(getWebVitalWeight(data, 'inp') * 100), + ...calculateWeights(data), }; return scores; } + +const calculateWeights = (data: TableDataRow) => { + // We need to do this because INP and pageLoads are different score profiles + const inpScoreCount = getWebVitalScoreCount(data, 'inp') || 0; + const totalScoreCount = getWebVitalScoreCount(data, 'total'); + const pageLoadCount = totalScoreCount - inpScoreCount; + + const inpWeight = getWebVitalWeight(data, 'inp'); + const inpActualWeight = Math.round( + ((inpWeight * inpScoreCount) / totalScoreCount) * 100 + ); + + const pageLoadWebVitals: WebVitals[] = ['lcp', 'fcp', 'cls', 'ttfb', 'fid']; + const [lcpWeight, fcpWeight, clsWeight, ttfbWeight, fidWeight] = pageLoadWebVitals.map( + webVital => { + const weight = getWebVitalWeight(data, webVital); + const actualWeight = 
Math.round(((weight * pageLoadCount) / totalScoreCount) * 100); + return actualWeight; + } + ); + return { + lcpWeight, + fcpWeight, + clsWeight, + ttfbWeight, + fidWeight, + inpWeight: inpActualWeight, + }; +}; diff --git a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useProjectWebVitalsScoresQuery.tsx b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useProjectWebVitalsScoresQuery.tsx index e5a93b48b4505c..15f0bba5841ddf 100644 --- a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useProjectWebVitalsScoresQuery.tsx +++ b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useProjectWebVitalsScoresQuery.tsx @@ -27,6 +27,7 @@ export const useProjectWebVitalsScoresQuery = ({ const pageFilters = usePageFilters(); const location = useLocation(); const shouldReplaceFidWithInp = useReplaceFidWithInpSetting(); + const inpOrFid = shouldReplaceFidWithInp ? 'inp' : 'fid'; const projectEventView = EventView.fromNewQueryWithPageFilters( { @@ -34,13 +35,13 @@ export const useProjectWebVitalsScoresQuery = ({ 'performance_score(measurements.score.lcp)', 'performance_score(measurements.score.fcp)', 'performance_score(measurements.score.cls)', - 'performance_score(measurements.score.fid)', + `performance_score(measurements.score.${inpOrFid})`, 'performance_score(measurements.score.ttfb)', 'avg(measurements.score.total)', 'avg(measurements.score.weight.lcp)', 'avg(measurements.score.weight.fcp)', 'avg(measurements.score.weight.cls)', - 'avg(measurements.score.weight.fid)', + `avg(measurements.score.weight.${inpOrFid})`, 'avg(measurements.score.weight.ttfb)', 'count()', 'count_scores(measurements.score.total)', @@ -48,21 +49,16 @@ export const useProjectWebVitalsScoresQuery = ({ 'count_scores(measurements.score.fcp)', 'count_scores(measurements.score.cls)', 'count_scores(measurements.score.ttfb)', - 'count_scores(measurements.score.fid)', + 
`count_scores(measurements.score.${inpOrFid})`, ...(weightWebVital !== 'total' - ? [ - // TODO: Remove this once we can query for INP. - `sum(measurements.score.weight.${ - shouldReplaceFidWithInp && weightWebVital === 'inp' - ? 'fid' - : weightWebVital - })`, - ] + ? [`sum(measurements.score.weight.${weightWebVital})`] : []), ], name: 'Web Vitals', query: [ - 'transaction.op:pageload', + // TODO: inp spans don't have a transaction.op. + // Plan to update this filter to also check span.op:ui.interaction.click once we have the ability. + 'transaction.op:[pageload,""]', ...(transaction ? [`transaction:"${transaction}"`] : []), ...(tag ? [`${tag.key}:"${tag.name}"`] : []), ].join(' '), @@ -86,20 +82,5 @@ export const useProjectWebVitalsScoresQuery = ({ referrer: 'api.performance.browser.web-vitals.project-scores', }); - if ( - result.status === 'success' && - result.data?.data?.[0]?.['avg(measurements.score.weight.fid)'] !== undefined && - result.data?.data?.[0]?.['count_scores(measurements.score.fid)'] !== undefined && - result.data?.data?.[0]?.['performance_score(measurements.score.fid)'] !== undefined - ) { - // Fake INP data with FID data - // TODO(edwardgou): Remove this once INP is queryable in discover - result.data.data[0]['avg(measurements.score.weight.inp)'] = - result.data.data[0]['avg(measurements.score.weight.fid)']; - result.data.data[0]['count_scores(measurements.score.inp)'] = - result.data.data[0]['count_scores(measurements.score.fid)']; - result.data.data[0]['performance_score(measurements.score.inp)'] = - result.data.data[0]['performance_score(measurements.score.fid)']; - } return result; }; diff --git a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useProjectWebVitalsScoresTimeseriesQuery.tsx b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useProjectWebVitalsScoresTimeseriesQuery.tsx index fd68ecbda395aa..332ded001258f8 100644 --- 
a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useProjectWebVitalsScoresTimeseriesQuery.tsx +++ b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useProjectWebVitalsScoresTimeseriesQuery.tsx @@ -42,17 +42,21 @@ export const useProjectWebVitalsScoresTimeseriesQuery = ({ 'weighted_performance_score(measurements.score.fcp)', 'weighted_performance_score(measurements.score.cls)', 'weighted_performance_score(measurements.score.fid)', + 'weighted_performance_score(measurements.score.inp)', 'weighted_performance_score(measurements.score.ttfb)', 'performance_score(measurements.score.lcp)', 'performance_score(measurements.score.fcp)', 'performance_score(measurements.score.cls)', 'performance_score(measurements.score.fid)', + 'performance_score(measurements.score.inp)', 'performance_score(measurements.score.ttfb)', 'count()', ], name: 'Web Vitals', query: [ - 'transaction.op:pageload has:measurements.score.total', + // TODO: inp spans don't have a transaction.op. + // Plan to update this filter to also check span.op:ui.interaction.click once we have the ability. + 'transaction.op:[pageload,""] has:measurements.score.total', ...(transaction ? [`transaction:"${transaction}"`] : []), ...(tag ? 
[`${tag.key}:"${tag.name}"`] : []), ].join(' '), @@ -110,7 +114,7 @@ export const useProjectWebVitalsScoresTimeseriesQuery = ({ result?.data?.['weighted_performance_score(measurements.score.lcp)']?.data.forEach( (interval, index) => { // Weighted data - ['lcp', 'fcp', 'cls', 'ttfb', 'fid'].forEach(webVital => { + ['lcp', 'fcp', 'cls', 'ttfb', 'fid', 'inp'].forEach(webVital => { data[webVital].push({ value: result?.data?.[`weighted_performance_score(measurements.score.${webVital})`] @@ -119,7 +123,7 @@ export const useProjectWebVitalsScoresTimeseriesQuery = ({ }); }); // Unweighted data - ['lcp', 'fcp', 'cls', 'ttfb', 'fid'].forEach(webVital => { + ['lcp', 'fcp', 'cls', 'ttfb', 'fid', 'inp'].forEach(webVital => { // Capitalize first letter of webVital const capitalizedWebVital = webVital.charAt(0).toUpperCase() + webVital.slice(1); data[`unweighted${capitalizedWebVital}`].push({ @@ -133,9 +137,5 @@ export const useProjectWebVitalsScoresTimeseriesQuery = ({ } ); - // Fake INP data with FID data - // TODO(edwardgou): Remove this once INP is queryable in discover - data.inp = data.fid; - data.unweightedInp = data.unweightedFid; return {data, isLoading: result.isLoading}; }; diff --git a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionSamplesWebVitalsScoresQuery.tsx b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionSamplesWebVitalsScoresQuery.tsx index 0ced6cfcc84d43..3f167d27ae9c19 100644 --- a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionSamplesWebVitalsScoresQuery.tsx +++ b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionSamplesWebVitalsScoresQuery.tsx @@ -15,7 +15,6 @@ import { SORTABLE_INDEXED_FIELDS, SORTABLE_INDEXED_SCORE_FIELDS, } from 'sentry/views/performance/browser/webVitals/utils/types'; -import {useReplaceFidWithInpSetting} from 
'sentry/views/performance/browser/webVitals/utils/useReplaceFidWithInpSetting'; import {useStoredScoresSetting} from 'sentry/views/performance/browser/webVitals/utils/useStoredScoresSetting'; import {useWebVitalsSort} from 'sentry/views/performance/browser/webVitals/utils/useWebVitalsSort'; @@ -44,7 +43,6 @@ export const useTransactionSamplesWebVitalsScoresQuery = ({ const pageFilters = usePageFilters(); const location = useLocation(); const shouldUseStoredScores = useStoredScoresSetting(); - const shouldReplaceFidWithInp = useReplaceFidWithInpSetting(); const filteredSortableFields = shouldUseStoredScores ? SORTABLE_INDEXED_FIELDS @@ -106,8 +104,6 @@ export const useTransactionSamplesWebVitalsScoresQuery = ({ referrer: 'api.performance.browser.web-vitals.transaction', }); - // TODO: Remove this once we can query for INP. - const webVitalKey = shouldReplaceFidWithInp && webVital === 'fid' ? 'inp' : webVital; const toNumber = (item: ReactText) => (item ? parseFloat(item.toString()) : undefined); const tableData: TransactionSampleRowWithScore[] = !isLoading && data?.data.length @@ -121,7 +117,6 @@ export const useTransactionSamplesWebVitalsScoresQuery = ({ 'measurements.cls': toNumber(row['measurements.cls']), 'measurements.ttfb': toNumber(row['measurements.ttfb']), 'measurements.fid': toNumber(row['measurements.fid']), - 'measurements.inp': toNumber(row['measurements.fid']), 'transaction.duration': toNumber(row['transaction.duration']), replayId: row.replayId?.toString(), 'profile.id': row['profile.id']?.toString(), @@ -132,12 +127,12 @@ export const useTransactionSamplesWebVitalsScoresQuery = ({ ), ...(webVital ? { - [`${webVitalKey}Score`]: Math.round( + [`${webVital}Score`]: Math.round( ((toNumber(row[`measurements.score.${webVital}`]) ?? 0) / (toNumber(row[`measurements.score.weight.${webVital}`]) ?? 0)) * 100 ), - [`${webVitalKey}Weight`]: Math.round( + [`${webVital}Weight`]: Math.round( (toNumber(row[`measurements.score.weight.${webVital}`]) ?? 
0) * 100 ), } diff --git a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionWebVitalsScoresQuery.tsx b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionWebVitalsScoresQuery.tsx index 637cf8a467fbcc..dd268434af81f7 100644 --- a/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionWebVitalsScoresQuery.tsx +++ b/static/app/views/performance/browser/webVitals/utils/queries/storedScoreQueries/useTransactionWebVitalsScoresQuery.tsx @@ -46,23 +46,23 @@ export const useTransactionWebVitalsScoresQuery = ({ 'p75(measurements.cls)', 'p75(measurements.ttfb)', 'p75(measurements.fid)', + 'p75(measurements.inp)', 'performance_score(measurements.score.lcp)', 'performance_score(measurements.score.fcp)', 'performance_score(measurements.score.cls)', 'performance_score(measurements.score.fid)', + 'performance_score(measurements.score.inp)', 'performance_score(measurements.score.ttfb)', 'avg(measurements.score.total)', 'count()', `opportunity_score(measurements.score.${opportunityWebVital})`, - 'count_scores(measurements.score.lcp)', - 'count_scores(measurements.score.fcp)', - 'count_scores(measurements.score.cls)', - 'count_scores(measurements.score.ttfb)', - 'count_scores(measurements.score.fid)', + `count_scores(measurements.score.${opportunityWebVital})`, ], name: 'Web Vitals', query: [ - 'transaction.op:pageload', + // TODO: inp spans don't have a transaction.op. + // Plan to update this filter to also check span.op:ui.interaction.click once we have the ability. + 'transaction.op:[pageload,""]', 'avg(measurements.score.total):>=0', ...(transaction ? [`transaction:"${transaction}"`] : []), ...(query ? [query] : []), @@ -90,8 +90,15 @@ export const useTransactionWebVitalsScoresQuery = ({ const tableData: RowWithScoreAndOpportunity[] = !isLoading && data?.data.length ? 
data.data.map(row => { - const {totalScore, clsScore, fcpScore, lcpScore, ttfbScore, fidScore} = - calculatePerformanceScoreFromStoredTableDataRow(row); + const { + totalScore, + clsScore, + fcpScore, + lcpScore, + ttfbScore, + fidScore, + inpScore, + } = calculatePerformanceScoreFromStoredTableDataRow(row); return { transaction: row.transaction?.toString(), 'p75(measurements.lcp)': row['p75(measurements.lcp)'] as number, @@ -99,27 +106,10 @@ export const useTransactionWebVitalsScoresQuery = ({ 'p75(measurements.cls)': row['p75(measurements.cls)'] as number, 'p75(measurements.ttfb)': row['p75(measurements.ttfb)'] as number, 'p75(measurements.fid)': row['p75(measurements.fid)'] as number, - // Fake INP data using FID data - // TODO(edwardgou): Remove this once INP is queryable in discover - 'p75(measurements.inp)': row['p75(measurements.fid)'] as number, + 'p75(measurements.inp)': row['p75(measurements.inp)'] as number, 'count()': row['count()'] as number, - 'count_scores(measurements.score.lcp)': row[ - 'count_scores(measurements.score.lcp)' - ] as number, - 'count_scores(measurements.score.fcp)': row[ - 'count_scores(measurements.score.fcp)' - ] as number, - 'count_scores(measurements.score.cls)': row[ - 'count_scores(measurements.score.cls)' - ] as number, - 'count_scores(measurements.score.ttfb)': row[ - 'count_scores(measurements.score.ttfb)' - ] as number, - 'count_scores(measurements.score.fid)': row[ - 'count_scores(measurements.score.fid)' - ] as number, - 'count_scores(measurements.score.inp)': row[ - 'count_scores(measurements.score.fid)' + [`count_scores(measurements.score.${opportunityWebVital})`]: row[ + `count_scores(measurements.score.${opportunityWebVital})` ] as number, totalScore: totalScore ?? 0, clsScore: clsScore ?? 0, @@ -127,9 +117,7 @@ export const useTransactionWebVitalsScoresQuery = ({ lcpScore: lcpScore ?? 0, ttfbScore: ttfbScore ?? 0, fidScore: fidScore ?? 
0, - // Fake INP data using FID data - // TODO(edwardgou): Remove this once INP is queryable in discover - inpScore: fidScore ?? 0, + inpScore: inpScore ?? 0, opportunity: row[ `opportunity_score(measurements.score.${opportunityWebVital})` ] as number, diff --git a/static/app/views/performance/browser/webVitals/webVitalsDetailPanel.tsx b/static/app/views/performance/browser/webVitals/webVitalsDetailPanel.tsx index 01fcbef5083ea2..75f083c18bda84 100644 --- a/static/app/views/performance/browser/webVitals/webVitalsDetailPanel.tsx +++ b/static/app/views/performance/browser/webVitals/webVitalsDetailPanel.tsx @@ -12,7 +12,7 @@ import GridEditable, {COL_WIDTH_UNDEFINED} from 'sentry/components/gridEditable' import ExternalLink from 'sentry/components/links/externalLink'; import {Tooltip} from 'sentry/components/tooltip'; import {t, tct} from 'sentry/locale'; -import {getDuration} from 'sentry/utils/formatters'; +import {formatAbbreviatedNumber, getDuration} from 'sentry/utils/formatters'; import {PageAlert, PageAlertProvider} from 'sentry/utils/performance/contexts/pageAlert'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; @@ -31,7 +31,6 @@ import type { RowWithScoreAndOpportunity, WebVitals, } from 'sentry/views/performance/browser/webVitals/utils/types'; -import {useReplaceFidWithInpSetting} from 'sentry/views/performance/browser/webVitals/utils/useReplaceFidWithInpSetting'; import {useStoredScoresSetting} from 'sentry/views/performance/browser/webVitals/utils/useStoredScoresSetting'; import DetailPanel from 'sentry/views/starfish/components/detailPanel'; @@ -39,7 +38,7 @@ type Column = GridColumnHeader; const columnOrder: GridColumnOrder[] = [ {key: 'transaction', width: COL_WIDTH_UNDEFINED, name: 'Pages'}, - {key: 'count()', width: COL_WIDTH_UNDEFINED, name: 'Pageloads'}, + {key: 'count', width: COL_WIDTH_UNDEFINED, name: 'Pageloads'}, {key: 'webVital', width: COL_WIDTH_UNDEFINED, name: 'Web Vital'}, 
{key: 'score', width: COL_WIDTH_UNDEFINED, name: 'Score'}, {key: 'opportunity', width: COL_WIDTH_UNDEFINED, name: 'Opportunity'}, @@ -59,14 +58,11 @@ export function WebVitalsDetailPanel({ const organization = useOrganization(); const location = useLocation(); const shouldUseStoredScores = useStoredScoresSetting(); - const shouldReplaceFidWithInp = useReplaceFidWithInpSetting(); - // TODO: Revert this when INP is queryable in discover. - const webVitalFilter = shouldReplaceFidWithInp && webVital === 'inp' ? 'fid' : webVital; const {data: projectData} = useProjectRawWebVitalsQuery({}); const {data: projectScoresData} = useProjectWebVitalsScoresQuery({ enabled: shouldUseStoredScores, - weightWebVital: webVitalFilter ?? 'total', + weightWebVital: webVital ?? 'total', }); const projectScore = shouldUseStoredScores @@ -74,18 +70,18 @@ export function WebVitalsDetailPanel({ : calculatePerformanceScoreFromTableDataRow(projectData?.data?.[0]); const {data, isLoading} = useTransactionWebVitalsQuery({ limit: 100, - opportunityWebVital: webVitalFilter ?? 'total', + opportunityWebVital: webVital ?? 'total', ...(webVital ? shouldUseStoredScores ? 
{ - query: `count_scores(measurements.score.${webVitalFilter}):>0`, + query: `count_scores(measurements.score.${webVital}):>0`, defaultSort: { - field: `opportunity_score(measurements.score.${webVitalFilter})`, + field: `opportunity_score(measurements.score.${webVital})`, kind: 'desc', }, } : { - query: `count_web_vitals(measurements.${webVitalFilter},any):>0`, + query: `count_web_vitals(measurements.${webVital},any):>0`, } : {}), enabled: webVital !== null, @@ -97,7 +93,7 @@ export function WebVitalsDetailPanel({ } const count = projectData?.data?.[0]?.['count()'] as number; const sumWeights = projectScoresData?.data?.[0]?.[ - `sum(measurements.score.weight.${webVitalFilter})` + `sum(measurements.score.weight.${webVital})` ] as number; return data .map(row => ({ @@ -131,7 +127,6 @@ export function WebVitalsDetailPanel({ projectScoresData?.data, shouldUseStoredScores, webVital, - webVitalFilter, ]); const {data: timeseriesData, isLoading: isTimeseriesLoading} = @@ -181,6 +176,11 @@ export function WebVitalsDetailPanel({ ); } + if (col.key === 'count') { + if (webVital === 'inp') { + return {t('Interactions')}; + } + } return {col.name}; }; @@ -232,6 +232,11 @@ export function WebVitalsDetailPanel({ ); } + if (key === 'count') { + const count = + webVital === 'inp' ? 
row['count_scores(measurements.score.inp)'] : row['count()']; + return {formatAbbreviatedNumber(count)}; + } return {row[key]}; }; diff --git a/static/app/views/performance/database/databaseSpanSummaryPage.tsx b/static/app/views/performance/database/databaseSpanSummaryPage.tsx index f3cfcfa8a770f6..9081c09db57f28 100644 --- a/static/app/views/performance/database/databaseSpanSummaryPage.tsx +++ b/static/app/views/performance/database/databaseSpanSummaryPage.tsx @@ -10,25 +10,27 @@ import {EnvironmentPageFilter} from 'sentry/components/organizations/environment import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type {Sort} from 'sentry/utils/discover/fields'; +import {DurationUnit, RateUnit, type Sort} from 'sentry/utils/discover/fields'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; import {DurationChart} from 'sentry/views/performance/database/durationChart'; import {ThroughputChart} from 'sentry/views/performance/database/throughputChart'; import {useSelectedDurationAggregate} from 'sentry/views/performance/database/useSelectedDurationAggregate'; +import {MetricReadout} from 'sentry/views/performance/metricReadout'; import * as ModuleLayout from 'sentry/views/performance/moduleLayout'; import {ModulePageProviders} from 'sentry/views/performance/modulePageProviders'; import {useSynchronizeCharts} from 'sentry/views/starfish/components/chart'; import {DatabaseSpanDescription} from 'sentry/views/starfish/components/spanDescription'; +import {getTimeSpentExplanation} from 'sentry/views/starfish/components/tableCells/timeSpentCell'; import {useSpanMetrics} from 'sentry/views/starfish/queries/useSpanMetrics'; import {useSpanMetricsSeries} from 'sentry/views/starfish/queries/useSpanMetricsSeries'; import type {SpanMetricsQueryFilters} 
from 'sentry/views/starfish/types'; import {SpanFunction, SpanMetricsField} from 'sentry/views/starfish/types'; import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; +import {DataTitles, getThroughputTitle} from 'sentry/views/starfish/views/spans/types'; import {useModuleSort} from 'sentry/views/starfish/views/spans/useModuleSort'; import {SampleList} from 'sentry/views/starfish/views/spanSummaryPage/sampleList'; -import {SpanMetricsRibbon} from 'sentry/views/starfish/views/spanSummaryPage/spanMetricsRibbon'; import {SpanTransactionsTable} from 'sentry/views/starfish/views/spanSummaryPage/spanTransactionsTable'; type Query = { @@ -64,7 +66,7 @@ function SpanSummaryPage({params}: Props) { const sort = useModuleSort(QueryParameterNames.ENDPOINTS_SORT, DEFAULT_SORT); - const {data} = useSpanMetrics({ + const {data, isLoading: areSpanMetricsLoading} = useSpanMetrics({ filters, fields: [ SpanMetricsField.SPAN_OP, @@ -157,7 +159,32 @@ function SpanSummaryPage({params}: Props) { - + + + + + + + @@ -239,4 +266,10 @@ const DescriptionContainer = styled(ModuleLayout.Full)` line-height: 1.2; `; +const MetricsRibbon = styled('div')` + display: flex; + flex-wrap: wrap; + gap: ${space(4)}; +`; + export default SpanSummaryPage; diff --git a/static/app/views/performance/http/domainCell.tsx b/static/app/views/performance/http/domainCell.tsx new file mode 100644 index 00000000000000..8878e9b09a0f1c --- /dev/null +++ b/static/app/views/performance/http/domainCell.tsx @@ -0,0 +1,40 @@ +import {Link} from 'react-router'; +import * as qs from 'query-string'; + +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import {normalizeUrl} from 'sentry/utils/withDomainRequired'; +import {OverflowEllipsisTextContainer} from 'sentry/views/starfish/components/textAlign'; + +interface Props { + domain?: string; +} + +export function DomainCell({domain}: Props) { + const location = useLocation(); + const 
organization = useOrganization(); + + // NOTE: This is for safety only, the product should not fetch or render rows with missing domains or project IDs + if (!domain) { + return NULL_DESCRIPTION; + } + + const queryString = { + ...location.query, + domain, + }; + + return ( + + + {domain} + + + ); +} + +const NULL_DESCRIPTION = <null>; diff --git a/static/app/views/performance/http/domainsTable.tsx b/static/app/views/performance/http/domainsTable.tsx index 49ffb17965b7c0..e9d2f8e737ba80 100644 --- a/static/app/views/performance/http/domainsTable.tsx +++ b/static/app/views/performance/http/domainsTable.tsx @@ -14,6 +14,7 @@ import {RATE_UNIT_TITLE, RateUnit} from 'sentry/utils/discover/fields'; import {VisuallyCompleteWithData} from 'sentry/utils/performanceForSentry'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; +import {DomainCell} from 'sentry/views/performance/http/domainCell'; import {renderHeadCell} from 'sentry/views/starfish/components/tableCells/renderHeadCell'; import type {MetricsResponse} from 'sentry/views/starfish/types'; import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; @@ -135,6 +136,10 @@ function renderBodyCell( location: Location, organization: Organization ) { + if (column.key === 'span.domain') { + return ; + } + if (!meta?.fields) { return row[column.key]; } diff --git a/static/app/views/performance/http/httpDomainSummaryPage.spec.tsx b/static/app/views/performance/http/httpDomainSummaryPage.spec.tsx new file mode 100644 index 00000000000000..e657ac38ac2a46 --- /dev/null +++ b/static/app/views/performance/http/httpDomainSummaryPage.spec.tsx @@ -0,0 +1,130 @@ +import {OrganizationFixture} from 'sentry-fixture/organization'; + +import {render, screen, waitForElementToBeRemoved} from 'sentry-test/reactTestingLibrary'; + +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import 
usePageFilters from 'sentry/utils/usePageFilters'; +import {HTTPDomainSummaryPage} from 'sentry/views/performance/http/httpDomainSummaryPage'; + +jest.mock('sentry/utils/useLocation'); +jest.mock('sentry/utils/usePageFilters'); +jest.mock('sentry/utils/useOrganization'); + +describe('HTTPSummaryPage', function () { + const organization = OrganizationFixture(); + + let domainChartsRequestMock; + + jest.mocked(usePageFilters).mockReturnValue({ + isReady: true, + desyncedFilters: new Set(), + pinnedFilters: new Set(), + shouldPersist: true, + selection: { + datetime: { + period: '10d', + start: null, + end: null, + utc: false, + }, + environments: [], + projects: [], + }, + }); + + jest.mocked(useLocation).mockReturnValue({ + pathname: '', + search: '', + query: {domain: '*.sentry.dev', statsPeriod: '10d'}, + hash: '', + state: undefined, + action: 'PUSH', + key: '', + }); + + jest.mocked(useOrganization).mockReturnValue(organization); + + beforeEach(function () { + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/events/`, + method: 'GET', + body: { + data: [], + }, + }); + + domainChartsRequestMock = MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/events-stats/`, + method: 'GET', + body: { + 'spm()': { + data: [ + [1699907700, [{count: 7810.2}]], + [1699908000, [{count: 1216.8}]], + ], + }, + }, + }); + }); + + afterAll(function () { + jest.resetAllMocks(); + }); + + it('fetches module data', async function () { + render(); + + expect(domainChartsRequestMock).toHaveBeenNthCalledWith( + 1, + `/organizations/${organization.slug}/events-stats/`, + expect.objectContaining({ + method: 'GET', + query: { + cursor: undefined, + dataset: 'spansMetrics', + environment: [], + excludeOther: 0, + field: [], + interval: '30m', + orderby: undefined, + partial: 1, + per_page: 50, + project: [], + query: 'span.module:http span.domain:"\\*.sentry.dev"', + referrer: 'api.starfish.http-module-domain-summary-throughput-chart', + 
statsPeriod: '10d', + topEvents: undefined, + yAxis: 'spm()', + }, + }) + ); + + expect(domainChartsRequestMock).toHaveBeenNthCalledWith( + 2, + `/organizations/${organization.slug}/events-stats/`, + expect.objectContaining({ + method: 'GET', + query: { + cursor: undefined, + dataset: 'spansMetrics', + environment: [], + excludeOther: 0, + field: [], + interval: '30m', + orderby: undefined, + partial: 1, + per_page: 50, + project: [], + query: 'span.module:http span.domain:"\\*.sentry.dev"', + referrer: 'api.starfish.http-module-domain-summary-duration-chart', + statsPeriod: '10d', + topEvents: undefined, + yAxis: 'avg(span.self_time)', + }, + }) + ); + + await waitForElementToBeRemoved(() => screen.queryAllByTestId('loading-indicator')); + }); +}); diff --git a/static/app/views/performance/http/httpDomainSummaryPage.tsx b/static/app/views/performance/http/httpDomainSummaryPage.tsx new file mode 100644 index 00000000000000..494754919700d2 --- /dev/null +++ b/static/app/views/performance/http/httpDomainSummaryPage.tsx @@ -0,0 +1,188 @@ +import React from 'react'; +import styled from '@emotion/styled'; + +import {Breadcrumbs} from 'sentry/components/breadcrumbs'; +import FloatingFeedbackWidget from 'sentry/components/feedback/widget/floatingFeedbackWidget'; +import * as Layout from 'sentry/components/layouts/thirds'; +import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; +import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; +import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import {DurationUnit, RateUnit} from 'sentry/utils/discover/fields'; +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import {normalizeUrl} from 'sentry/utils/withDomainRequired'; +import {DurationChart} from 'sentry/views/performance/database/durationChart'; 
+import {ThroughputChart} from 'sentry/views/performance/database/throughputChart'; +import {useSelectedDurationAggregate} from 'sentry/views/performance/database/useSelectedDurationAggregate'; +import {MetricReadout} from 'sentry/views/performance/metricReadout'; +import * as ModuleLayout from 'sentry/views/performance/moduleLayout'; +import {ModulePageProviders} from 'sentry/views/performance/modulePageProviders'; +import {useSynchronizeCharts} from 'sentry/views/starfish/components/chart'; +import {useSpanMetrics} from 'sentry/views/starfish/queries/useSpanMetrics'; +import {useSpanMetricsSeries} from 'sentry/views/starfish/queries/useSpanMetricsSeries'; +import type {SpanMetricsQueryFilters} from 'sentry/views/starfish/types'; +import {ModuleName, SpanFunction, SpanMetricsField} from 'sentry/views/starfish/types'; +import {DataTitles, getThroughputTitle} from 'sentry/views/starfish/views/spans/types'; + +type Query = { + aggregate?: string; + domain?: string; +}; + +export function HTTPDomainSummaryPage() { + const location = useLocation(); + const organization = useOrganization(); + + const [selectedAggregate] = useSelectedDurationAggregate(); + + const {domain} = location.query; + + const filters: SpanMetricsQueryFilters = { + 'span.module': ModuleName.HTTP, + 'span.domain': domain, + }; + + const {data: domainMetrics, isLoading: areDomainMetricsLoading} = useSpanMetrics({ + filters, + fields: [ + SpanMetricsField.SPAN_DOMAIN, + `${SpanFunction.SPM}()`, + `avg(${SpanMetricsField.SPAN_SELF_TIME})`, + `sum(${SpanMetricsField.SPAN_SELF_TIME})`, + `${SpanFunction.TIME_SPENT_PERCENTAGE}()`, + ], + referrer: 'api.starfish.http-module-domain-summary-metrics-ribbon', + }); + + const { + isLoading: isThroughputDataLoading, + data: throughputData, + error: throughputError, + } = useSpanMetricsSeries({ + filters, + yAxis: ['spm()'], + referrer: 'api.starfish.http-module-domain-summary-throughput-chart', + }); + + const { + isLoading: isDurationDataLoading, + data: 
durationData, + error: durationError, + } = useSpanMetricsSeries({ + filters, + yAxis: [`${selectedAggregate}(${SpanMetricsField.SPAN_SELF_TIME})`], + referrer: 'api.starfish.http-module-domain-summary-duration-chart', + }); + + useSynchronizeCharts([!isThroughputDataLoading && !isDurationDataLoading]); + + return ( + + + + + {domain} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ); +} + +const HeaderContainer = styled('div')` + display: flex; + justify-content: space-between; + flex-wrap: wrap; +`; + +const MetricsRibbon = styled('div')` + display: flex; + flex-wrap: wrap; + gap: ${space(4)}; +`; + +function LandingPageWithProviders() { + return ( + + + + ); +} + +export default LandingPageWithProviders; diff --git a/static/app/views/performance/http/httpLandingPage.spec.tsx b/static/app/views/performance/http/httpLandingPage.spec.tsx index 255f46173f931e..0258639ababcd9 100644 --- a/static/app/views/performance/http/httpLandingPage.spec.tsx +++ b/static/app/views/performance/http/httpLandingPage.spec.tsx @@ -50,7 +50,9 @@ describe('HTTPLandingPage', function () { url: `/organizations/${organization.slug}/events/`, method: 'GET', match: [ - MockApiClient.matchQuery({referrer: 'api.starfish.http-module-domains-list'}), + MockApiClient.matchQuery({ + referrer: 'api.starfish.http-module-landing-domains-list', + }), ], body: { data: [ @@ -153,7 +155,7 @@ describe('HTTPLandingPage', function () { per_page: 10, project: [], query: 'span.module:http has:span.domain', - referrer: 'api.starfish.http-module-domains-list', + referrer: 'api.starfish.http-module-landing-domains-list', sort: '-time_spent_percentage()', statsPeriod: '10d', }, @@ -168,7 +170,13 @@ describe('HTTPLandingPage', function () { await waitForElementToBeRemoved(() => screen.queryAllByTestId('loading-indicator')); - expect(screen.getByRole('cell', {name: '*.sentry.io'})).toBeInTheDocument(); - expect(screen.getByRole('cell', {name: '*.github.com'})).toBeInTheDocument(); + 
expect(screen.getByRole('link', {name: '*.sentry.io'})).toHaveAttribute( + 'href', + '/organizations/org-slug/performance/http/domains/?domain=%2A.sentry.io&statsPeriod=10d' + ); + expect(screen.getByRole('link', {name: '*.github.com'})).toHaveAttribute( + 'href', + '/organizations/org-slug/performance/http/domains/?domain=%2A.github.com&statsPeriod=10d' + ); }); }); diff --git a/static/app/views/performance/http/httpLandingPage.tsx b/static/app/views/performance/http/httpLandingPage.tsx index 5807baddcd047b..31797561ce3645 100644 --- a/static/app/views/performance/http/httpLandingPage.tsx +++ b/static/app/views/performance/http/httpLandingPage.tsx @@ -81,7 +81,7 @@ export function HTTPLandingPage() { sorts: [sort], limit: DOMAIN_TABLE_ROW_COUNT, cursor, - referrer: 'api.starfish.http-module-domains-list', + referrer: 'api.starfish.http-module-landing-domains-list', }); useSynchronizeCharts([!isThroughputDataLoading && !isDurationDataLoading]); diff --git a/static/app/views/performance/landing/widgets/components/selectableList.tsx b/static/app/views/performance/landing/widgets/components/selectableList.tsx index 7c6f4446bb1995..6cd9212d22de94 100644 --- a/static/app/views/performance/landing/widgets/components/selectableList.tsx +++ b/static/app/views/performance/landing/widgets/components/selectableList.tsx @@ -10,7 +10,7 @@ import {Tooltip} from 'sentry/components/tooltip'; import {IconClose} from 'sentry/icons'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import {getConfigureIntegrationsDocsLink} from 'sentry/utils/docs'; +import {getConfigurePerformanceDocsLink} from 'sentry/utils/docs'; import usePageFilters from 'sentry/utils/usePageFilters'; import useProjects from 'sentry/utils/useProjects'; import {NoDataMessage} from 'sentry/views/performance/database/noDataMessage'; @@ -115,9 +115,9 @@ export function WidgetAddInstrumentationWarning({type}: {type: 'db' | 'http'}) { } const project = fullProjects.projects.find(p => 
p.id === '' + projects[0]); - const url = getConfigureIntegrationsDocsLink(project); + const docsLink = getConfigurePerformanceDocsLink(project); - if (!url) { + if (!docsLink) { return ; } @@ -126,10 +126,14 @@ export function WidgetAddInstrumentationWarning({type}: {type: 'db' | 'http'}) { {t('No results found')} {tct( - 'No transactions with [spanCategory] spans found, you may need to [added].', + 'No transactions with [spanCategory] spans found. You may need to add integrations to your [link] to capture these spans.', { spanCategory: type === 'db' ? t('Database') : t('HTTP'), - added: {t('add integrations')}, + link: ( + + {t('performance monitoring setup')} + + ), } )} diff --git a/static/app/views/performance/landing/widgets/components/widgetContainer.spec.tsx b/static/app/views/performance/landing/widgets/components/widgetContainer.spec.tsx index b8f426f42fb0cb..481f601fba3f7b 100644 --- a/static/app/views/performance/landing/widgets/components/widgetContainer.spec.tsx +++ b/static/app/views/performance/landing/widgets/components/widgetContainer.spec.tsx @@ -953,6 +953,7 @@ describe('Performance > Widgets > WidgetContainer', function () { 'p75(measurements.cls)', 'p75(measurements.ttfb)', 'p75(measurements.fid)', + 'p75(measurements.inp)', 'p75(transaction.duration)', 'count_web_vitals(measurements.lcp, any)', 'count_web_vitals(measurements.fcp, any)', @@ -961,7 +962,7 @@ describe('Performance > Widgets > WidgetContainer', function () { 'count_web_vitals(measurements.ttfb, any)', 'count()', ], - query: 'transaction.op:pageload', + query: 'transaction.op:[pageload,""]', }), }) ); diff --git a/static/app/views/performance/metricReadout.spec.tsx b/static/app/views/performance/metricReadout.spec.tsx new file mode 100644 index 00000000000000..6edcd8dd58c52b --- /dev/null +++ b/static/app/views/performance/metricReadout.spec.tsx @@ -0,0 +1,66 @@ +import {render, screen} from 'sentry-test/reactTestingLibrary'; + +import {DurationUnit, RateUnit, SizeUnit} from 
'sentry/utils/discover/fields'; +import {MetricReadout} from 'sentry/views/performance/metricReadout'; + +describe('MetricReadout', function () { + it('shows a loading spinner if data is loading', () => { + render( + + ); + + expect(screen.getByText('Duration')).toBeInTheDocument(); + expect(screen.getByTestId('loading-indicator')).toBeInTheDocument(); + }); + + it('shows placeholder text if data is missing', () => { + render( + + ); + + expect(screen.getByRole('heading', {name: 'Duration'})).toBeInTheDocument(); + expect(screen.getByText('--')).toBeInTheDocument(); + }); + + it('parses strings', () => { + render(); + + expect(screen.getByRole('heading', {name: 'Rate'})).toBeInTheDocument(); + expect(screen.getByText('17.8/min')).toBeInTheDocument(); + }); + + it('renders rates', () => { + render(); + + expect(screen.getByRole('heading', {name: 'Rate'})).toBeInTheDocument(); + expect(screen.getByText('17.8/min')).toBeInTheDocument(); + }); + + it('renders milliseconds', () => { + render( + + ); + + expect(screen.getByRole('heading', {name: 'Duration'})).toBeInTheDocument(); + expect(screen.getByText('2.58d')).toBeInTheDocument(); + }); + + it('renders bytes', () => { + render(); + + expect(screen.getByRole('heading', {name: 'Size'})).toBeInTheDocument(); + expect(screen.getByText('1.1 MiB')).toBeInTheDocument(); + }); + + it('renders counts', () => { + render(); + + expect(screen.getByRole('heading', {name: 'Count'})).toBeInTheDocument(); + expect(screen.getByText('7.8m')).toBeInTheDocument(); + }); +}); diff --git a/static/app/views/performance/metricReadout.tsx b/static/app/views/performance/metricReadout.tsx new file mode 100644 index 00000000000000..d0d0ea0b2466d5 --- /dev/null +++ b/static/app/views/performance/metricReadout.tsx @@ -0,0 +1,113 @@ +import type {ReactText} from 'react'; +import {Fragment} from 'react'; +import styled from '@emotion/styled'; + +import Duration from 'sentry/components/duration'; +import FileSize from 'sentry/components/fileSize'; 
+import LoadingIndicator from 'sentry/components/loadingIndicator'; +import {Tooltip} from 'sentry/components/tooltip'; +import {defined} from 'sentry/utils'; +import type {CountUnit} from 'sentry/utils/discover/fields'; +import {DurationUnit, RateUnit, SizeUnit} from 'sentry/utils/discover/fields'; +import {formatAbbreviatedNumber, formatRate} from 'sentry/utils/formatters'; +import {Block} from 'sentry/views/starfish/views/spanSummaryPage/block'; + +type Unit = DurationUnit.MILLISECOND | SizeUnit.BYTE | RateUnit | CountUnit; + +interface Props { + title: string; + unit: Unit; + value: ReactText | undefined; + align?: 'left' | 'right'; + isLoading?: boolean; + tooltip?: React.ReactNode; +} + +export function MetricReadout(props: Props) { + return ( + + + + ); +} + +function ReadoutContent({unit, value, tooltip, align = 'right', isLoading}: Props) { + if (isLoading) { + return ( + + + + ); + } + + if (!defined(value)) { + return --; + } + + let renderedValue: React.ReactNode; + + if (isARateUnit(unit)) { + renderedValue = ( + + {formatRate(typeof value === 'string' ? parseFloat(value) : value, unit)} + + ); + } + + if (unit === DurationUnit.MILLISECOND) { + // TODO: Implement other durations + renderedValue = ( + + + + ); + } + + if (unit === SizeUnit.BYTE) { + // TODO: Implement other sizes + renderedValue = ( + + + + ); + } + + if (unit === 'count') { + renderedValue = ( + + {formatAbbreviatedNumber(typeof value === 'string' ? parseInt(value, 10) : value)} + + ); + } + + if (tooltip) { + return ( + + + {renderedValue} + + + ); + } + + return {renderedValue}; +} + +const NumberContainer = styled('div')<{align: 'left' | 'right'}>` + text-align: ${p => p.align}; + font-variant-numeric: tabular-nums; +`; + +const LoadingContainer = styled('div')<{align: 'left' | 'right'}>` + display: flex; + justify-content: ${p => (p.align === 'right' ? 
'flex-end' : 'flex-start')}; + align-items: center; +`; + +function isARateUnit(unit: string): unit is RateUnit { + return (Object.values(RateUnit) as string[]).includes(unit); +} diff --git a/static/app/views/performance/newTraceDetails/trace.tsx b/static/app/views/performance/newTraceDetails/trace.tsx index d01c6c8f2941a7..88200afdf4e22a 100644 --- a/static/app/views/performance/newTraceDetails/trace.tsx +++ b/static/app/views/performance/newTraceDetails/trace.tsx @@ -609,7 +609,7 @@ function RenderRow(props: { }} >
props.onExpand(e, props.node, !props.node.expanded)} + errored={props.node.has_error} > {COUNT_FORMATTER.format(props.node.groupCount)} @@ -652,6 +653,9 @@ function RenderRow(props: { } if (isTransactionNode(props.node)) { + const errored = + props.node.value.errors.length > 0 || + props.node.value.performance_issues.length > 0; return (
{props.node.children.length > 0 || props.node.canFetch ? ( @@ -708,6 +713,7 @@ function RenderRow(props: { ? props.onZoomIn(e, props.node, !props.node.zoomedIn) : props.onExpand(e, props.node, !props.node.expanded) } + errored={errored} > {props.node.children.length > 0 ? COUNT_FORMATTER.format(props.node.children.length) @@ -742,6 +748,7 @@ function RenderRow(props: { } if (isSpanNode(props.node)) { + const errored = props.node.value.relatedErrors.length > 0; return (
{props.node.children.length > 0 ? COUNT_FORMATTER.format(props.node.children.length) @@ -1192,9 +1200,13 @@ function ChildrenButton(props: { icon: React.ReactNode; onClick: (e: React.MouseEvent) => void; status: TraceTreeNode['fetchStatus'] | undefined; + errored?: boolean; }) { return ( - + {hasNewOnboarding ? ( + + ) : ( + + )}