From 62dd685be942acc784020b6643cb2b4f297cc1b9 Mon Sep 17 00:00:00 2001 From: Andrew Liu Date: Fri, 1 Mar 2024 13:41:30 -0800 Subject: [PATCH 001/145] feat(replay): add kafka config for new ingest-feedbacks consumer --- src/sentry/conf/server.py | 1 + src/sentry/consumers/__init__.py | 8 ++++++++ 2 files changed, 9 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index be508a86ec3c2e..0d0abebc39d17f 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -3450,6 +3450,7 @@ def build_cdc_postgres_init_db_volume(settings: Any) -> dict[str, dict[str, str] KAFKA_SESSIONS_SUBSCRIPTIONS_RESULTS = "sessions-subscription-results" KAFKA_METRICS_SUBSCRIPTIONS_RESULTS = "metrics-subscription-results" KAFKA_INGEST_EVENTS = "ingest-events" +KAFKA_INGEST_FEEDBACKS = "ingest-feedbacks" KAFKA_INGEST_EVENTS_DLQ = "ingest-events-dlq" KAFKA_INGEST_ATTACHMENTS = "ingest-attachments" KAFKA_INGEST_TRANSACTIONS = "ingest-transactions" diff --git a/src/sentry/consumers/__init__.py b/src/sentry/consumers/__init__.py index 92215abdd42352..968101b85ebf9c 100644 --- a/src/sentry/consumers/__init__.py +++ b/src/sentry/consumers/__init__.py @@ -269,6 +269,14 @@ def ingest_events_options() -> list[click.Option]: "consumer_type": "events", }, }, + "ingest-feedbacks": { + "topic": settings.KAFKA_INGEST_FEEDBACKS, + "strategy_factory": "sentry.ingest.consumer.factory.IngestStrategyFactory", + "click_options": ingest_events_options(), + "static_args": { + "consumer_type": "events", + }, + }, "ingest-attachments": { "topic": settings.KAFKA_INGEST_ATTACHMENTS, "strategy_factory": "sentry.ingest.consumer.factory.IngestStrategyFactory", From f0f614a563b19a5a0f2d050d042f604efc4e059d Mon Sep 17 00:00:00 2001 From: Andrew Liu Date: Mon, 4 Mar 2024 17:43:58 -0800 Subject: [PATCH 002/145] add ingest-feedbacks to KAFKA_TOPIC_TO_CLUSTER and kafka_definition --- src/sentry/conf/server.py | 1 + src/sentry/conf/types/kafka_definition.py | 1 + 2 files changed, 2 
insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 8441b7f9a68f85..a6646c65adea06 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -3512,6 +3512,7 @@ def build_cdc_postgres_init_db_volume(settings: Any) -> dict[str, dict[str, str] "sessions-subscription-results": "default", "metrics-subscription-results": "default", "ingest-events": "default", + "ingest-feedbacks": "default", "ingest-attachments": "default", "ingest-transactions": "default", "ingest-metrics": "default", diff --git a/src/sentry/conf/types/kafka_definition.py b/src/sentry/conf/types/kafka_definition.py index 61820572647de8..f9f38687453b7b 100644 --- a/src/sentry/conf/types/kafka_definition.py +++ b/src/sentry/conf/types/kafka_definition.py @@ -26,6 +26,7 @@ class Topic(Enum): METRICS_SUBSCRIPTIONS_RESULTS = "metrics-subscription-results" INGEST_EVENTS = "ingest-events" INGEST_EVENTS_DLQ = "ingest-events-dlq" + INGEST_FEEDBACKS = "ingest-feedbacks" INGEST_ATTACHMENTS = "ingest-attachments" INGEST_TRANSACTIONS = "ingest-transactions" INGEST_METRICS = "ingest-metrics" From 4dc60ad7ca172f18d7f6183cab5e5dedde4e4e6a Mon Sep 17 00:00:00 2001 From: Andrew Liu <159852527+aliu3ntry@users.noreply.github.com> Date: Mon, 4 Mar 2024 14:19:03 -0800 Subject: [PATCH 003/145] feat(replay): remove organizations:session-replay-event-linking feature flag (#66257) removes flag declaration and all refs https://github.com/getsentry/team-replay/issues/387 --- src/sentry/conf/server.py | 2 - src/sentry/features/__init__.py | 1 - src/sentry/tasks/post_process.py | 6 - tests/sentry/tasks/test_post_process.py | 157 ++++++++++++------------ 4 files changed, 76 insertions(+), 90 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index a6646c65adea06..af41c0d2a0cfd8 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1847,8 +1847,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: 
"organizations:session-replay-enable-canvas": False, # Enable canvas replaying "organizations:session-replay-enable-canvas-replayer": False, - # Enable replay event linking in event processing - "organizations:session-replay-event-linking": False, # Enable linking from 'new issue' email notifs to the issue replay list "organizations:session-replay-issue-emails": False, # Enable the new event linking columns to be queried diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index fb45a3a496a149..584a970be35271 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -240,7 +240,6 @@ default_manager.add("organizations:session-replay-count-query-optimize", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-enable-canvas-replayer", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-enable-canvas", OrganizationFeature, FeatureHandlerStrategy.REMOTE) -default_manager.add("organizations:session-replay-event-linking", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:session-replay-issue-emails", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:session-replay-new-event-counts", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-recording-scrubbing", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index e559d929f2e33d..3bb99bdbaa9813 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -1008,12 +1008,6 @@ def _get_replay_id(event): if job["is_reprocessed"]: return - if not features.has( - "organizations:session-replay-event-linking", job["event"].project.organization - ): - metrics.incr("post_process.process_replay_link.feature_not_enabled") - return - 
metrics.incr("post_process.process_replay_link.id_sampled") group_event = job["event"] diff --git a/tests/sentry/tasks/test_post_process.py b/tests/sentry/tasks/test_post_process.py index 9c22c43614aad3..857f42cbfbd926 100644 --- a/tests/sentry/tasks/test_post_process.py +++ b/tests/sentry/tasks/test_post_process.py @@ -1907,38 +1907,37 @@ def test_replay_linkage(self, incr, kafka_producer, kafka_publisher): project_id=self.project.id, ) - with self.feature({"organizations:session-replay-event-linking": True}): - self.call_post_process_group( - is_new=True, - is_regression=False, - is_new_group_environment=True, - event=event, - ) - assert kafka_producer.return_value.publish.call_count == 1 - assert kafka_producer.return_value.publish.call_args[0][0] == "ingest-replay-events" + self.call_post_process_group( + is_new=True, + is_regression=False, + is_new_group_environment=True, + event=event, + ) + assert kafka_producer.return_value.publish.call_count == 1 + assert kafka_producer.return_value.publish.call_args[0][0] == "ingest-replay-events" - ret_value = json.loads(kafka_producer.return_value.publish.call_args[0][1]) + ret_value = json.loads(kafka_producer.return_value.publish.call_args[0][1]) - assert ret_value["type"] == "replay_event" - assert ret_value["start_time"] - assert ret_value["replay_id"] == replay_id - assert ret_value["project_id"] == self.project.id - assert ret_value["segment_id"] is None - assert ret_value["retention_days"] == 90 + assert ret_value["type"] == "replay_event" + assert ret_value["start_time"] + assert ret_value["replay_id"] == replay_id + assert ret_value["project_id"] == self.project.id + assert ret_value["segment_id"] is None + assert ret_value["retention_days"] == 90 - # convert ret_value_payload which is a list of bytes to a string - ret_value_payload = json.loads(bytes(ret_value["payload"]).decode("utf-8")) + # convert ret_value_payload which is a list of bytes to a string + ret_value_payload = 
json.loads(bytes(ret_value["payload"]).decode("utf-8")) - assert ret_value_payload == { - "type": "event_link", - "replay_id": replay_id, - "error_id": event.event_id, - "timestamp": int(event.datetime.timestamp()), - "event_hash": str(uuid.UUID(md5((event.event_id).encode("utf-8")).hexdigest())), - } + assert ret_value_payload == { + "type": "event_link", + "replay_id": replay_id, + "error_id": event.event_id, + "timestamp": int(event.datetime.timestamp()), + "event_hash": str(uuid.UUID(md5((event.event_id).encode("utf-8")).hexdigest())), + } - incr.assert_any_call("post_process.process_replay_link.id_sampled") - incr.assert_any_call("post_process.process_replay_link.id_exists") + incr.assert_any_call("post_process.process_replay_link.id_sampled") + incr.assert_any_call("post_process.process_replay_link.id_exists") def test_replay_linkage_with_tag(self, incr, kafka_producer, kafka_publisher): replay_id = uuid.uuid4().hex @@ -1947,38 +1946,37 @@ def test_replay_linkage_with_tag(self, incr, kafka_producer, kafka_publisher): project_id=self.project.id, ) - with self.feature({"organizations:session-replay-event-linking": True}): - self.call_post_process_group( - is_new=True, - is_regression=False, - is_new_group_environment=True, - event=event, - ) - assert kafka_producer.return_value.publish.call_count == 1 - assert kafka_producer.return_value.publish.call_args[0][0] == "ingest-replay-events" + self.call_post_process_group( + is_new=True, + is_regression=False, + is_new_group_environment=True, + event=event, + ) + assert kafka_producer.return_value.publish.call_count == 1 + assert kafka_producer.return_value.publish.call_args[0][0] == "ingest-replay-events" - ret_value = json.loads(kafka_producer.return_value.publish.call_args[0][1]) + ret_value = json.loads(kafka_producer.return_value.publish.call_args[0][1]) - assert ret_value["type"] == "replay_event" - assert ret_value["start_time"] - assert ret_value["replay_id"] == replay_id - assert ret_value["project_id"] == 
self.project.id - assert ret_value["segment_id"] is None - assert ret_value["retention_days"] == 90 + assert ret_value["type"] == "replay_event" + assert ret_value["start_time"] + assert ret_value["replay_id"] == replay_id + assert ret_value["project_id"] == self.project.id + assert ret_value["segment_id"] is None + assert ret_value["retention_days"] == 90 - # convert ret_value_payload which is a list of bytes to a string - ret_value_payload = json.loads(bytes(ret_value["payload"]).decode("utf-8")) + # convert ret_value_payload which is a list of bytes to a string + ret_value_payload = json.loads(bytes(ret_value["payload"]).decode("utf-8")) - assert ret_value_payload == { - "type": "event_link", - "replay_id": replay_id, - "error_id": event.event_id, - "timestamp": int(event.datetime.timestamp()), - "event_hash": str(uuid.UUID(md5((event.event_id).encode("utf-8")).hexdigest())), - } + assert ret_value_payload == { + "type": "event_link", + "replay_id": replay_id, + "error_id": event.event_id, + "timestamp": int(event.datetime.timestamp()), + "event_hash": str(uuid.UUID(md5((event.event_id).encode("utf-8")).hexdigest())), + } - incr.assert_any_call("post_process.process_replay_link.id_sampled") - incr.assert_any_call("post_process.process_replay_link.id_exists") + incr.assert_any_call("post_process.process_replay_link.id_sampled") + incr.assert_any_call("post_process.process_replay_link.id_exists") def test_replay_linkage_with_tag_pii_scrubbed(self, incr, kafka_producer, kafka_publisher): event = self.create_event( @@ -1986,14 +1984,13 @@ def test_replay_linkage_with_tag_pii_scrubbed(self, incr, kafka_producer, kafka_ project_id=self.project.id, ) - with self.feature({"organizations:session-replay-event-linking": True}): - self.call_post_process_group( - is_new=True, - is_regression=False, - is_new_group_environment=True, - event=event, - ) - assert kafka_producer.return_value.publish.call_count == 0 + self.call_post_process_group( + is_new=True, + 
is_regression=False, + is_new_group_environment=True, + event=event, + ) + assert kafka_producer.return_value.publish.call_count == 0 def test_no_replay(self, incr, kafka_producer, kafka_publisher): event = self.create_event( @@ -2001,15 +1998,14 @@ def test_no_replay(self, incr, kafka_producer, kafka_publisher): project_id=self.project.id, ) - with self.feature({"organizations:session-replay-event-linking": True}): - self.call_post_process_group( - is_new=True, - is_regression=False, - is_new_group_environment=True, - event=event, - ) - assert kafka_producer.return_value.publish.call_count == 0 - incr.assert_any_call("post_process.process_replay_link.id_sampled") + self.call_post_process_group( + is_new=True, + is_regression=False, + is_new_group_environment=True, + event=event, + ) + assert kafka_producer.return_value.publish.call_count == 0 + incr.assert_any_call("post_process.process_replay_link.id_sampled") def test_0_sample_rate_replays(self, incr, kafka_producer, kafka_publisher): event = self.create_event( @@ -2017,16 +2013,15 @@ def test_0_sample_rate_replays(self, incr, kafka_producer, kafka_publisher): project_id=self.project.id, ) - with self.feature({"organizations:session-replay-event-linking": False}): - self.call_post_process_group( - is_new=True, - is_regression=False, - is_new_group_environment=True, - event=event, - ) - assert kafka_producer.return_value.publish.call_count == 0 - for args, _ in incr.call_args_list: - self.assertNotEqual(args, ("post_process.process_replay_link.id_sampled")) + self.call_post_process_group( + is_new=True, + is_regression=False, + is_new_group_environment=True, + event=event, + ) + assert kafka_producer.return_value.publish.call_count == 0 + for args, _ in incr.call_args_list: + self.assertNotEqual(args, ("post_process.process_replay_link.id_sampled")) class DetectNewEscalationTestMixin(BasePostProgressGroupMixin): From 69e2a344f8c43fc69bb4502a20862dd79cae36c1 Mon Sep 17 00:00:00 2001 From: Kev 
<6111995+k-fish@users.noreply.github.com> Date: Mon, 4 Mar 2024 17:21:41 -0500 Subject: [PATCH 004/145] fix(metrics-extraction): Fix meta lookup in factory (#66256) Was using TSResult here which is wrong, it's a dict w/ data or a dict of dicts if it's a group-by. The presence of the data key should be indicative. Fixes SENTRY-2V84 --- src/sentry/api/endpoints/organization_events.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py index 03f6d5361bdef7..9835914df054a1 100644 --- a/src/sentry/api/endpoints/organization_events.py +++ b/src/sentry/api/endpoints/organization_events.py @@ -26,7 +26,7 @@ from sentry.snuba.metrics.extraction import MetricSpecType from sentry.snuba.referrer import Referrer from sentry.types.ratelimit import RateLimit, RateLimitCategory -from sentry.utils.snuba import SnubaError, SnubaTSResult +from sentry.utils.snuba import SnubaError logger = logging.getLogger(__name__) @@ -347,9 +347,10 @@ def fn(offset, limit) -> dict[str, Any]: has_errors = len(error_results["data"]) > 0 except SnubaError: has_errors = False + error_results = None original_results = _data_fn(scopedDataset, offset, limit, scoped_query) - if isinstance(original_results, SnubaTSResult): + if original_results.get("data"): dataset_meta = original_results.data.get("meta", {}) else: dataset_meta = list(original_results.values())[0].data.get("meta", {}) From 9d26087c062ef417efdd339ab0b8dd1f4f8596ab Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Mon, 4 Mar 2024 17:21:53 -0500 Subject: [PATCH 005/145] fix(metrics-extraction): Allow overriding split decision (#66259) ### Summary If there are any problems with the split decision this will cause it to always re-run the code and save the new decision, --- src/sentry/api/endpoints/organization_events.py | 7 ++++++- src/sentry/api/endpoints/organization_events_stats.py | 7 ++++++- 
src/sentry/conf/server.py | 2 ++ src/sentry/features/__init__.py | 1 + 4 files changed, 15 insertions(+), 2 deletions(-) diff --git a/src/sentry/api/endpoints/organization_events.py b/src/sentry/api/endpoints/organization_events.py index 9835914df054a1..269ba4203e6779 100644 --- a/src/sentry/api/endpoints/organization_events.py +++ b/src/sentry/api/endpoints/organization_events.py @@ -324,8 +324,13 @@ def fn(offset, limit) -> dict[str, Any]: try: widget = DashboardWidget.objects.get(id=dashboard_widget_id) does_widget_have_split = widget.discover_widget_split is not None + has_override_feature = features.has( + "organizations:performance-discover-widget-split-override-save", + organization, + actor=request.user, + ) - if does_widget_have_split: + if does_widget_have_split and not has_override_feature: # This is essentially cached behaviour and we skip the check split_query = scoped_query if widget.discover_widget_split == DashboardWidgetTypes.ERROR_EVENTS: diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index 357980de4e12d4..14f0a524798798 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -323,8 +323,13 @@ def fn( try: widget = DashboardWidget.objects.get(id=dashboard_widget_id) does_widget_have_split = widget.discover_widget_split is not None + has_override_feature = features.has( + "organizations:performance-discover-widget-split-override-save", + organization, + actor=request.user, + ) - if does_widget_have_split: + if does_widget_have_split and not has_override_feature: # This is essentially cached behaviour and we skip the check split_query = query if widget.discover_widget_split == DashboardWidgetTypes.ERROR_EVENTS: diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index af41c0d2a0cfd8..d2b7e472406de1 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1721,6 +1721,8 
@@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:performance-database-view-percentiles": False, # Enable UI sending a discover split for widget "organizations:performance-discover-widget-split-ui": False, + # Enable backend overriding and always making a fresh split decision + "organizations:performance-discover-widget-split-override-save": False, # Enables updated all events tab in a performance issue "organizations:performance-issues-all-events-tab": False, # Enable compressed assets performance issue type diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 584a970be35271..5059b89987c06b 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -176,6 +176,7 @@ default_manager.add("organizations:performance-database-view", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:performance-db-main-thread-detector", OrganizationFeature) default_manager.add("organizations:performance-discover-widget-split-ui", OrganizationFeature, FeatureHandlerStrategy.REMOTE) +default_manager.add("organizations:performance-discover-widget-split-override-save", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:performance-file-io-main-thread-detector", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:performance-issues-all-events-tab", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:performance-issues-compressed-assets-detector", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) From 35a45b3c4b222f5e8fd756d649f85f9e54c11d7c Mon Sep 17 00:00:00 2001 From: Zach Collins Date: Mon, 4 Mar 2024 14:25:31 -0800 Subject: [PATCH 006/145] feat(autofix): timeout with autofix, query state (#66241) --- src/sentry/api/endpoints/group_ai_autofix.py | 12 ++- src/sentry/api/endpoints/seer_rpc.py | 38 +++++++++- .../api/endpoints/test_group_ai_autofix.py | 3 
+- tests/sentry/api/endpoints/test_seer_rpc.py | 73 +++++++++++++++++++ 4 files changed, 122 insertions(+), 4 deletions(-) create mode 100644 tests/sentry/api/endpoints/test_seer_rpc.py diff --git a/src/sentry/api/endpoints/group_ai_autofix.py b/src/sentry/api/endpoints/group_ai_autofix.py index e53edea874e4b5..a9db0ac70e500f 100644 --- a/src/sentry/api/endpoints/group_ai_autofix.py +++ b/src/sentry/api/endpoints/group_ai_autofix.py @@ -112,6 +112,7 @@ def _call_autofix( repos: list[dict], event_entries: list[dict], additional_context: str, + timeout_secs: int, ): response = requests.post( f"{settings.SEER_AUTOFIX_URL}/v0/automation/autofix", @@ -123,10 +124,12 @@ def _call_autofix( "issue": { "id": group.id, "title": group.title, - "short_id": group.short_id, + "short_id": group.qualified_short_id, "events": [{"entries": event_entries}], }, "additional_context": additional_context, + "timeout_secs": timeout_secs, + "last_updated": datetime.now().isoformat(), "invoking_user": ( { "id": user.id, @@ -192,7 +195,12 @@ def post(self, request: Request, group: Group) -> Response: try: self._call_autofix( - request.user, group, repos, event_entries, data.get("additional_context", "") + request.user, + group, + repos, + event_entries, + data.get("additional_context", ""), + TIMEOUT_SECONDS, ) # Mark the task as completed after TIMEOUT_SECONDS diff --git a/src/sentry/api/endpoints/seer_rpc.py b/src/sentry/api/endpoints/seer_rpc.py index c2c2537721b15b..cde7f37b58a68e 100644 --- a/src/sentry/api/endpoints/seer_rpc.py +++ b/src/sentry/api/endpoints/seer_rpc.py @@ -5,6 +5,7 @@ from django.conf import settings from django.contrib.auth.models import AnonymousUser +from django.core.exceptions import ObjectDoesNotExist from rest_framework.exceptions import ( AuthenticationFailed, NotFound, @@ -25,6 +26,7 @@ from sentry.services.hybrid_cloud.sig import SerializableFunctionValueException from sentry.silo.base import SiloMode from sentry.utils import json +from sentry.utils.env 
import in_test_environment def compare_signature(url: str, body: bytes, signature: str) -> bool: @@ -131,8 +133,13 @@ def post(self, request: Request, method_name: str) -> Response: except SerializableFunctionValueException as e: capture_exception() raise ParseError from e + except ObjectDoesNotExist as e: + # Let this fall through, this is normal. + capture_exception() + raise NotFound from e except Exception as e: - # Produce more detailed log + if in_test_environment(): + raise if settings.DEBUG: raise Exception(f"Problem processing seer rpc endpoint {method_name}") from e capture_exception() @@ -174,7 +181,36 @@ def on_autofix_complete(*, issue_id: int, status: str, steps: list[dict], fix: d group.save() +def get_autofix_state(*, issue_id: int) -> dict: + group: Group = Group.objects.get(id=issue_id) + + metadata = group.data.get("metadata", {}) + autofix_data = metadata.get("autofix", {}) + + return autofix_data + + seer_method_registry = { "on_autofix_step_update": on_autofix_step_update, "on_autofix_complete": on_autofix_complete, + "get_autofix_state": get_autofix_state, } + + +def generate_request_signature(url_path: str, body: bytes) -> str: + """ + Generate a signature for the request body + with the first shared secret. If there are other + shared secrets in the list they are only to be used + for verification during key rotation. 
+ """ + if not settings.SEER_RPC_SHARED_SECRET: + raise RpcAuthenticationSetupException("Cannot sign RPC requests without RPC_SHARED_SECRET") + + signature_input = b"%s:%s" % ( + url_path.encode("utf8"), + body, + ) + secret = settings.SEER_RPC_SHARED_SECRET[0] + signature = hmac.new(secret.encode("utf-8"), signature_input, hashlib.sha256).hexdigest() + return f"rpc0:{signature}" diff --git a/tests/sentry/api/endpoints/test_group_ai_autofix.py b/tests/sentry/api/endpoints/test_group_ai_autofix.py index 6cbdf151940c7d..ca19273fab54fb 100644 --- a/tests/sentry/api/endpoints/test_group_ai_autofix.py +++ b/tests/sentry/api/endpoints/test_group_ai_autofix.py @@ -1,6 +1,6 @@ from unittest.mock import ANY, patch -from sentry.api.endpoints.group_ai_autofix import GroupAiAutofixEndpoint +from sentry.api.endpoints.group_ai_autofix import TIMEOUT_SECONDS, GroupAiAutofixEndpoint from sentry.models.group import Group from sentry.testutils.cases import APITestCase, SnubaTestCase from sentry.testutils.helpers.datetime import before_now @@ -93,6 +93,7 @@ def test_ai_autofix_post_endpoint(self): ], ANY, "Yes", + TIMEOUT_SECONDS, ) actual_group_arg = mock_call.call_args[0][1] diff --git a/tests/sentry/api/endpoints/test_seer_rpc.py b/tests/sentry/api/endpoints/test_seer_rpc.py new file mode 100644 index 00000000000000..a1bf808fa4c62e --- /dev/null +++ b/tests/sentry/api/endpoints/test_seer_rpc.py @@ -0,0 +1,73 @@ +from typing import Any + +from django.test import override_settings +from django.urls import reverse + +from sentry.api.endpoints.seer_rpc import generate_request_signature +from sentry.testutils.cases import APITestCase +from sentry.utils import json + + +@override_settings(SEER_RPC_SHARED_SECRET=["a-long-value-that-is-hard-to-guess"]) +class TestSeerRpc(APITestCase): + @staticmethod + def _get_path(method_name: str) -> str: + return reverse( + "sentry-api-0-seer-rpc-service", + kwargs={"method_name": method_name}, + ) + + def auth_header(self, path: str, data: dict | 
str) -> str: + if isinstance(data, dict): + data = json.dumps(data) + signature = generate_request_signature(path, data.encode("utf8")) + + return f"rpcsignature {signature}" + + def test_invalid_endpoint(self): + path = self._get_path("not_a_method") + response = self.client.post(path) + assert response.status_code == 403 + + def test_invalid_authentication(self): + path = self._get_path("on_autofix_step_update") + data: dict[str, Any] = {"args": {"issued_id": 1, "status": "", "steps": []}, "meta": {}} + response = self.client.post(path, data=data, HTTP_AUTHORIZATION="rpcsignature trash") + assert response.status_code == 401 + + def test_404(self): + path = self._get_path("get_autofix_state") + data: dict[str, Any] = {"args": {"issue_id": 1}, "meta": {}} + response = self.client.post( + path, data=data, HTTP_AUTHORIZATION=self.auth_header(path, data) + ) + assert response.status_code == 404 + + def test_step_state_management(self): + group = self.create_group() + + path = self._get_path("get_autofix_state") + data: dict[str, Any] = {"args": {"issue_id": group.id}, "meta": {}} + response = self.client.post( + path, data=data, HTTP_AUTHORIZATION=self.auth_header(path, data) + ) + assert response.status_code == 200 + assert response.json() == {} + + path = self._get_path("on_autofix_step_update") + data = { + "args": {"issue_id": group.id, "status": "thing", "steps": [1, 2, 3]}, + "meta": {}, + } + response = self.client.post( + path, data=data, HTTP_AUTHORIZATION=self.auth_header(path, data) + ) + assert response.status_code == 200 + + path = self._get_path("get_autofix_state") + data = {"args": {"issue_id": group.id}, "meta": {}} + response = self.client.post( + path, data=data, HTTP_AUTHORIZATION=self.auth_header(path, data) + ) + assert response.status_code == 200 + assert response.json() == {"status": "thing", "steps": [1, 2, 3]} From 9ae6f76b335236beba329c7890bee0da19e0c934 Mon Sep 17 00:00:00 2001 From: Abdkhan14 <60121741+Abdkhan14@users.noreply.github.com> 
Date: Mon, 4 Mar 2024 17:39:37 -0500 Subject: [PATCH 007/145] feat(new-trace): Added logic for red errored rows. (#66194) Screenshot 2024-03-03 at 9 36 22 PM --------- Co-authored-by: Abdullah Khan --- .../performance/newTraceDetails/trace.tsx | 26 +++++-- .../newTraceDetails/traceTree.spec.tsx | 78 +++++++++++++++++++ .../performance/newTraceDetails/traceTree.tsx | 77 +++++++++++++++--- 3 files changed, 164 insertions(+), 17 deletions(-) diff --git a/static/app/views/performance/newTraceDetails/trace.tsx b/static/app/views/performance/newTraceDetails/trace.tsx index d01c6c8f2941a7..88200afdf4e22a 100644 --- a/static/app/views/performance/newTraceDetails/trace.tsx +++ b/static/app/views/performance/newTraceDetails/trace.tsx @@ -609,7 +609,7 @@ function RenderRow(props: { }} >
props.onExpand(e, props.node, !props.node.expanded)} + errored={props.node.has_error} > {COUNT_FORMATTER.format(props.node.groupCount)} @@ -652,6 +653,9 @@ function RenderRow(props: { } if (isTransactionNode(props.node)) { + const errored = + props.node.value.errors.length > 0 || + props.node.value.performance_issues.length > 0; return (
{props.node.children.length > 0 || props.node.canFetch ? ( @@ -708,6 +713,7 @@ function RenderRow(props: { ? props.onZoomIn(e, props.node, !props.node.zoomedIn) : props.onExpand(e, props.node, !props.node.expanded) } + errored={errored} > {props.node.children.length > 0 ? COUNT_FORMATTER.format(props.node.children.length) @@ -742,6 +748,7 @@ function RenderRow(props: { } if (isSpanNode(props.node)) { + const errored = props.node.value.relatedErrors.length > 0; return (
{props.node.children.length > 0 ? COUNT_FORMATTER.format(props.node.children.length) @@ -1192,9 +1200,13 @@ function ChildrenButton(props: { icon: React.ReactNode; onClick: (e: React.MouseEvent) => void; status: TraceTreeNode['fetchStatus'] | undefined; + errored?: boolean; }) { return ( - + + ); } const target = generateProfileFlamechartRoute({ @@ -543,11 +549,6 @@ function ProfileId({projectSlug, profileId}: {projectSlug: string; profileId?: s ); } -const EmptyValueContainer = styled('span')` - color: ${p => p.theme.gray300}; - ${p => p.theme.overflowEllipsis}; -`; - const SearchBar = styled(SmartSearchBar)` margin-bottom: ${space(2)}; `; From e18b1be414289e5165737a2ed5157f36735a2121 Mon Sep 17 00:00:00 2001 From: Snigdha Sharma Date: Tue, 5 Mar 2024 11:45:54 -0800 Subject: [PATCH 054/145] ref(issues): Add typing to issue status and issue platform files (#66093) --- pyproject.toml | 2 -- src/sentry/api/helpers/group_index/update.py | 1 - src/sentry/issues/occurrence_consumer.py | 17 +++++++++++------ src/sentry/issues/status_change.py | 10 ++++++---- src/sentry/issues/status_change_consumer.py | 4 ++-- tests/sentry/issues/test_occurrence_consumer.py | 6 ++++++ tests/sentry/issues/test_status_change.py | 4 ---- .../issues/test_status_change_consumer.py | 13 ++++++++++--- 8 files changed, 35 insertions(+), 22 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e0753d70c1ffe1..468b1bc3e28438 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -371,9 +371,7 @@ module = [ "sentry.issues.endpoints.group_events", "sentry.issues.endpoints.organization_group_index", "sentry.issues.endpoints.source_map_debug", - "sentry.issues.occurrence_consumer", "sentry.issues.search", - "sentry.issues.status_change", "sentry.middleware.access_log", "sentry.middleware.auth", "sentry.middleware.ratelimit", diff --git a/src/sentry/api/helpers/group_index/update.py b/src/sentry/api/helpers/group_index/update.py index 8f627049fd9ff1..a5b82494774ee4 100644 --- 
a/src/sentry/api/helpers/group_index/update.py +++ b/src/sentry/api/helpers/group_index/update.py @@ -597,7 +597,6 @@ def update_groups( acting_user=acting_user, status_details=result.get("statusDetails", {}), sender=update_groups, - activity_type=activity_type, ) # XXX (ahmed): hack to get the activities to work properly on issues page. Not sure of diff --git a/src/sentry/issues/occurrence_consumer.py b/src/sentry/issues/occurrence_consumer.py index e9699900c3bbe9..e11bbc35c7f81f 100644 --- a/src/sentry/issues/occurrence_consumer.py +++ b/src/sentry/issues/occurrence_consumer.py @@ -8,7 +8,7 @@ import jsonschema import sentry_sdk from django.utils import timezone -from sentry_sdk.tracing import NoOpSpan, Transaction +from sentry_sdk.tracing import NoOpSpan, Span, Transaction from sentry import nodestore from sentry.event_manager import GroupInfo @@ -52,7 +52,7 @@ def save_event_from_occurrence( def lookup_event(project_id: int, event_id: str) -> Event: - data = nodestore.get(Event.generate_node_id(project_id, event_id)) + data = nodestore.backend.get(Event.generate_node_id(project_id, event_id)) if data is None: raise EventLookupError(f"Failed to lookup event({event_id}) for project_id({project_id})") event = Event(event_id=event_id, project_id=project_id) @@ -214,8 +214,8 @@ def _get_kwargs(payload: Mapping[str, Any]) -> Mapping[str, Any]: def process_occurrence_message( - message: Mapping[str, Any], txn: Transaction | NoOpSpan -) -> tuple[IssueOccurrence, GroupInfo | None]: + message: Mapping[str, Any], txn: Transaction | NoOpSpan | Span +) -> tuple[IssueOccurrence, GroupInfo | None] | None: with metrics.timer("occurrence_consumer._process_message._get_kwargs"): kwargs = _get_kwargs(message) occurrence_data = kwargs["occurrence_data"] @@ -260,7 +260,9 @@ def process_occurrence_message( return lookup_event_and_process_issue_occurrence(kwargs["occurrence_data"]) -def _process_message(message: Mapping[str, Any]) -> tuple[IssueOccurrence, GroupInfo | None] | None: 
+def _process_message( + message: Mapping[str, Any] +) -> tuple[IssueOccurrence | None, GroupInfo | None] | None: """ :raises InvalidEventPayloadError: when the message is invalid :raises EventLookupError: when the provided event_id in the message couldn't be found. @@ -275,6 +277,9 @@ def _process_message(message: Mapping[str, Any]) -> tuple[IssueOccurrence, Group payload_type = message.get("payload_type", PayloadType.OCCURRENCE.value) if payload_type == PayloadType.STATUS_CHANGE.value: group = process_status_change_message(message, txn) + if not group: + return None + return None, GroupInfo(group=group, is_new=False, is_regression=False) elif payload_type == PayloadType.OCCURRENCE.value: return process_occurrence_message(message, txn) @@ -287,4 +292,4 @@ def _process_message(message: Mapping[str, Any]) -> tuple[IssueOccurrence, Group except (ValueError, KeyError) as e: txn.set_tag("result", "error") raise InvalidEventPayloadError(e) - return + return None diff --git a/src/sentry/issues/status_change.py b/src/sentry/issues/status_change.py index 3f2b669a01353d..20595ab88cd651 100644 --- a/src/sentry/issues/status_change.py +++ b/src/sentry/issues/status_change.py @@ -31,18 +31,21 @@ def handle_status_update( is_bulk: bool, status_details: dict[str, Any], acting_user: User | None, - activity_type: str | None, sender: Any, ) -> ActivityInfo: """ Update the status for a list of groups and create entries for Activity and GroupHistory. + This currently handles unresolving or ignoring groups. Returns a tuple of (activity_type, activity_data) for the activity that was created. 
""" activity_data = {} + activity_type = ( + ActivityType.SET_IGNORED.value + if new_status == GroupStatus.IGNORED + else ActivityType.SET_UNRESOLVED.value + ) if new_status == GroupStatus.UNRESOLVED: - activity_type = ActivityType.SET_UNRESOLVED.value - for group in group_list: if group.status == GroupStatus.IGNORED: issue_unignored.send_robust( @@ -64,7 +67,6 @@ def handle_status_update( ignore_duration = ( status_details.pop("ignoreDuration", None) or status_details.pop("snoozeDuration", None) ) or None - activity_type = ActivityType.SET_IGNORED.value activity_data = { "ignoreCount": status_details.get("ignoreCount", None), "ignoreDuration": ignore_duration, diff --git a/src/sentry/issues/status_change_consumer.py b/src/sentry/issues/status_change_consumer.py index f5606f4d4d998b..0b2230ca1f67ff 100644 --- a/src/sentry/issues/status_change_consumer.py +++ b/src/sentry/issues/status_change_consumer.py @@ -5,7 +5,7 @@ from collections.abc import Iterable, Mapping, Sequence from typing import Any -from sentry_sdk.tracing import NoOpSpan, Transaction +from sentry_sdk.tracing import NoOpSpan, Span, Transaction from sentry.issues.escalating import manage_issue_states from sentry.issues.status_change_message import StatusChangeMessageData @@ -174,7 +174,7 @@ def _get_status_change_kwargs(payload: Mapping[str, Any]) -> Mapping[str, Any]: def process_status_change_message( - message: Mapping[str, Any], txn: Transaction | NoOpSpan + message: Mapping[str, Any], txn: Transaction | NoOpSpan | Span ) -> Group | None: with metrics.timer("occurrence_consumer._process_message.status_change._get_kwargs"): kwargs = _get_status_change_kwargs(message) diff --git a/tests/sentry/issues/test_occurrence_consumer.py b/tests/sentry/issues/test_occurrence_consumer.py index 4b2e1fd0de583f..9c89857715aa2a 100644 --- a/tests/sentry/issues/test_occurrence_consumer.py +++ b/tests/sentry/issues/test_occurrence_consumer.py @@ -90,6 +90,7 @@ def test_occurrence_consumer_with_event(self) -> None: 
result = _process_message(message) assert result is not None occurrence = result[0] + assert occurrence is not None fetched_occurrence = IssueOccurrence.fetch(occurrence.id, self.project.id) assert fetched_occurrence is not None @@ -113,6 +114,7 @@ def test_process_profiling_occurrence(self) -> None: assert result is not None project_id = event_data["event"]["project_id"] occurrence = result[0] + assert occurrence is not None event = eventstore.backend.get_event_by_id(project_id, event_data["event"]["event_id"]) event = event.for_group(event.group) @@ -156,6 +158,7 @@ def test_occurrence_consumer_without_payload_type(self) -> None: result = _process_message(message) assert result is not None occurrence = result[0] + assert occurrence is not None fetched_occurrence = IssueOccurrence.fetch(occurrence.id, self.project.id) assert fetched_occurrence is not None @@ -177,6 +180,7 @@ def test_issue_platform_default_priority(self): result = _process_message(message) assert result is not None occurrence = result[0] + assert occurrence is not None group = Group.objects.filter(grouphash__hash=occurrence.fingerprint[0]).first() assert group.priority == PriorityLevel.LOW @@ -189,6 +193,7 @@ def test_issue_platform_override_priority(self): result = _process_message(message) assert result is not None occurrence = result[0] + assert occurrence is not None group = Group.objects.filter(grouphash__hash=occurrence.fingerprint[0]).first() assert group.priority == PriorityLevel.HIGH @@ -224,6 +229,7 @@ def test_transaction_lookup(self) -> None: processed = _process_message(message) assert processed is not None occurrence, _ = processed[0], processed[1] + assert occurrence is not None fetched_event = self.eventstore.get_event_by_id(self.project.id, occurrence.event_id) assert fetched_event is not None diff --git a/tests/sentry/issues/test_status_change.py b/tests/sentry/issues/test_status_change.py index 7f47b297ec0d87..db2b7bd5a3e2b1 100644 --- a/tests/sentry/issues/test_status_change.py 
+++ b/tests/sentry/issues/test_status_change.py @@ -33,7 +33,6 @@ def test_unresolve_ignored_issue(self, issue_unignored: Any) -> None: new_status=GroupStatus.UNRESOLVED, new_substatus=GroupSubStatus.ONGOING, sender=self, - activity_type=None, ) assert issue_unignored.called @@ -59,7 +58,6 @@ def test_unresolve_resolved_issue(self, issue_unresolved: Any) -> None: is_bulk=True, status_details={}, sender=self, - activity_type=None, ) assert issue_unresolved.called @@ -85,7 +83,6 @@ def test_ignore_new_issue(self, issue_ignored: Any) -> None: is_bulk=True, status_details={"ignoreDuration": 30}, sender=self, - activity_type=None, ) assert issue_ignored.called @@ -111,7 +108,6 @@ def test_ignore_until_escalating(self, issue_ignored: Any) -> None: is_bulk=True, status_details={"ignoreUntilEscalating": True}, sender=self, - activity_type=None, ) assert issue_ignored.called diff --git a/tests/sentry/issues/test_status_change_consumer.py b/tests/sentry/issues/test_status_change_consumer.py index de362a0e1b5f74..318102e5f6305d 100644 --- a/tests/sentry/issues/test_status_change_consumer.py +++ b/tests/sentry/issues/test_status_change_consumer.py @@ -42,6 +42,7 @@ def setUp(self): assert result is not None self.occurrence = result[0] + assert self.occurrence is not None self.group = Group.objects.get(grouphash__hash=self.occurrence.fingerprint[0]) self.fingerprint = ["touch-id"] @@ -168,7 +169,9 @@ def setUp(self): result = _process_message(message) assert result is not None - self.occurrence = result[0] + occurrence = result[0] + assert occurrence is not None + self.occurrence = occurrence self.group = Group.objects.get(grouphash__hash=self.occurrence.fingerprint[0]) self.fingerprint = ["touch-id"] @@ -188,6 +191,7 @@ def test_bulk_get_multiple_projects(self) -> None: result = _process_message(message) assert result is not None occurrence2 = result[0] + assert occurrence2 is not None group2 = Group.objects.get(grouphash__hash=occurrence2.fingerprint[0]) # get groups by 
fingerprint @@ -211,7 +215,9 @@ def test_bulk_get_missing_hash(self, mock_logger_error: MagicMock) -> None: with self.feature("organizations:profile-file-io-main-thread-ingest"): result = _process_message(message) assert result is not None - assert Group.objects.filter(grouphash__hash=result[0].fingerprint[0]).exists() + occurrence2 = result[0] + assert occurrence2 is not None + assert Group.objects.filter(grouphash__hash=occurrence2.fingerprint[0]).exists() # get groups by fingerprint groups_by_fingerprint = bulk_get_groups_from_fingerprints( @@ -241,7 +247,8 @@ def test_bulk_get_same_fingerprint(self) -> None: result = _process_message(message) assert result is not None occurrence2 = result[0] - group2 = Group.objects.get(grouphash__hash=result[0].fingerprint[0], project=project2) + assert occurrence2 is not None + group2 = Group.objects.get(grouphash__hash=occurrence2.fingerprint[0], project=project2) assert occurrence2.fingerprint[0] == self.occurrence.fingerprint[0] From 89bf6ae2d4141157c6985ed15911e1a120f56ed9 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Tue, 5 Mar 2024 11:46:41 -0800 Subject: [PATCH 055/145] ref(feedback): update links and system default for onboarding (#66330) Changed snippet example to `colorScheme: "system"`: SCR-20240305-jdjj Updated links and hyperlink to the specific "bring your own button" section in the docs: SCR-20240305-jcve --- .../utils/feedbackOnboarding.tsx | 15 +++++++++++---- .../gettingStartedDocs/capacitor/capacitor.tsx | 7 +++++-- .../app/gettingStartedDocs/electron/electron.tsx | 5 ++++- .../app/gettingStartedDocs/javascript/angular.tsx | 7 +++++-- .../app/gettingStartedDocs/javascript/astro.tsx | 5 ++++- .../app/gettingStartedDocs/javascript/ember.tsx | 7 +++++-- .../app/gettingStartedDocs/javascript/gatsby.tsx | 7 +++++-- .../gettingStartedDocs/javascript/javascript.tsx | 7 +++++-- .../app/gettingStartedDocs/javascript/nextjs.tsx | 5 ++++- 
.../app/gettingStartedDocs/javascript/react.tsx | 7 +++++-- .../app/gettingStartedDocs/javascript/remix.tsx | 5 ++++- .../app/gettingStartedDocs/javascript/svelte.tsx | 7 +++++-- .../gettingStartedDocs/javascript/sveltekit.tsx | 5 ++++- static/app/gettingStartedDocs/javascript/vue.tsx | 5 ++++- 14 files changed, 70 insertions(+), 24 deletions(-) diff --git a/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx index 2bfd5be647abd7..a16ffc3d9b77bc 100644 --- a/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx +++ b/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx @@ -2,13 +2,20 @@ import Alert from 'sentry/components/alert'; import ExternalLink from 'sentry/components/links/externalLink'; import {t, tct} from 'sentry/locale'; -export const getFeedbackConfigureDescription = ({link}: {link: string}) => +export const getFeedbackConfigureDescription = ({ + linkConfig, + linkButton, +}: { + linkButton: string; + linkConfig: string; +}) => tct( - 'To set up the integration, add the following to your Sentry initialization. There are many options you can pass to the [code:integrations] constructor to customize your form. [break] [break] You can even link the widget to a custom button if you don’t want to use our autoinjected floating button. Learn more about configuring User Feedback by reading the [link:configuration docs].', + 'To set up the integration, add the following to your Sentry initialization. There are many options you can pass to the [code:integrations] constructor to customize your form. [break] [break] You can even [linkButton:link the widget to a custom button] if you don’t want to use our auto-injected floating button. Learn more about configuring User Feedback by reading the [linkConfig:configuration docs].', { code: , break:
, - link: , + linkConfig: , + linkButton: , } ); @@ -28,7 +35,7 @@ export const getFeedbackSDKSetupSnippet = ({ integrations: [ Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(feedbackOptions)}}), ], });`; diff --git a/static/app/gettingStartedDocs/capacitor/capacitor.tsx b/static/app/gettingStartedDocs/capacitor/capacitor.tsx index 9c5153648717d3..a83bb1a9185fe7 100644 --- a/static/app/gettingStartedDocs/capacitor/capacitor.tsx +++ b/static/app/gettingStartedDocs/capacitor/capacitor.tsx @@ -83,7 +83,7 @@ const getSentryInitLayout = (params: Params, siblingOption: string): string => { ? ` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' }${ @@ -446,7 +446,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/capacitor/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/capacitor/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/capacitor/user-feedback/configuration/#bring-your-own-button', }), configurations: getSetupConfiguration({ params, diff --git a/static/app/gettingStartedDocs/electron/electron.tsx b/static/app/gettingStartedDocs/electron/electron.tsx index 705805d5b9cc91..bd5d80b1f40190 100644 --- a/static/app/gettingStartedDocs/electron/electron.tsx +++ b/static/app/gettingStartedDocs/electron/electron.tsx @@ -268,7 +268,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/electron/user-feedback/', + linkConfig: + 
'https://docs.sentry.io/platforms/javascript/guides/electron/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/electron/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/angular.tsx b/static/app/gettingStartedDocs/javascript/angular.tsx index e1fbb1fa140d13..94d681285cda6d 100644 --- a/static/app/gettingStartedDocs/javascript/angular.tsx +++ b/static/app/gettingStartedDocs/javascript/angular.tsx @@ -218,7 +218,7 @@ function getSdkSetupSnippet(params: Params) { ? ` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' }${ @@ -324,7 +324,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/angular/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/angular/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/angular/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/astro.tsx b/static/app/gettingStartedDocs/javascript/astro.tsx index 0a36e3da2ea2b9..8685871cab112b 100644 --- a/static/app/gettingStartedDocs/javascript/astro.tsx +++ b/static/app/gettingStartedDocs/javascript/astro.tsx @@ -250,7 +250,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/astro/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/astro/user-feedback/configuration/', + linkButton: + 
'https://docs.sentry.io/platforms/javascript/guides/astro/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/ember.tsx b/static/app/gettingStartedDocs/javascript/ember.tsx index 43d2cf1e2afe4d..3f915b31f4e01e 100644 --- a/static/app/gettingStartedDocs/javascript/ember.tsx +++ b/static/app/gettingStartedDocs/javascript/ember.tsx @@ -40,7 +40,7 @@ Sentry.init({ ? ` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' } @@ -214,7 +214,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/ember/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/ember/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/ember/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/gatsby.tsx b/static/app/gettingStartedDocs/javascript/gatsby.tsx index fea36336302ed3..d8a0d19657933b 100644 --- a/static/app/gettingStartedDocs/javascript/gatsby.tsx +++ b/static/app/gettingStartedDocs/javascript/gatsby.tsx @@ -37,7 +37,7 @@ Sentry.init({ ? 
` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' }${ @@ -248,7 +248,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/gatsby/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/gatsby/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/gatsby/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/javascript.tsx b/static/app/gettingStartedDocs/javascript/javascript.tsx index 6b7b24e8a3cf8b..ab7819f2986ff7 100644 --- a/static/app/gettingStartedDocs/javascript/javascript.tsx +++ b/static/app/gettingStartedDocs/javascript/javascript.tsx @@ -36,7 +36,7 @@ Sentry.init({ ? 
` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' }${ @@ -215,7 +215,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/user-feedback/configuration/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/nextjs.tsx b/static/app/gettingStartedDocs/javascript/nextjs.tsx index 1d091ae2182b57..c4db4f65170967 100644 --- a/static/app/gettingStartedDocs/javascript/nextjs.tsx +++ b/static/app/gettingStartedDocs/javascript/nextjs.tsx @@ -206,7 +206,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/nextjs/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/nextjs/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/nextjs/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/react.tsx b/static/app/gettingStartedDocs/javascript/react.tsx index d7be7e63f8825e..01a1d131938f20 100644 --- a/static/app/gettingStartedDocs/javascript/react.tsx +++ b/static/app/gettingStartedDocs/javascript/react.tsx @@ -36,7 +36,7 @@ Sentry.init({ ? 
` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' }${ @@ -240,7 +240,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/react/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/react/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/react/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/remix.tsx b/static/app/gettingStartedDocs/javascript/remix.tsx index 606340e566b4d7..0a8b0e0e386bc1 100644 --- a/static/app/gettingStartedDocs/javascript/remix.tsx +++ b/static/app/gettingStartedDocs/javascript/remix.tsx @@ -194,7 +194,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/remix/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/remix/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/remix/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/svelte.tsx b/static/app/gettingStartedDocs/javascript/svelte.tsx index 25279a4a1e4996..008ff9ee40e5ef 100644 --- a/static/app/gettingStartedDocs/javascript/svelte.tsx +++ b/static/app/gettingStartedDocs/javascript/svelte.tsx @@ -38,7 +38,7 @@ Sentry.init({ ? 
` Sentry.feedbackIntegration({ // Additional SDK configuration goes in here, for example: -colorScheme: "light", +colorScheme: "system", ${getFeedbackConfigOptions(params.feedbackOptions)}}),` : '' }${ @@ -237,7 +237,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/svelte/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/svelte/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/svelte/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/sveltekit.tsx b/static/app/gettingStartedDocs/javascript/sveltekit.tsx index 55c78fb1762977..a6d8752cd62f33 100644 --- a/static/app/gettingStartedDocs/javascript/sveltekit.tsx +++ b/static/app/gettingStartedDocs/javascript/sveltekit.tsx @@ -153,7 +153,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/sveltekit/user-feedback/', + linkConfig: + 'https://docs.sentry.io/platforms/javascript/guides/sveltekit/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/sveltekit/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { diff --git a/static/app/gettingStartedDocs/javascript/vue.tsx b/static/app/gettingStartedDocs/javascript/vue.tsx index 7576e055f8d501..a72c0dab216340 100644 --- a/static/app/gettingStartedDocs/javascript/vue.tsx +++ b/static/app/gettingStartedDocs/javascript/vue.tsx @@ -308,7 +308,10 @@ const feedbackOnboarding: OnboardingConfig = { { type: StepType.CONFIGURE, description: getFeedbackConfigureDescription({ - link: 'https://docs.sentry.io/platforms/javascript/guides/vue/user-feedback/', + linkConfig: + 
'https://docs.sentry.io/platforms/javascript/guides/vue/user-feedback/configuration/', + linkButton: + 'https://docs.sentry.io/platforms/javascript/guides/vue/user-feedback/configuration/#bring-your-own-button', }), configurations: [ { From c6fa6682d11627d1cbf49227b6748888533cb212 Mon Sep 17 00:00:00 2001 From: Leander Rodrigues Date: Tue, 5 Mar 2024 12:05:50 -0800 Subject: [PATCH 056/145] chore(commit-context): Remove unused feature flag (#66345) With https://github.com/getsentry/sentry/pull/65346 and https://github.com/getsentry/getsentry/pull/12983 merged, self-hosted and all organizations on SaaS now have access to the commit-context feature for suspect commits. The feature flag can be safely removed. --- src/sentry/conf/server.py | 2 - src/sentry/features/permanent.py | 1 - src/sentry/tasks/post_process.py | 5 +- .../api/endpoints/test_event_committers.py | 210 +++++++++--------- .../api/serializers/test_organization.py | 1 - .../notifications/utils/test_participants.py | 82 ++----- tests/sentry/tasks/test_post_process.py | 4 - tests/sentry/utils/test_committers.py | 3 - 8 files changed, 118 insertions(+), 190 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index c75605ce4aa03d..7162fd7e94dc0a 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1479,8 +1479,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:codecov-commit-sha-from-git-blame": False, # The overall flag for codecov integration, gated by plans. 
"organizations:codecov-integration": False, - # Enable the Commit Context feature - "organizations:commit-context": True, # Enable alerting based on crash free sessions/users "organizations:crash-rate-alerts": True, # Enable creating organizations within sentry diff --git a/src/sentry/features/permanent.py b/src/sentry/features/permanent.py index 38c3e415b35633..3c22b5cecdb565 100644 --- a/src/sentry/features/permanent.py +++ b/src/sentry/features/permanent.py @@ -16,7 +16,6 @@ def register_permanent_features(manager: FeatureManager): "organizations:advanced-search", "organizations:app-store-connect-multiple", "organizations:change-alerts", - "organizations:commit-context", "organizations:codecov-integration", "organizations:crash-rate-alerts", "organizations:custom-symbol-sources", diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index 3bb99bdbaa9813..9becab9da18d32 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -1174,10 +1174,7 @@ def process_commits(job: PostProcessJob) -> None: # Cache the integrations check for 4 hours cache.set(integration_cache_key, has_integrations, 14400) - if ( - features.has("organizations:commit-context", event.project.organization) - and has_integrations - ): + if has_integrations: if not job["group_state"]["is_new"]: return diff --git a/tests/sentry/api/endpoints/test_event_committers.py b/tests/sentry/api/endpoints/test_event_committers.py index ec3fcfc8c09063..4ce8a048520bd7 100644 --- a/tests/sentry/api/endpoints/test_event_committers.py +++ b/tests/sentry/api/endpoints/test_event_committers.py @@ -143,114 +143,112 @@ def test_null_stacktrace(self): response = self.client.get(url, format="json") assert response.status_code == 200, response.content - def test_with_commit_context_feature_flag(self): - with self.feature({"organizations:commit-context": True}): - self.login_as(user=self.user) - self.repo = Repository.objects.create( - 
organization_id=self.organization.id, - name="example", - integration_id=self.integration.id, - ) - self.commit = self.create_commit( - project=self.project, - repo=self.repo, - author=self.create_commit_author(project=self.project, user=self.user), - key="asdfwreqr", - message="placeholder commit message", - ) - event = self.store_event( - data={ - "fingerprint": ["group1"], - "timestamp": iso_format(before_now(minutes=1)), - "stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"]), - }, - project_id=self.project.id, - ) - - GroupOwner.objects.create( - group=event.group, - user_id=self.user.id, - project=self.project, - organization=self.organization, - type=GroupOwnerType.SUSPECT_COMMIT.value, - context={"commitId": self.commit.id}, - ) - - url = reverse( - "sentry-api-0-event-file-committers", - kwargs={ - "event_id": event.event_id, - "project_slug": event.project.slug, - "organization_slug": event.project.organization.slug, - }, - ) + def test_with_commit_context(self): + self.login_as(user=self.user) + self.repo = Repository.objects.create( + organization_id=self.organization.id, + name="example", + integration_id=self.integration.id, + ) + self.commit = self.create_commit( + project=self.project, + repo=self.repo, + author=self.create_commit_author(project=self.project, user=self.user), + key="asdfwreqr", + message="placeholder commit message", + ) + event = self.store_event( + data={ + "fingerprint": ["group1"], + "timestamp": iso_format(before_now(minutes=1)), + "stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"]), + }, + project_id=self.project.id, + ) + + GroupOwner.objects.create( + group=event.group, + user_id=self.user.id, + project=self.project, + organization=self.organization, + type=GroupOwnerType.SUSPECT_COMMIT.value, + context={"commitId": self.commit.id}, + ) - response = self.client.get(url, format="json") - assert response.status_code == 200, response.content - assert len(response.data["committers"]) == 1 - assert 
response.data["committers"][0]["author"]["username"] == "admin@localhost" - commits = response.data["committers"][0]["commits"] - assert len(commits) == 1 - assert commits[0]["message"] == "placeholder commit message" - assert commits[0]["suspectCommitType"] == "via SCM integration" + url = reverse( + "sentry-api-0-event-file-committers", + kwargs={ + "event_id": event.event_id, + "project_slug": event.project.slug, + "organization_slug": event.project.organization.slug, + }, + ) + + response = self.client.get(url, format="json") + assert response.status_code == 200, response.content + assert len(response.data["committers"]) == 1 + assert response.data["committers"][0]["author"]["username"] == "admin@localhost" + commits = response.data["committers"][0]["commits"] + assert len(commits) == 1 + assert commits[0]["message"] == "placeholder commit message" + assert commits[0]["suspectCommitType"] == "via SCM integration" def test_with_commit_context_pull_request(self): - with self.feature({"organizations:commit-context": True}): - self.login_as(user=self.user) - self.repo = Repository.objects.create( - organization_id=self.organization.id, - name="example", - integration_id=self.integration.id, - ) - commit_author = self.create_commit_author(project=self.project, user=self.user) - self.commit = self.create_commit( - project=self.project, - repo=self.repo, - author=commit_author, - key="asdfwreqr", - message="placeholder commit message", - ) - pull_request = PullRequest.objects.create( - organization_id=self.organization.id, - repository_id=self.repo.id, - key="9", - author=commit_author, - message="waddap", - title="cool pr", - merge_commit_sha=self.commit.key, - ) - event = self.store_event( - data={ - "fingerprint": ["group1"], - "timestamp": iso_format(before_now(minutes=1)), - "stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"]), - }, - project_id=self.project.id, - ) - - GroupOwner.objects.create( - group=event.group, - user_id=self.user.id, - 
project=self.project, - organization=self.organization, - type=GroupOwnerType.SUSPECT_COMMIT.value, - context={"commitId": self.commit.id}, - ) - - url = reverse( - "sentry-api-0-event-file-committers", - kwargs={ - "event_id": event.event_id, - "project_slug": event.project.slug, - "organization_slug": event.project.organization.slug, - }, - ) + self.login_as(user=self.user) + self.repo = Repository.objects.create( + organization_id=self.organization.id, + name="example", + integration_id=self.integration.id, + ) + commit_author = self.create_commit_author(project=self.project, user=self.user) + self.commit = self.create_commit( + project=self.project, + repo=self.repo, + author=commit_author, + key="asdfwreqr", + message="placeholder commit message", + ) + pull_request = PullRequest.objects.create( + organization_id=self.organization.id, + repository_id=self.repo.id, + key="9", + author=commit_author, + message="waddap", + title="cool pr", + merge_commit_sha=self.commit.key, + ) + event = self.store_event( + data={ + "fingerprint": ["group1"], + "timestamp": iso_format(before_now(minutes=1)), + "stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"]), + }, + project_id=self.project.id, + ) - response = self.client.get(url, format="json") - assert response.status_code == 200, response.content + GroupOwner.objects.create( + group=event.group, + user_id=self.user.id, + project=self.project, + organization=self.organization, + type=GroupOwnerType.SUSPECT_COMMIT.value, + context={"commitId": self.commit.id}, + ) + + url = reverse( + "sentry-api-0-event-file-committers", + kwargs={ + "event_id": event.event_id, + "project_slug": event.project.slug, + "organization_slug": event.project.organization.slug, + }, + ) + + response = self.client.get(url, format="json") + assert response.status_code == 200, response.content - commits = response.data["committers"][0]["commits"] - assert len(commits) == 1 - assert "pullRequest" in commits[0] - assert 
commits[0]["pullRequest"]["id"] == pull_request.key - assert commits[0]["suspectCommitType"] == "via SCM integration" + commits = response.data["committers"][0]["commits"] + assert len(commits) == 1 + assert "pullRequest" in commits[0] + assert commits[0]["pullRequest"]["id"] == pull_request.key + assert commits[0]["suspectCommitType"] == "via SCM integration" diff --git a/tests/sentry/api/serializers/test_organization.py b/tests/sentry/api/serializers/test_organization.py index 06a419a6b06e56..da83af575e378a 100644 --- a/tests/sentry/api/serializers/test_organization.py +++ b/tests/sentry/api/serializers/test_organization.py @@ -66,7 +66,6 @@ def test_simple(self): assert result["features"] == { "advanced-search", "change-alerts", - "commit-context", "crash-rate-alerts", "custom-symbol-sources", "data-forwarding", diff --git a/tests/sentry/notifications/utils/test_participants.py b/tests/sentry/notifications/utils/test_participants.py index 3438949a54fcd4..4fd8da0bb8c982 100644 --- a/tests/sentry/notifications/utils/test_participants.py +++ b/tests/sentry/notifications/utils/test_participants.py @@ -11,7 +11,6 @@ from sentry.models.commit import Commit from sentry.models.groupassignee import GroupAssignee from sentry.models.groupowner import GroupOwner, GroupOwnerType -from sentry.models.grouprelease import GroupRelease from sentry.models.notificationsettingoption import NotificationSettingOption from sentry.models.notificationsettingprovider import NotificationSettingProvider from sentry.models.project import Project @@ -485,55 +484,11 @@ def test_send_to_current_assignee_and_owners(self): def test_send_to_suspect_committers(self): """ - Test suspect committer is added as suggested assignee, where "organizations:commit-context" - flag is not on. 
- """ - # TODO: Delete this test once Commit Context has GA'd - release = self.create_release(project=self.project, version="v12") - event = self.store_event( - data={ - "platform": "java", - "stacktrace": STACKTRACE, - "tags": {"sentry:release": release.version}, - }, - project_id=self.project.id, - ) - release.set_commits( - [ - { - "id": "a" * 40, - "repository": self.repo.name, - "author_email": "suspectcommitter@example.com", - "author_name": "Suspect Committer", - "message": "fix: Fix bug", - "patch_set": [ - {"path": "src/main/java/io/sentry/example/Application.java", "type": "M"} - ], - }, - ] - ) - assert event.group is not None - GroupRelease.objects.create( - group_id=event.group.id, project_id=self.project.id, release_id=release.id - ) - - self.assert_recipients_are( - self.get_send_to_owners(event), - email=[self.user_suspect_committer.id, self.user.id], - slack=[self.user_suspect_committer.id, self.user.id], - ) - - @with_feature("organizations:commit-context") - def test_send_to_suspect_committers_with_commit_context_feature_flag(self): - """ - Test suspect committer is added as suggested assignee, where "organizations:commit-context" - flag is on. + Test suspect committer is added as suggested assignee """ self.commit = self.create_sample_commit(self.user_suspect_committer) event = self.store_event( - data={ - "stacktrace": STACKTRACE, - }, + data={"stacktrace": STACKTRACE}, project_id=self.project.id, ) @@ -551,11 +506,9 @@ def test_send_to_suspect_committers_with_commit_context_feature_flag(self): slack=[self.user_suspect_committer.id, self.user.id], ) - @with_feature("organizations:commit-context") - def test_send_to_suspect_committers_no_owners_with_commit_context_feature_flag(self): + def test_send_to_suspect_committers_no_owners(self): """ - Test suspect committer is added as suggested assignee, where no user owns the file and - where the "organizations:commit-context" flag is on. 
+ Test suspect committer is added as suggested assignee, where no user owns the file """ organization = self.create_organization(name="New Organization") project_suspect_committer = self.create_project( @@ -605,17 +558,14 @@ def test_send_to_suspect_committers_no_owners_with_commit_context_feature_flag(s slack=[self.user_suspect_committer.id], ) - @with_feature("organizations:commit-context") - def test_send_to_suspect_committers_dupe_with_commit_context_feature_flag(self): + def test_send_to_suspect_committers_dupe(self): """ Test suspect committer/owner is added as suggested assignee once where the suspect - committer is also the owner and where the "organizations:commit-context" flag is on. + committer is also the owner. """ commit = self.create_sample_commit(self.user) event = self.store_event( - data={ - "stacktrace": STACKTRACE, - }, + data={"stacktrace": STACKTRACE}, project_id=self.project.id, ) @@ -631,17 +581,14 @@ def test_send_to_suspect_committers_dupe_with_commit_context_feature_flag(self): self.get_send_to_owners(event), email=[self.user.id], slack=[self.user.id] ) - @with_feature("organizations:commit-context") - def test_send_to_suspect_committers_exception_with_commit_context_feature_flag(self): + def test_send_to_suspect_committers_exception(self): """ Test determine_eligible_recipients throws an exception when get_suspect_committers throws - an exception and returns the file owner, where "organizations:commit-context" flag is on. 
+ an exception and returns the file owner """ invalid_commit_id = 10000 event = self.store_event( - data={ - "stacktrace": STACKTRACE, - }, + data={"stacktrace": STACKTRACE}, project_id=self.project.id, ) @@ -657,20 +604,17 @@ def test_send_to_suspect_committers_exception_with_commit_context_feature_flag(s self.get_send_to_owners(event), email=[self.user.id], slack=[self.user.id] ) - @with_feature("organizations:commit-context") - def test_send_to_suspect_committers_not_project_member_commit_context_feature_flag(self): + def test_send_to_suspect_committers_not_project_member(self): """ Test suspect committer is not added as suggested assignee where the suspect committer - is not part of the project and where the "organizations:commit-context" flag is on. + is not part of the project """ user_suspect_committer_no_team = self.create_user( email="suspectcommitternoteam@example.com", is_active=True ) commit = self.create_sample_commit(user_suspect_committer_no_team) event = self.store_event( - data={ - "stacktrace": STACKTRACE, - }, + data={"stacktrace": STACKTRACE}, project_id=self.project.id, ) diff --git a/tests/sentry/tasks/test_post_process.py b/tests/sentry/tasks/test_post_process.py index 857f42cbfbd926..628dc950091530 100644 --- a/tests/sentry/tasks/test_post_process.py +++ b/tests/sentry/tasks/test_post_process.py @@ -1459,7 +1459,6 @@ def setUp(self): ) ] - @with_feature("organizations:commit-context") @patch( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", return_value=github_blame_return_value, @@ -1481,7 +1480,6 @@ def test_logic_fallback_no_scm(self, mock_get_commit_context): assert not mock_get_commit_context.called - @with_feature("organizations:commit-context") @patch( "sentry.integrations.github_enterprise.GitHubEnterpriseIntegration.get_commit_context_all_frames", ) @@ -1517,7 +1515,6 @@ def test_github_enterprise(self, mock_get_commit_context): type=GroupOwnerType.SUSPECT_COMMIT.value, ) - 
@with_feature("organizations:commit-context") @patch("sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames") def test_skip_when_not_is_new(self, mock_get_commit_context): """ @@ -1538,7 +1535,6 @@ def test_skip_when_not_is_new(self, mock_get_commit_context): type=GroupOwnerType.SUSPECT_COMMIT.value, ).exists() - @with_feature("organizations:commit-context") @patch( "sentry.integrations.github.GitHubIntegration.get_commit_context_all_frames", ) diff --git a/tests/sentry/utils/test_committers.py b/tests/sentry/utils/test_committers.py index b2351577e402f6..42f882c31123a5 100644 --- a/tests/sentry/utils/test_committers.py +++ b/tests/sentry/utils/test_committers.py @@ -19,7 +19,6 @@ from sentry.silo import SiloMode from sentry.testutils.cases import TestCase from sentry.testutils.helpers.datetime import before_now, iso_format -from sentry.testutils.helpers.features import with_feature from sentry.testutils.silo import assume_test_silo_mode, region_silo_test from sentry.utils.committers import ( _get_commit_file_changes, @@ -739,7 +738,6 @@ def test_matching(self): assert result[0]["commits"][0]["id"] == "a" * 40 assert result[0]["commits"][0]["suspectCommitType"] == "via commit in release" - @with_feature("organizations:commit-context") def test_no_author(self): with assume_test_silo_mode(SiloMode.CONTROL): model = self.create_provider_integration( @@ -926,7 +924,6 @@ def test_no_commits(self): with pytest.raises(Commit.DoesNotExist): get_serialized_event_file_committers(self.project, event) - @with_feature("organizations:commit-context") def test_commit_context_fallback(self): with assume_test_silo_mode(SiloMode.CONTROL): Integration.objects.all().delete() From 274acff072acb3d054f5d1c570bb162388cfc497 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Tue, 5 Mar 2024 15:14:41 -0500 Subject: [PATCH 057/145] ref(crons): Remove usage of last_state_change (#66328) This code is no longer needed now that we have stopped generating legacy event hashes 
in GH-66250 --- src/sentry/monitors/logic/mark_failed.py | 3 +-- src/sentry/monitors/logic/mark_ok.py | 6 +++--- tests/sentry/monitors/logic/test_mark_failed.py | 12 +----------- tests/sentry/monitors/logic/test_mark_ok.py | 5 ----- 4 files changed, 5 insertions(+), 21 deletions(-) diff --git a/src/sentry/monitors/logic/mark_failed.py b/src/sentry/monitors/logic/mark_failed.py index 35f070d9224fc1..34262bc6468fb4 100644 --- a/src/sentry/monitors/logic/mark_failed.py +++ b/src/sentry/monitors/logic/mark_failed.py @@ -138,8 +138,7 @@ def mark_failed_threshold(failed_checkin: MonitorCheckIn, failure_issue_threshol # change monitor status + update fingerprint timestamp monitor_env.status = MonitorStatus.ERROR - monitor_env.last_state_change = monitor_env.last_checkin - monitor_env.save(update_fields=("status", "last_state_change")) + monitor_env.save(update_fields=("status",)) # Do not create incident if monitor is muted if not monitor_muted: diff --git a/src/sentry/monitors/logic/mark_ok.py b/src/sentry/monitors/logic/mark_ok.py index e113220a550186..f331d21b4590f2 100644 --- a/src/sentry/monitors/logic/mark_ok.py +++ b/src/sentry/monitors/logic/mark_ok.py @@ -62,11 +62,11 @@ def mark_ok(checkin: MonitorCheckIn, ts: datetime): # Only send an occurrence if we have an active incident for grouphash in active_incidents.values_list("grouphash", flat=True): resolve_incident_group(grouphash, checkin.monitor.project_id) - if active_incidents.update( + + active_incidents.update( resolving_checkin=checkin, resolving_timestamp=checkin.date_added, - ): - params["last_state_change"] = ts + ) else: # Don't update status if incident isn't recovered params.pop("status", None) diff --git a/tests/sentry/monitors/logic/test_mark_failed.py b/tests/sentry/monitors/logic/test_mark_failed.py index 4f5c1182fd740e..174f60886f54c7 100644 --- a/tests/sentry/monitors/logic/test_mark_failed.py +++ b/tests/sentry/monitors/logic/test_mark_failed.py @@ -642,7 +642,6 @@ def 
test_mark_failed_issue_threshold(self, mock_produce_occurrence_to_kafka): monitor=monitor, environment_id=self.environment.id, status=MonitorStatus.OK, - last_state_change=None, ) MonitorCheckIn.objects.create( @@ -667,8 +666,6 @@ def test_mark_failed_issue_threshold(self, mock_produce_occurrence_to_kafka): # failure has not hit threshold, monitor should be in an OK status monitor_environment = MonitorEnvironment.objects.get(id=monitor_environment.id) assert monitor_environment.status == MonitorStatus.OK - # check that timestamp has not updated - assert monitor_environment.last_state_change is None # create another OK check-in to break the chain MonitorCheckIn.objects.create( @@ -694,8 +691,6 @@ def test_mark_failed_issue_threshold(self, mock_produce_occurrence_to_kafka): # failure has hit threshold, monitor should be in a failed state monitor_environment = MonitorEnvironment.objects.get(id=monitor_environment.id) assert monitor_environment.status == MonitorStatus.ERROR - assert monitor_environment.last_state_change == monitor_environment.last_checkin - prior_last_state_change = monitor_environment.last_state_change # check that an incident has been created correctly monitor_incidents = MonitorIncident.objects.filter(monitor_environment=monitor_environment) @@ -713,7 +708,7 @@ def test_mark_failed_issue_threshold(self, mock_produce_occurrence_to_kafka): occurrence = occurrence.to_dict() assert occurrence["fingerprint"][0] == monitor_incident.grouphash - # send another check-in to make sure we don't update last_state_change + # send another check-in to make sure the incident does not change status = next(failure_statuses) checkin = MonitorCheckIn.objects.create( monitor=monitor, @@ -724,7 +719,6 @@ def test_mark_failed_issue_threshold(self, mock_produce_occurrence_to_kafka): mark_failed(checkin, ts=checkin.date_added) monitor_environment = MonitorEnvironment.objects.get(id=monitor_environment.id) assert monitor_environment.status == MonitorStatus.ERROR - assert 
monitor_environment.last_state_change == prior_last_state_change # check that incident has not changed monitor_incident = MonitorIncident.objects.get(id=monitor_incident.id) @@ -761,7 +755,6 @@ def test_mark_failed_issue_threshold_timeout(self, mock_produce_occurrence_to_ka monitor=monitor, environment_id=self.environment.id, status=MonitorStatus.OK, - last_state_change=None, ) MonitorCheckIn.objects.create( @@ -794,8 +787,6 @@ def test_mark_failed_issue_threshold_timeout(self, mock_produce_occurrence_to_ka # failure has not hit threshold, monitor should be in an OK status monitor_environment = MonitorEnvironment.objects.get(id=monitor_environment.id) assert monitor_environment.status == MonitorStatus.OK - # check that timestamp has not updated - assert monitor_environment.last_state_change is None checkin = checkins.pop(0) checkin.update(status=CheckInStatus.TIMEOUT) @@ -804,7 +795,6 @@ def test_mark_failed_issue_threshold_timeout(self, mock_produce_occurrence_to_ka # failure has hit threshold, monitor should be in a failed state monitor_environment = MonitorEnvironment.objects.get(id=monitor_environment.id) assert monitor_environment.status == MonitorStatus.ERROR - assert monitor_environment.last_state_change == monitor_environment.last_checkin # check that an incident has been created correctly monitor_incidents = MonitorIncident.objects.filter(monitor_environment=monitor_environment) diff --git a/tests/sentry/monitors/logic/test_mark_ok.py b/tests/sentry/monitors/logic/test_mark_ok.py index 6d75c28c21ab96..c9269b17f8a2c2 100644 --- a/tests/sentry/monitors/logic/test_mark_ok.py +++ b/tests/sentry/monitors/logic/test_mark_ok.py @@ -91,7 +91,6 @@ def test_mark_ok_recovery_threshold(self, mock_produce_occurrence_to_kafka): monitor=monitor, environment_id=self.environment.id, status=MonitorStatus.ERROR, - last_state_change=None, ) first_checkin = MonitorCheckIn.objects.create( monitor=monitor, @@ -138,8 +137,6 @@ def test_mark_ok_recovery_threshold(self, 
mock_produce_occurrence_to_kafka): assert monitor_environment.status != MonitorStatus.OK assert monitor_environment.next_checkin == now + timedelta(minutes=1) - # check that timestamp has not updated - assert monitor_environment.last_state_change is None # Incident has not resolved assert incident.resolving_checkin is None assert incident.resolving_timestamp is None @@ -182,8 +179,6 @@ def test_mark_ok_recovery_threshold(self, mock_produce_occurrence_to_kafka): assert monitor_environment.status == MonitorStatus.OK assert monitor_environment.next_checkin == last_checkin.date_added + timedelta(minutes=1) - # check that monitor environment has updated timestamp used for fingerprinting - assert monitor_environment.last_state_change == monitor_environment.last_checkin # Incident resolved assert incident.resolving_checkin == last_checkin assert incident.resolving_timestamp == last_checkin.date_added From 1b4fba041319748672fb306f9ea4a6f70bd560b6 Mon Sep 17 00:00:00 2001 From: Seiji Chew <67301797+schew2381@users.noreply.github.com> Date: Tue, 5 Mar 2024 12:21:05 -0800 Subject: [PATCH 058/145] fix(staff): Set access by calling parent method before checking staff (#66266) --- src/sentry/api/permissions.py | 32 ++++++++++++++++++++++++++------ 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/src/sentry/api/permissions.py b/src/sentry/api/permissions.py index afba82240fecaf..333fb579833cc6 100644 --- a/src/sentry/api/permissions.py +++ b/src/sentry/api/permissions.py @@ -78,16 +78,36 @@ class (that is not StaffPermission) require this mixin because staff does not gi staff_allowed_methods = {"GET", "POST", "PUT", "DELETE"} def has_permission(self, request, *args, **kwargs) -> bool: - # Check for staff before calling super to avoid catching exceptions from super - if request.method in self.staff_allowed_methods and is_active_staff(request): + """ + Calls the parent class's has_permission method. 
If it returns False or + raises an exception and the method is allowed by the mixin, we then check + if the request is from an active staff. Raised exceptions are not caught + if the request is not allowed by the mixin or from an active staff. + """ + try: + if super().has_permission(request, *args, **kwargs): + return True + except Exception: + if not (request.method in self.staff_allowed_methods and is_active_staff(request)): + raise return True - return super().has_permission(request, *args, **kwargs) + return request.method in self.staff_allowed_methods and is_active_staff(request) def has_object_permission(self, request, *args, **kwargs) -> bool: - # Check for staff before calling super to avoid catching exceptions from super - if request.method in self.staff_allowed_methods and is_active_staff(request): + """ + Calls the parent class's has_object_permission method. If it returns False or + raises an exception and the method is allowed by the mixin, we then check + if the request is from an active staff. Raised exceptions are not caught + if the request is not allowed by the mixin or from an active staff. 
+ """ + try: + if super().has_object_permission(request, *args, **kwargs): + return True + except Exception: + if not (request.method in self.staff_allowed_methods and is_active_staff(request)): + raise return True - return super().has_object_permission(request, *args, **kwargs) + return request.method in self.staff_allowed_methods and is_active_staff(request) def is_not_2fa_compliant(self, request, *args, **kwargs) -> bool: return super().is_not_2fa_compliant(request, *args, **kwargs) and not is_active_staff( From 468f10f34f7f68546d708d413993ec1913a22bdd Mon Sep 17 00:00:00 2001 From: John Date: Tue, 5 Mar 2024 12:37:14 -0800 Subject: [PATCH 059/145] fix(metrics): Update indexer telemetry to handle no messages (#66350) ### Overview Update indexer telemetry to handle no messages in case everything is killswitched --- .../sentry_metrics/consumers/indexer/batch.py | 51 ++++++++++--------- 1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/src/sentry/sentry_metrics/consumers/indexer/batch.py b/src/sentry/sentry_metrics/consumers/indexer/batch.py index c47ef2dba51780..3d3b1251d9b2d5 100644 --- a/src/sentry/sentry_metrics/consumers/indexer/batch.py +++ b/src/sentry/sentry_metrics/consumers/indexer/batch.py @@ -520,6 +520,8 @@ def reconstruct_messages( with metrics.timer("metrics_consumer.reconstruct_messages.emit_payload_metrics"): for use_case_id, metrics_by_type in self._message_metrics.items(): for metric_type, batch_metric in metrics_by_type.items(): + if batch_metric.message_count == 0: + continue metrics.incr( "metrics_consumer.process_message.messages_seen", amount=batch_metric.message_count, @@ -566,33 +568,34 @@ def reconstruct_messages( for use_case_metrics in self._message_metrics.values() for type_metrics in use_case_metrics.values() ) - metrics.gauge( - "metrics_consumer.process_message.message.avg_size_in_batch", - sum( - type_metrics.total_bytes - for use_case_metrics in self._message_metrics.values() - for type_metrics in 
use_case_metrics.values() + if not num_messages == 0: + metrics.gauge( + "metrics_consumer.process_message.message.avg_size_in_batch", + sum( + type_metrics.total_bytes + for use_case_metrics in self._message_metrics.values() + for type_metrics in use_case_metrics.values() + ) + / num_messages, ) - / num_messages, - ) - metrics.gauge( - "metrics_consumer.process_message.message.avg_tags_len_in_batch", - sum( - type_metrics.total_tags_len - for use_case_metrics in self._message_metrics.values() - for type_metrics in use_case_metrics.values() + metrics.gauge( + "metrics_consumer.process_message.message.avg_tags_len_in_batch", + sum( + type_metrics.total_tags_len + for use_case_metrics in self._message_metrics.values() + for type_metrics in use_case_metrics.values() + ) + / num_messages, ) - / num_messages, - ) - metrics.gauge( - "metrics_consumer.process_message.message.avg_value_len_in_batch", - sum( - type_metrics.total_value_len - for use_case_metrics in self._message_metrics.values() - for type_metrics in use_case_metrics.values() + metrics.gauge( + "metrics_consumer.process_message.message.avg_value_len_in_batch", + sum( + type_metrics.total_value_len + for use_case_metrics in self._message_metrics.values() + for type_metrics in use_case_metrics.values() + ) + / num_messages, ) - / num_messages, - ) return IndexerOutputMessageBatch( new_messages, From 242e6a6c636cf95998a0e6981a889bf08fa20ecc Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Tue, 5 Mar 2024 12:58:50 -0800 Subject: [PATCH 060/145] feat(discover): Remove replay_id column from default discover table view (#66346) Fixes https://github.com/getsentry/sentry/issues/66322 --- static/app/views/discover/results.tsx | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/static/app/views/discover/results.tsx b/static/app/views/discover/results.tsx index bcdcf016179d0f..c281b73c41e9ef 100644 --- a/static/app/views/discover/results.tsx +++ b/static/app/views/discover/results.tsx @@ 
-299,14 +299,14 @@ export class Results extends Component { // If the view is not valid, redirect to a known valid state. const {location, organization, selection, isHomepage, savedQuery} = this.props; - const isReplayEnabled = organization.features.includes('session-replay'); - const defaultEventView = Object.assign({}, DEFAULT_EVENT_VIEW, { - fields: isReplayEnabled - ? DEFAULT_EVENT_VIEW.fields.concat(['replayId']) - : DEFAULT_EVENT_VIEW.fields, - }); - - const query = isHomepage && savedQuery ? omit(savedQuery, 'id') : defaultEventView; + // const isReplayEnabled = organization.features.includes('session-replay'); + // const defaultEventView = Object.assign({}, DEFAULT_EVENT_VIEW, { + // fields: isReplayEnabled + // ? DEFAULT_EVENT_VIEW.fields.concat(['replayId']) + // : DEFAULT_EVENT_VIEW.fields, + // }); + + const query = isHomepage && savedQuery ? omit(savedQuery, 'id') : DEFAULT_EVENT_VIEW; const nextEventView = EventView.fromNewQueryWithLocation(query, location); if (nextEventView.project.length === 0 && selection.projects) { nextEventView.project = selection.projects; From 0029dd87b7b7b98fb77dcb11a635276e208824ce Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Tue, 5 Mar 2024 13:04:48 -0800 Subject: [PATCH 061/145] fix(ui): Remove custom prism text selection styles (#66240) The existing styles were too similar to the highlighted line color. Using browser default coloring works well enough. 
--- static/app/styles/prism.tsx | 7 ------- static/app/utils/theme.tsx | 2 -- 2 files changed, 9 deletions(-) diff --git a/static/app/styles/prism.tsx b/static/app/styles/prism.tsx index 69373571a7b74c..00d39fbdb7fdc4 100644 --- a/static/app/styles/prism.tsx +++ b/static/app/styles/prism.tsx @@ -123,13 +123,6 @@ export const prismStyles = (theme: Theme) => css` } } - pre[class*='language-']::selection, - code[class*='language-']::selection, - code[class*='language-'] *::selection { - text-shadow: none; - background: var(--prism-selected); - } - pre[data-line] { position: relative; } diff --git a/static/app/utils/theme.tsx b/static/app/utils/theme.tsx index 508e0700ebc7aa..9c781967b0f132 100644 --- a/static/app/utils/theme.tsx +++ b/static/app/utils/theme.tsx @@ -141,7 +141,6 @@ export const darkColors = { const prismLight = { '--prism-base': '#332B3B', - '--prism-selected': '#F5F3F7', '--prism-inline-code': '#332B3B', '--prism-inline-code-background': '#F5F3F7', '--prism-highlight-background': '#5C78A31C', @@ -158,7 +157,6 @@ const prismLight = { const prismDark = { '--prism-base': '#D6D0DC', - '--prism-selected': '#393041', '--prism-inline-code': '#D6D0DC', '--prism-inline-code-background': '#18121C', '--prism-highlight-background': '#A8A2C31C', From bf65b9e52fed3fda72e7ad55dcb90febec807f96 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Tue, 5 Mar 2024 16:30:09 -0500 Subject: [PATCH 062/145] ref(crons): Use `id__in` query for failed check-in occurrence creation (#66327) Follow up of https://github.com/getsentry/sentry/pull/66250/files#r1512012737 --- src/sentry/monitors/logic/mark_failed.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/sentry/monitors/logic/mark_failed.py b/src/sentry/monitors/logic/mark_failed.py index 34262bc6468fb4..0d97ba621595b1 100644 --- a/src/sentry/monitors/logic/mark_failed.py +++ b/src/sentry/monitors/logic/mark_failed.py @@ -181,9 +181,9 @@ def mark_failed_threshold(failed_checkin: MonitorCheckIn, 
failure_issue_threshol # Do not create event/occurrence if we don't have a fingerprint if fingerprint: - for previous_checkin in previous_checkins: - checkin_from_db = MonitorCheckIn.objects.get(id=previous_checkin["id"]) - create_issue_platform_occurrence(checkin_from_db, fingerprint) + checkins = MonitorCheckIn.objects.filter(id__in=[c["id"] for c in previous_checkins]) + for previous_checkin in checkins: + create_issue_platform_occurrence(previous_checkin, fingerprint) monitor_environment_failed.send(monitor_environment=monitor_env, sender=type(monitor_env)) From ff74225790e83c3fa996d4a047ff850969811e94 Mon Sep 17 00:00:00 2001 From: Gabe Villalobos Date: Tue, 5 Mar 2024 13:40:34 -0800 Subject: [PATCH 063/145] fix(hc): Provide region specific urls for chunk-upload config (#66320) --- src/sentry/api/endpoints/chunk.py | 8 ++- src/sentry/options/defaults.py | 4 ++ .../sentry/api/endpoints/test_chunk_upload.py | 53 ++++++++++++++----- 3 files changed, 51 insertions(+), 14 deletions(-) diff --git a/src/sentry/api/endpoints/chunk.py b/src/sentry/api/endpoints/chunk.py index fefc9541b01495..13e7d645d47536 100644 --- a/src/sentry/api/endpoints/chunk.py +++ b/src/sentry/api/endpoints/chunk.py @@ -14,6 +14,7 @@ from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import region_silo_endpoint from sentry.api.bases.organization import OrganizationEndpoint, OrganizationReleasePermission +from sentry.api.utils import generate_region_url from sentry.models.files.fileblob import FileBlob from sentry.ratelimits.config import RateLimitConfig from sentry.utils.files import get_max_file_size @@ -81,7 +82,12 @@ def get(self, request: Request, organization) -> Response: url = relative_url.lstrip(API_PREFIX) # Otherwise, if we do not support them, return an absolute, versioned endpoint with a default, system-wide prefix else: - url = absolute_uri(relative_url) + # We need to generate region specific upload URLs when possible to avoid hitting the API proxy + 
# which tends to cause timeouts and performance issues for uploads. + base_url = None + if options.get("hybrid_cloud.use_region_specific_upload_url"): + base_url = generate_region_url() + url = absolute_uri(relative_url, base_url) else: # If user overridden upload url prefix, we want an absolute, versioned endpoint, with user-configured prefix url = absolute_uri(relative_url, endpoint) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 74857bec1414aa..e0dc7921309729 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -1653,6 +1653,10 @@ register("hybrid_cloud.region-domain-allow-list", default=[], flags=FLAG_AUTOMATOR_MODIFIABLE) register("hybrid_cloud.region-user-allow-list", default=[], flags=FLAG_AUTOMATOR_MODIFIABLE) +register( + "hybrid_cloud.use_region_specific_upload_url", default=False, flags=FLAG_AUTOMATOR_MODIFIABLE +) + # Retry controls register("hybridcloud.regionsiloclient.retries", default=5, flags=FLAG_AUTOMATOR_MODIFIABLE) register("hybridcloud.rpc.retries", default=5, flags=FLAG_AUTOMATOR_MODIFIABLE) diff --git a/tests/sentry/api/endpoints/test_chunk_upload.py b/tests/sentry/api/endpoints/test_chunk_upload.py index efc829b22f241c..c4c246b1dfc4ed 100644 --- a/tests/sentry/api/endpoints/test_chunk_upload.py +++ b/tests/sentry/api/endpoints/test_chunk_upload.py @@ -15,12 +15,14 @@ MAX_CONCURRENCY, MAX_REQUEST_SIZE, ) +from sentry.api.utils import generate_region_url from sentry.models.apitoken import ApiToken from sentry.models.files.fileblob import FileBlob from sentry.models.files.utils import MAX_FILE_SIZE from sentry.models.organization import Organization from sentry.silo import SiloMode from sentry.testutils.cases import APITestCase +from sentry.testutils.helpers import override_options from sentry.testutils.silo import assume_test_silo_mode, region_silo_test @@ -51,12 +53,18 @@ def test_chunk_parameters(self): assert response.data["url"] == options.get("system.url-prefix") + 
self.url assert response.data["accept"] == CHUNK_UPLOAD_ACCEPT - options.set("system.upload-url-prefix", "test") - response = self.client.get( - self.url, HTTP_AUTHORIZATION=f"Bearer {self.token.token}", format="json" - ) + with override_options({"system.upload-url-prefix": "test"}): + response = self.client.get( + self.url, HTTP_AUTHORIZATION=f"Bearer {self.token.token}", format="json" + ) - assert response.data["url"] == options.get("system.upload-url-prefix") + self.url + assert response.data["url"] == options.get("system.upload-url-prefix") + self.url + + with override_options({"hybrid_cloud.use_region_specific_upload_url": True}): + response = self.client.get( + self.url, HTTP_AUTHORIZATION=f"Bearer {self.token.token}", format="json" + ) + assert response.data["url"] == generate_region_url() + self.url def test_accept_with_artifact_bundles_v2_option(self): with self.options({"sourcemaps.artifact_bundles.assemble_with_missing_chunks": False}): @@ -123,15 +131,34 @@ def test_relative_url_support(self): ) assert response.data["url"] == options.get("system.url-prefix") + self.url + # Test region upload URLs with option set + with override_options({"hybrid_cloud.use_region_specific_upload_url": True}): + # < 1.70.1 + response = self.client.get( + self.url, + HTTP_AUTHORIZATION=f"Bearer {self.token.token}", + HTTP_USER_AGENT="sentry-cli/1.70.0", + format="json", + ) + assert response.data["url"] == generate_region_url() + self.url + + response = self.client.get( + self.url, + HTTP_AUTHORIZATION=f"Bearer {self.token.token}", + HTTP_USER_AGENT="sentry-cli/0.69.3", + format="json", + ) + assert response.data["url"] == generate_region_url() + self.url + # user overridden upload url prefix has priority, even when calling from sentry-cli that supports relative urls - options.set("system.upload-url-prefix", "test") - response = self.client.get( - self.url, - HTTP_AUTHORIZATION=f"Bearer {self.token.token}", - HTTP_USER_AGENT="sentry-cli/1.70.1", - format="json", - ) - 
assert response.data["url"] == options.get("system.upload-url-prefix") + self.url + with override_options({"system.upload-url-prefix": "test"}): + response = self.client.get( + self.url, + HTTP_AUTHORIZATION=f"Bearer {self.token.token}", + HTTP_USER_AGENT="sentry-cli/1.70.1", + format="json", + ) + assert response.data["url"] == options.get("system.upload-url-prefix") + self.url def test_large_uploads(self): with self.feature("organizations:large-debug-files"): From e574575fc2586bfa32f3db471cfc2af69a5bd71a Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Tue, 5 Mar 2024 13:42:10 -0800 Subject: [PATCH 064/145] ref(feedback): add rust to platforms (#66356) SCR-20240305-lrbx --- static/app/data/platformCategories.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/static/app/data/platformCategories.tsx b/static/app/data/platformCategories.tsx index 3da0f1538d171f..bc05082e6507de 100644 --- a/static/app/data/platformCategories.tsx +++ b/static/app/data/platformCategories.tsx @@ -421,6 +421,7 @@ export const feedbackWebApiPlatforms: readonly PlatformKey[] = [ 'cordova', 'ruby-rack', 'ruby', + 'rust', 'native', 'native-qt', 'native', From db28b55aaa83e263e2c57d1714bbb9a78e4c484b Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Tue, 5 Mar 2024 21:49:11 +0000 Subject: [PATCH 065/145] Revert "ref: Move more code to the new way of defining kafka topics and overrides (#66283)" This reverts commit f7ffe5a3d31a87ac334c191cc8eb9550d2e1ebce. 
Co-authored-by: ayirr7 <47572810+ayirr7@users.noreply.github.com> --- src/sentry/conf/server.py | 10 ++-------- src/sentry/conf/types/kafka_definition.py | 2 +- src/sentry/conf/types/topic_definition.py | 2 -- src/sentry/consumers/__init__.py | 9 ++++----- src/sentry/eventstream/kafka/backend.py | 18 ++++++++---------- src/sentry/issues/attributes.py | 7 +++---- src/sentry/issues/producer.py | 7 +++---- src/sentry/monitors/tasks.py | 14 ++++++-------- src/sentry/replays/lib/kafka.py | 5 +++-- .../replays/usecases/ingest/dom_index.py | 5 +++-- src/sentry/runner/commands/devserver.py | 7 ++++--- src/sentry/sentry_metrics/configuration.py | 8 +++----- .../consumers/indexer/multiprocess.py | 3 +-- src/sentry/usage_accountant/accountant.py | 4 ++-- src/sentry/utils/kafka_config.py | 12 ++++++------ src/sentry/utils/outcomes.py | 5 ++--- tests/sentry/utils/test_outcomes.py | 17 +++++++++++------ tests/snuba/incidents/test_tasks.py | 17 +++++++---------- 18 files changed, 69 insertions(+), 83 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 7162fd7e94dc0a..63344a5753d3c5 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -21,6 +21,7 @@ from sentry.conf.types.logging_config import LoggingConfig from sentry.conf.types.role_dict import RoleDict from sentry.conf.types.sdk_config import ServerSdkConfig +from sentry.conf.types.topic_definition import TopicDefinition from sentry.utils import json # NOQA (used in getsentry config) from sentry.utils.celery import crontab_with_minute_jitter from sentry.utils.types import Type, type_from_value @@ -3528,16 +3529,9 @@ def build_cdc_postgres_init_db_volume(settings: Any) -> dict[str, dict[str, str] "shared-resources-usage": "default", } -from typing import TypedDict - - -class LegacyTopicDefinition(TypedDict): - cluster: str - - # Cluster configuration for each Kafka topic by name. 
# DEPRECATED -KAFKA_TOPICS: Mapping[str, LegacyTopicDefinition] = { +KAFKA_TOPICS: Mapping[str, TopicDefinition] = { KAFKA_EVENTS: {"cluster": "default"}, KAFKA_EVENTS_COMMIT_LOG: {"cluster": "default"}, KAFKA_TRANSACTIONS: {"cluster": "default"}, diff --git a/src/sentry/conf/types/kafka_definition.py b/src/sentry/conf/types/kafka_definition.py index 019850320ec941..f9f38687453b7b 100644 --- a/src/sentry/conf/types/kafka_definition.py +++ b/src/sentry/conf/types/kafka_definition.py @@ -71,7 +71,7 @@ class ConsumerDefinition(TypedDict, total=False): synchronize_commit_group_default: str synchronize_commit_log_topic_default: str - dlq_topic: Topic + dlq_topic: str dlq_max_invalid_ratio: float | None dlq_max_consecutive_count: int | None diff --git a/src/sentry/conf/types/topic_definition.py b/src/sentry/conf/types/topic_definition.py index bc5aaa44ddef80..41992b74d9ad78 100644 --- a/src/sentry/conf/types/topic_definition.py +++ b/src/sentry/conf/types/topic_definition.py @@ -5,5 +5,3 @@ class TopicDefinition(TypedDict): cluster: str - # The topic name may be overridden from the default via KAFKA_TOPIC_OVERRIDES - real_topic_name: str diff --git a/src/sentry/consumers/__init__.py b/src/sentry/consumers/__init__.py index 68aa34db767c41..a74b1060784142 100644 --- a/src/sentry/consumers/__init__.py +++ b/src/sentry/consumers/__init__.py @@ -298,7 +298,7 @@ def ingest_events_options() -> list[click.Option]: "static_args": { "ingest_profile": "release-health", }, - "dlq_topic": Topic.INGEST_METRICS_DLQ, + "dlq_topic": settings.KAFKA_INGEST_METRICS_DLQ, "dlq_max_invalid_ratio": 0.01, "dlq_max_consecutive_count": 1000, }, @@ -309,7 +309,7 @@ def ingest_events_options() -> list[click.Option]: "static_args": { "ingest_profile": "performance", }, - "dlq_topic": Topic.INGEST_GENERIC_METRICS_DLQ, + "dlq_topic": settings.KAFKA_INGEST_GENERIC_METRICS_DLQ, "dlq_max_invalid_ratio": 0.01, "dlq_max_consecutive_count": 1000, }, @@ -517,8 +517,7 @@ def build_consumer_config(group_id: 
str): f"Cannot enable DLQ for consumer: {consumer_name}, no DLQ topic has been defined for it" ) from e try: - dlq_topic_defn = get_topic_definition(dlq_topic) - cluster_setting = dlq_topic_defn["cluster"] + cluster_setting = get_topic_definition(dlq_topic)["cluster"] except ValueError as e: raise click.BadParameter( f"Cannot enable DLQ for consumer: {consumer_name}, DLQ topic {dlq_topic} is not configured in this environment" @@ -528,7 +527,7 @@ def build_consumer_config(group_id: str): dlq_producer = KafkaProducer(producer_config) dlq_policy = DlqPolicy( - KafkaDlqProducer(dlq_producer, ArroyoTopic(dlq_topic_defn["real_topic_name"])), + KafkaDlqProducer(dlq_producer, ArroyoTopic(dlq_topic)), DlqLimit( max_invalid_ratio=consumer_definition["dlq_max_invalid_ratio"], max_consecutive_count=consumer_definition["dlq_max_consecutive_count"], diff --git a/src/sentry/eventstream/kafka/backend.py b/src/sentry/eventstream/kafka/backend.py index 97fe9b98de8435..4ec2aa728aa37f 100644 --- a/src/sentry/eventstream/kafka/backend.py +++ b/src/sentry/eventstream/kafka/backend.py @@ -7,9 +7,9 @@ from confluent_kafka import KafkaError from confluent_kafka import Message as KafkaMessage from confluent_kafka import Producer +from django.conf import settings from sentry import options -from sentry.conf.types.kafka_definition import Topic from sentry.eventstream.base import EventStreamEventType, GroupStates from sentry.eventstream.snuba import KW_SKIP_SEMANTIC_PARTITIONING, SnubaProtocolEventStream from sentry.killswitches import killswitch_matches_context @@ -24,15 +24,15 @@ class KafkaEventStream(SnubaProtocolEventStream): def __init__(self, **options: Any) -> None: - self.topic = Topic.EVENTS - self.transactions_topic = Topic.TRANSACTIONS - self.issue_platform_topic = Topic.EVENTSTREAM_GENERIC - self.__producers: MutableMapping[Topic, Producer] = {} + self.topic = settings.KAFKA_EVENTS + self.transactions_topic = settings.KAFKA_TRANSACTIONS + self.issue_platform_topic = 
settings.KAFKA_EVENTSTREAM_GENERIC + self.__producers: MutableMapping[str, Producer] = {} - def get_transactions_topic(self, project_id: int) -> Topic: + def get_transactions_topic(self, project_id: int) -> str: return self.transactions_topic - def get_producer(self, topic: Topic) -> Producer: + def get_producer(self, topic: str) -> Producer: if topic not in self.__producers: cluster_name = get_topic_definition(topic)["cluster"] cluster_options = get_kafka_producer_cluster_options(cluster_name) @@ -202,11 +202,9 @@ def _send( assert isinstance(extra_data, tuple) - real_topic = get_topic_definition(topic)["real_topic_name"] - try: producer.produce( - topic=real_topic, + topic=topic, key=str(project_id).encode("utf-8") if not skip_semantic_partitioning else None, value=json.dumps((self.EVENT_PROTOCOL_VERSION, _type) + extra_data), on_delivery=self.delivery_callback, diff --git a/src/sentry/issues/attributes.py b/src/sentry/issues/attributes.py index 9b15f831558564..5e0e699662f779 100644 --- a/src/sentry/issues/attributes.py +++ b/src/sentry/issues/attributes.py @@ -6,7 +6,7 @@ import requests import urllib3 -from arroyo import Topic as ArroyoTopic +from arroyo import Topic from arroyo.backends.kafka import KafkaPayload, KafkaProducer, build_kafka_configuration from django.conf import settings from django.db.models import F, Window @@ -16,7 +16,6 @@ from sentry_kafka_schemas.schema_types.group_attributes_v1 import GroupAttributesSnapshot from sentry import options -from sentry.conf.types.kafka_definition import Topic from sentry.models.group import Group from sentry.models.groupassignee import GroupAssignee from sentry.models.groupowner import GroupOwner, GroupOwnerType @@ -45,7 +44,7 @@ class GroupValues: def _get_attribute_snapshot_producer() -> KafkaProducer: - cluster_name = get_topic_definition(Topic.GROUP_ATTRIBUTES)["cluster"] + cluster_name = get_topic_definition(settings.KAFKA_GROUP_ATTRIBUTES)["cluster"] producer_config = 
get_kafka_producer_cluster_options(cluster_name) producer_config.pop("compression.type", None) producer_config.pop("message.max.bytes", None) @@ -123,7 +122,7 @@ def produce_snapshot_to_kafka(snapshot: GroupAttributesSnapshot) -> None: raise snuba.SnubaError(err) else: payload = KafkaPayload(None, json.dumps(snapshot).encode("utf-8"), []) - _attribute_snapshot_producer.produce(ArroyoTopic(settings.KAFKA_GROUP_ATTRIBUTES), payload) + _attribute_snapshot_producer.produce(Topic(settings.KAFKA_GROUP_ATTRIBUTES), payload) def _retrieve_group_values(group_id: int) -> GroupValues: diff --git a/src/sentry/issues/producer.py b/src/sentry/issues/producer.py index 8cd499bbecae72..5acfef85adcf18 100644 --- a/src/sentry/issues/producer.py +++ b/src/sentry/issues/producer.py @@ -4,12 +4,11 @@ from collections.abc import MutableMapping from typing import Any, cast -from arroyo import Topic as ArroyoTopic +from arroyo import Topic from arroyo.backends.kafka import KafkaPayload, KafkaProducer, build_kafka_configuration from arroyo.types import Message, Value from django.conf import settings -from sentry.conf.types.kafka_definition import Topic from sentry.issues.issue_occurrence import IssueOccurrence from sentry.issues.run import process_message from sentry.issues.status_change_message import StatusChangeMessage @@ -34,7 +33,7 @@ class PayloadType(ValueEqualityEnum): def _get_occurrence_producer() -> KafkaProducer: - cluster_name = get_topic_definition(Topic.INGEST_OCCURRENCES)["cluster"] + cluster_name = get_topic_definition(settings.KAFKA_INGEST_OCCURRENCES)["cluster"] producer_config = get_kafka_producer_cluster_options(cluster_name) producer_config.pop("compression.type", None) producer_config.pop("message.max.bytes", None) @@ -69,7 +68,7 @@ def produce_occurrence_to_kafka( process_message(Message(Value(payload=payload, committable={}))) return - _occurrence_producer.produce(ArroyoTopic(settings.KAFKA_INGEST_OCCURRENCES), payload) + 
_occurrence_producer.produce(Topic(settings.KAFKA_INGEST_OCCURRENCES), payload) def _prepare_occurrence_message( diff --git a/src/sentry/monitors/tasks.py b/src/sentry/monitors/tasks.py index 82fd558235d5a5..79f86b62a7f404 100644 --- a/src/sentry/monitors/tasks.py +++ b/src/sentry/monitors/tasks.py @@ -7,13 +7,11 @@ import msgpack import sentry_sdk -from arroyo import Partition -from arroyo import Topic as ArroyoTopic +from arroyo import Partition, Topic from arroyo.backends.kafka import KafkaPayload, KafkaProducer, build_kafka_configuration from confluent_kafka.admin import AdminClient, PartitionMetadata from django.conf import settings -from sentry.conf.types.kafka_definition import Topic from sentry.constants import ObjectStatus from sentry.monitors.logic.mark_failed import mark_failed from sentry.monitors.schedule import get_prev_schedule @@ -52,7 +50,7 @@ def _get_producer() -> KafkaProducer: - cluster_name = get_topic_definition(Topic.INGEST_MONITORS)["cluster"] + cluster_name = get_topic_definition(settings.KAFKA_INGEST_MONITORS)["cluster"] producer_config = get_kafka_producer_cluster_options(cluster_name) producer_config.pop("compression.type", None) producer_config.pop("message.max.bytes", None) @@ -64,10 +62,10 @@ def _get_producer() -> KafkaProducer: @lru_cache(maxsize=None) def _get_partitions() -> Mapping[int, PartitionMetadata]: - topic_defn = get_topic_definition(Topic.INGEST_MONITORS) - topic = topic_defn["real_topic_name"] + topic = settings.KAFKA_INGEST_MONITORS + cluster_name = get_topic_definition(topic)["cluster"] - conf = get_kafka_admin_cluster_options(topic_defn["cluster"]) + conf = get_kafka_admin_cluster_options(cluster_name) admin_client = AdminClient(conf) result = admin_client.list_topics(topic) topic_metadata = result.topics.get(topic) @@ -205,7 +203,7 @@ def clock_pulse(current_datetime=None): # topic. This is a requirement to ensure that none of the partitions stall, # since the global clock is tied to the slowest partition. 
for partition in _get_partitions().values(): - dest = Partition(ArroyoTopic(settings.KAFKA_INGEST_MONITORS), partition.id) + dest = Partition(Topic(settings.KAFKA_INGEST_MONITORS), partition.id) _checkin_producer.produce(dest, payload) diff --git a/src/sentry/replays/lib/kafka.py b/src/sentry/replays/lib/kafka.py index 2bde967b5faf01..26ab2368e649cc 100644 --- a/src/sentry/replays/lib/kafka.py +++ b/src/sentry/replays/lib/kafka.py @@ -1,4 +1,5 @@ -from sentry.conf.types.kafka_definition import Topic +from django.conf import settings + from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition from sentry.utils.pubsub import KafkaPublisher @@ -9,7 +10,7 @@ def initialize_replays_publisher(is_async=False) -> KafkaPublisher: global replay_publisher if replay_publisher is None: - config = get_topic_definition(Topic.INGEST_REPLAY_EVENTS) + config = get_topic_definition(settings.KAFKA_INGEST_REPLAY_EVENTS) replay_publisher = KafkaPublisher( get_kafka_producer_cluster_options(config["cluster"]), asynchronous=is_async, diff --git a/src/sentry/replays/usecases/ingest/dom_index.py b/src/sentry/replays/usecases/ingest/dom_index.py index 763162f521e3fe..80b626bb20c987 100644 --- a/src/sentry/replays/usecases/ingest/dom_index.py +++ b/src/sentry/replays/usecases/ingest/dom_index.py @@ -8,8 +8,9 @@ from hashlib import md5 from typing import Any, Literal, TypedDict, cast +from django.conf import settings + from sentry import features -from sentry.conf.types.kafka_definition import Topic from sentry.models.project import Project from sentry.replays.usecases.ingest.events import SentryEvent from sentry.replays.usecases.ingest.issue_creation import ( @@ -218,7 +219,7 @@ def _initialize_publisher() -> KafkaPublisher: global replay_publisher if replay_publisher is None: - config = kafka_config.get_topic_definition(Topic.INGEST_REPLAY_EVENTS) + config = kafka_config.get_topic_definition(settings.KAFKA_INGEST_REPLAY_EVENTS) replay_publisher = 
KafkaPublisher( kafka_config.get_kafka_producer_cluster_options(config["cluster"]) ) diff --git a/src/sentry/runner/commands/devserver.py b/src/sentry/runner/commands/devserver.py index 6e00c6890af293..038a66bfa72238 100644 --- a/src/sentry/runner/commands/devserver.py +++ b/src/sentry/runner/commands/devserver.py @@ -366,11 +366,12 @@ def devserver( from sentry.conf.types.kafka_definition import Topic from sentry.utils.batching_kafka_consumer import create_topics - from sentry.utils.kafka_config import get_topic_definition for topic in Topic: - topic_defn = get_topic_definition(topic) - create_topics(topic_defn["cluster"], [topic_defn["real_topic_name"]]) + default_name = topic.value + physical_name = settings.KAFKA_TOPIC_OVERRIDES.get(default_name, default_name) + cluster_name = settings.KAFKA_TOPIC_TO_CLUSTER[default_name] + create_topics(cluster_name, [physical_name]) if dev_consumer: daemons.append( diff --git a/src/sentry/sentry_metrics/configuration.py b/src/sentry/sentry_metrics/configuration.py index a885712f379d64..eddebed13a3220 100644 --- a/src/sentry/sentry_metrics/configuration.py +++ b/src/sentry/sentry_metrics/configuration.py @@ -10,8 +10,6 @@ import sentry_sdk -from sentry.conf.types.kafka_definition import Topic - # The maximum length of a column that is indexed in postgres. 
It is important to keep this in # sync between the consumers and the models defined in src/sentry/sentry_metrics/models.py MAX_INDEXED_COLUMN_LENGTH = 200 @@ -48,7 +46,7 @@ class MetricsIngestConfiguration: db_backend: IndexerStorage db_backend_options: Mapping[str, Any] input_topic: str - output_topic: Topic + output_topic: str use_case_id: UseCaseKey internal_metrics_tag: str | None writes_limiter_cluster_options: Mapping[str, Any] @@ -81,7 +79,7 @@ def get_ingest_config( db_backend=IndexerStorage.POSTGRES, db_backend_options={}, input_topic=settings.KAFKA_INGEST_METRICS, - output_topic=Topic.SNUBA_METRICS, + output_topic=settings.KAFKA_SNUBA_METRICS, use_case_id=UseCaseKey.RELEASE_HEALTH, internal_metrics_tag="release-health", writes_limiter_cluster_options=settings.SENTRY_METRICS_INDEXER_WRITES_LIMITER_OPTIONS, @@ -98,7 +96,7 @@ def get_ingest_config( db_backend=IndexerStorage.POSTGRES, db_backend_options={}, input_topic=settings.KAFKA_INGEST_PERFORMANCE_METRICS, - output_topic=Topic.SNUBA_GENERIC_METRICS, + output_topic=settings.KAFKA_SNUBA_GENERIC_METRICS, use_case_id=UseCaseKey.PERFORMANCE, internal_metrics_tag="perf", writes_limiter_cluster_options=settings.SENTRY_METRICS_INDEXER_WRITES_LIMITER_OPTIONS_PERFORMANCE, diff --git a/src/sentry/sentry_metrics/consumers/indexer/multiprocess.py b/src/sentry/sentry_metrics/consumers/indexer/multiprocess.py index 8cb2fdd5639b22..dd56520a20f521 100644 --- a/src/sentry/sentry_metrics/consumers/indexer/multiprocess.py +++ b/src/sentry/sentry_metrics/consumers/indexer/multiprocess.py @@ -10,7 +10,6 @@ from arroyo.types import Commit, FilteredPayload, Message, Partition from confluent_kafka import Producer -from sentry.conf.types.kafka_definition import Topic from sentry.utils import kafka_config, metrics logger = logging.getLogger(__name__) @@ -19,7 +18,7 @@ class SimpleProduceStep(ProcessingStep[KafkaPayload]): def __init__( self, - output_topic: Topic, + output_topic: str, commit_function: Commit, producer: 
AbstractProducer[KafkaPayload] | None = None, ) -> None: diff --git a/src/sentry/usage_accountant/accountant.py b/src/sentry/usage_accountant/accountant.py index ee1e98a8c9cc8f..2ecf3c49f75c03 100644 --- a/src/sentry/usage_accountant/accountant.py +++ b/src/sentry/usage_accountant/accountant.py @@ -12,9 +12,9 @@ from arroyo.backends.abstract import Producer from arroyo.backends.kafka import KafkaPayload, KafkaProducer, build_kafka_configuration +from django.conf import settings from usageaccountant import UsageAccumulator, UsageUnit -from sentry.conf.types.kafka_definition import Topic from sentry.options import get from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition @@ -71,7 +71,7 @@ def record( if _accountant_backend is None: cluster_name = get_topic_definition( - Topic.SHARED_RESOURCES_USAGE, + settings.KAFKA_SHARED_RESOURCES_USAGE, )["cluster"] producer_config = get_kafka_producer_cluster_options(cluster_name) producer = KafkaProducer( diff --git a/src/sentry/utils/kafka_config.py b/src/sentry/utils/kafka_config.py index 93e3c4fc87a126..2ca53a67bf3a47 100644 --- a/src/sentry/utils/kafka_config.py +++ b/src/sentry/utils/kafka_config.py @@ -3,7 +3,6 @@ from django.conf import settings -from sentry.conf.types.kafka_definition import Topic from sentry.conf.types.topic_definition import TopicDefinition SUPPORTED_KAFKA_CONFIGURATION = ( @@ -97,8 +96,9 @@ def get_kafka_admin_cluster_options( ) -def get_topic_definition(topic: Topic) -> TopicDefinition: - return { - "cluster": settings.KAFKA_TOPIC_TO_CLUSTER[topic.value], - "real_topic_name": settings.KAFKA_TOPIC_OVERRIDES.get(topic.value, topic.value), - } +def get_topic_definition(topic: str) -> TopicDefinition: + defn = settings.KAFKA_TOPICS.get(topic) + if defn is not None: + return defn + else: + raise ValueError(f"Unknown {topic=}") diff --git a/src/sentry/utils/outcomes.py b/src/sentry/utils/outcomes.py index 19774c0a294a0a..4aa2951b4bc0aa 100644 --- 
a/src/sentry/utils/outcomes.py +++ b/src/sentry/utils/outcomes.py @@ -6,7 +6,6 @@ from django.conf import settings -from sentry.conf.types.kafka_definition import Topic from sentry.constants import DataCategory from sentry.utils import json, kafka_config, metrics from sentry.utils.dates import to_datetime @@ -73,8 +72,8 @@ def track_outcome( assert isinstance(category, (type(None), DataCategory)) assert isinstance(quantity, int) - outcomes_config = kafka_config.get_topic_definition(Topic.OUTCOMES) - billing_config = kafka_config.get_topic_definition(Topic.OUTCOMES_BILLING) + outcomes_config = kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES) + billing_config = kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES_BILLING) use_billing = outcome.is_billing() diff --git a/tests/sentry/utils/test_outcomes.py b/tests/sentry/utils/test_outcomes.py index c7f6a479c47f06..65a476143d05e9 100644 --- a/tests/sentry/utils/test_outcomes.py +++ b/tests/sentry/utils/test_outcomes.py @@ -4,7 +4,6 @@ import pytest from django.conf import settings -from sentry.conf.types.kafka_definition import Topic from sentry.utils import json, kafka_config, outcomes from sentry.utils.outcomes import Outcome, track_outcome @@ -80,7 +79,9 @@ def test_track_outcome_default(setup): ) cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args - assert cluster_args == (kafka_config.get_topic_definition(Topic.OUTCOMES)["cluster"],) + assert cluster_args == ( + kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES)["cluster"], + ) assert outcomes.outcomes_publisher (topic_name, payload), _ = setup.mock_publisher.return_value.publish.call_args @@ -116,7 +117,7 @@ def test_track_outcome_billing(setup): ) cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args - assert cluster_args == (kafka_config.get_topic_definition(Topic.OUTCOMES)["cluster"],) + assert cluster_args == (kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES)["cluster"],) assert 
outcomes.outcomes_publisher (topic_name, _), _ = setup.mock_publisher.return_value.publish.call_args @@ -135,7 +136,7 @@ def test_track_outcome_billing_topic(setup): settings.KAFKA_TOPICS, { settings.KAFKA_OUTCOMES_BILLING: { - "cluster": kafka_config.get_topic_definition(Topic.OUTCOMES)["cluster"], + "cluster": kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES)["cluster"], } }, ): @@ -147,7 +148,9 @@ def test_track_outcome_billing_topic(setup): ) cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args - assert cluster_args == (kafka_config.get_topic_definition(Topic.OUTCOMES)["cluster"],) + assert cluster_args == ( + kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES)["cluster"], + ) assert outcomes.outcomes_publisher (topic_name, _), _ = setup.mock_publisher.return_value.publish.call_args @@ -161,7 +164,9 @@ def test_track_outcome_billing_cluster(settings, setup): Checks that outcomes are routed to the dedicated cluster and topic. """ - with mock.patch.dict(settings.KAFKA_TOPIC_TO_CLUSTER, {"outcomes-billing": "different"}): + with mock.patch.dict( + settings.KAFKA_TOPICS, {settings.KAFKA_OUTCOMES_BILLING: {"cluster": "different"}} + ): track_outcome( org_id=1, project_id=1, diff --git a/tests/snuba/incidents/test_tasks.py b/tests/snuba/incidents/test_tasks.py index 3bb5185f072757..9354d9bb79c0e9 100644 --- a/tests/snuba/incidents/test_tasks.py +++ b/tests/snuba/incidents/test_tasks.py @@ -8,7 +8,6 @@ from django.conf import settings from django.core import mail -from sentry.conf.types.kafka_definition import Topic from sentry.incidents.action_handlers import ( EmailActionHandler, generate_incident_trigger_email_context, @@ -41,7 +40,7 @@ class HandleSnubaQueryUpdateTest(TestCase): def setUp(self): super().setUp() - self.topic = Topic.METRICS_SUBSCRIPTIONS_RESULTS + self.topic = "metrics-subscription-results" self.orig_registry = deepcopy(subscriber_registry) cluster_options = kafka_config.get_kafka_admin_cluster_options( @@ 
-49,18 +48,15 @@ def setUp(self): ) self.admin_client = AdminClient(cluster_options) - topic_defn = kafka_config.get_topic_definition(self.topic) - self.real_topic = topic_defn["real_topic_name"] - self.cluster = topic_defn["cluster"] - - create_topics(self.cluster, [self.real_topic]) + kafka_cluster = kafka_config.get_topic_definition(self.topic)["cluster"] + create_topics(kafka_cluster, [self.topic]) def tearDown(self): super().tearDown() subscriber_registry.clear() subscriber_registry.update(self.orig_registry) - self.admin_client.delete_topics([self.real_topic]) + self.admin_client.delete_topics([self.topic]) metrics._metrics_backend = None @cached_property @@ -97,8 +93,9 @@ def action(self): @cached_property def producer(self): + cluster_name = kafka_config.get_topic_definition(self.topic)["cluster"] conf = { - "bootstrap.servers": settings.KAFKA_CLUSTERS[self.cluster]["common"][ + "bootstrap.servers": settings.KAFKA_CLUSTERS[cluster_name]["common"][ "bootstrap.servers" ], "session.timeout.ms": 6000, @@ -132,7 +129,7 @@ def run_test(self, consumer): "timestamp": "2020-01-01T01:23:45.1234", }, } - self.producer.produce(self.real_topic, json.dumps(message)) + self.producer.produce(self.topic, json.dumps(message)) self.producer.flush() def active_incident(): From 5a5b331282ec817bbb33bd94d19b587eb9cf7dac Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Tue, 5 Mar 2024 22:07:34 +0000 Subject: [PATCH 066/145] Revert "ref: Move query subscriptions over to new-style kafka config (#66287)" This reverts commit f9c2ffd3926a9215f245e34e0dae2f0f7134a3fb. 
Co-authored-by: IanWoodard <17186604+IanWoodard@users.noreply.github.com> --- src/sentry/consumers/__init__.py | 18 +++++++++++++----- .../snuba/query_subscriptions/constants.py | 16 +++++++++------- src/sentry/snuba/query_subscriptions/run.py | 10 ++++------ .../snuba/test_query_subscription_consumer.py | 6 +----- 4 files changed, 27 insertions(+), 23 deletions(-) diff --git a/src/sentry/consumers/__init__.py b/src/sentry/consumers/__init__.py index a74b1060784142..16ecf2ca371a7d 100644 --- a/src/sentry/consumers/__init__.py +++ b/src/sentry/consumers/__init__.py @@ -230,34 +230,42 @@ def ingest_events_options() -> list[click.Option]: "topic": settings.KAFKA_EVENTS_SUBSCRIPTIONS_RESULTS, "strategy_factory": "sentry.snuba.query_subscriptions.run.QuerySubscriptionStrategyFactory", "click_options": multiprocessing_options(default_max_batch_size=100), - "static_args": {"dataset": "events"}, + "static_args": { + "topic": settings.KAFKA_EVENTS_SUBSCRIPTIONS_RESULTS, + }, }, "transactions-subscription-results": { "topic": settings.KAFKA_TRANSACTIONS_SUBSCRIPTIONS_RESULTS, "strategy_factory": "sentry.snuba.query_subscriptions.run.QuerySubscriptionStrategyFactory", "click_options": multiprocessing_options(default_max_batch_size=100), - "static_args": {"dataset": "transactions"}, + "static_args": { + "topic": settings.KAFKA_TRANSACTIONS_SUBSCRIPTIONS_RESULTS, + }, }, "generic-metrics-subscription-results": { "topic": Topic.GENERIC_METRICS_SUBSCRIPTIONS_RESULTS, "validate_schema": True, "strategy_factory": "sentry.snuba.query_subscriptions.run.QuerySubscriptionStrategyFactory", "click_options": multiprocessing_options(default_max_batch_size=100), - "static_args": {"dataset": "generic_metrics"}, + "static_args": { + "topic": settings.KAFKA_GENERIC_METRICS_SUBSCRIPTIONS_RESULTS, + }, }, "sessions-subscription-results": { "topic": settings.KAFKA_SESSIONS_SUBSCRIPTIONS_RESULTS, "strategy_factory": "sentry.snuba.query_subscriptions.run.QuerySubscriptionStrategyFactory", 
"click_options": multiprocessing_options(), "static_args": { - "dataset": "events", + "topic": settings.KAFKA_SESSIONS_SUBSCRIPTIONS_RESULTS, }, }, "metrics-subscription-results": { "topic": settings.KAFKA_METRICS_SUBSCRIPTIONS_RESULTS, "strategy_factory": "sentry.snuba.query_subscriptions.run.QuerySubscriptionStrategyFactory", "click_options": multiprocessing_options(default_max_batch_size=100), - "static_args": {"dataset": "metrics"}, + "static_args": { + "topic": settings.KAFKA_METRICS_SUBSCRIPTIONS_RESULTS, + }, }, "ingest-events": { "topic": settings.KAFKA_INGEST_EVENTS, diff --git a/src/sentry/snuba/query_subscriptions/constants.py b/src/sentry/snuba/query_subscriptions/constants.py index ceb49368ac7671..9da183bcaa9d2f 100644 --- a/src/sentry/snuba/query_subscriptions/constants.py +++ b/src/sentry/snuba/query_subscriptions/constants.py @@ -1,7 +1,14 @@ -from sentry.conf.types.kafka_definition import Topic +from django.conf import settings + from sentry.snuba.dataset import Dataset -from sentry.utils.kafka_config import get_topic_definition +topic_to_dataset: dict[str, Dataset] = { + settings.KAFKA_EVENTS_SUBSCRIPTIONS_RESULTS: Dataset.Events, + settings.KAFKA_TRANSACTIONS_SUBSCRIPTIONS_RESULTS: Dataset.Transactions, + settings.KAFKA_GENERIC_METRICS_SUBSCRIPTIONS_RESULTS: Dataset.PerformanceMetrics, + settings.KAFKA_SESSIONS_SUBSCRIPTIONS_RESULTS: Dataset.Sessions, + settings.KAFKA_METRICS_SUBSCRIPTIONS_RESULTS: Dataset.Metrics, +} dataset_to_logical_topic = { Dataset.Events: "events-subscription-results", Dataset.Transactions: "transactions-subscription-results", @@ -9,8 +16,3 @@ Dataset.Sessions: "sessions-subscription-results", Dataset.Metrics: "metrics-subscription-results", } - -topic_to_dataset = { - get_topic_definition(Topic(logical_topic))["real_topic_name"]: dataset - for (dataset, logical_topic) in dataset_to_logical_topic.items() -} diff --git a/src/sentry/snuba/query_subscriptions/run.py b/src/sentry/snuba/query_subscriptions/run.py index 
f33a6307bd3aa1..bc7a48da357268 100644 --- a/src/sentry/snuba/query_subscriptions/run.py +++ b/src/sentry/snuba/query_subscriptions/run.py @@ -13,12 +13,10 @@ from arroyo.types import BrokerValue, Commit, Message, Partition from sentry_kafka_schemas import get_codec -from sentry.conf.types.kafka_definition import Topic from sentry.features.rollout import in_random_rollout from sentry.snuba.dataset import Dataset -from sentry.snuba.query_subscriptions.constants import dataset_to_logical_topic +from sentry.snuba.query_subscriptions.constants import dataset_to_logical_topic, topic_to_dataset from sentry.utils.arroyo import MultiprocessingPool, RunTaskWithMultiprocessing -from sentry.utils.kafka_config import get_topic_definition logger = logging.getLogger(__name__) @@ -26,7 +24,7 @@ class QuerySubscriptionStrategyFactory(ProcessingStrategyFactory[KafkaPayload]): def __init__( self, - dataset: str, + topic: str, max_batch_size: int, max_batch_time: int, num_processes: int, @@ -34,9 +32,9 @@ def __init__( output_block_size: int | None, multi_proc: bool = True, ): - self.dataset = Dataset(dataset) + self.topic = topic + self.dataset = topic_to_dataset[self.topic] self.logical_topic = dataset_to_logical_topic[self.dataset] - self.topic = get_topic_definition(Topic(self.logical_topic))["real_topic_name"] self.max_batch_size = max_batch_size self.max_batch_time = max_batch_time self.input_block_size = input_block_size diff --git a/tests/sentry/snuba/test_query_subscription_consumer.py b/tests/sentry/snuba/test_query_subscription_consumer.py index ceeddb82165dd6..3a8387c9806a00 100644 --- a/tests/sentry/snuba/test_query_subscription_consumer.py +++ b/tests/sentry/snuba/test_query_subscription_consumer.py @@ -31,10 +31,6 @@ @pytest.mark.snuba_ci class BaseQuerySubscriptionTest: - @cached_property - def dataset(self): - return Dataset.Metrics - @cached_property def topic(self): return settings.KAFKA_METRICS_SUBSCRIPTIONS_RESULTS @@ -101,7 +97,7 @@ def 
test_arroyo_consumer(self): commit = mock.Mock() partition = Partition(Topic("test"), 0) strategy = QuerySubscriptionStrategyFactory( - self.dataset.value, + self.topic, 1, 1, 1, From 9e6f13bb9692bae3d7726086746a166d173195dc Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Tue, 5 Mar 2024 17:15:55 -0500 Subject: [PATCH 067/145] ref(crons): Drop last_state_change column (#66353) This is no longer needed after usage removal in GH-66328 --- migrations_lockfile.txt | 2 +- .../0662_monitor_drop_last_state_change.py | 32 +++++++++++++++++++ src/sentry/monitors/models.py | 5 --- 3 files changed, 33 insertions(+), 6 deletions(-) create mode 100644 src/sentry/migrations/0662_monitor_drop_last_state_change.py diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index 9cd0c62756ec4a..dd7cfa2517ed34 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -9,5 +9,5 @@ feedback: 0004_index_together hybridcloud: 0013_add_orgauthtokenreplica_token_index nodestore: 0002_nodestore_no_dictfield replays: 0004_index_together -sentry: 0661_artifactbundleindex_cleanup_step2 +sentry: 0662_monitor_drop_last_state_change social_auth: 0002_default_auto_field diff --git a/src/sentry/migrations/0662_monitor_drop_last_state_change.py b/src/sentry/migrations/0662_monitor_drop_last_state_change.py new file mode 100644 index 00000000000000..46974b99824d80 --- /dev/null +++ b/src/sentry/migrations/0662_monitor_drop_last_state_change.py @@ -0,0 +1,32 @@ +# Generated by Django 5.0.2 on 2024-03-05 21:47 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. For + # the most part, this should only be used for operations where it's safe to run the migration + # after your code has deployed. So this should not be used for most operations that alter the + # schema of a table. 
+ # Here are some things that make sense to mark as dangerous: + # - Large data migrations. Typically we want these to be run manually by ops so that they can + # be monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # have ops run this and not block the deploy. Note that while adding an index is a schema + # change, it's completely safe to run the operation after the code has deployed. + is_dangerous = False + + dependencies = [ + ("sentry", "0661_artifactbundleindex_cleanup_step2"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + database_operations=[], + state_operations=[ + migrations.RemoveField(model_name="monitorenvironment", name="last_state_change"), + ], + ) + ] diff --git a/src/sentry/monitors/models.py b/src/sentry/monitors/models.py index 43cc4b8dd1cc1f..79179c626200cc 100644 --- a/src/sentry/monitors/models.py +++ b/src/sentry/monitors/models.py @@ -605,11 +605,6 @@ class MonitorEnvironment(Model): auto-generated missed check-ins. """ - last_state_change = models.DateTimeField(null=True) - """ - The last time that the monitor changed state. Used for issue fingerprinting. - """ - objects: ClassVar[MonitorEnvironmentManager] = MonitorEnvironmentManager() class Meta: From f8649f6fb8644d37bcfa006831dd91a80c120cb9 Mon Sep 17 00:00:00 2001 From: Nathan Hsieh <6186377+nhsiehgit@users.noreply.github.com> Date: Tue, 5 Mar 2024 14:36:59 -0800 Subject: [PATCH 068/145] bump more_slow_alerts to 300 alerts (#66367) [per discussions](https://sentry.slack.com/archives/C06J6HKFKLG/p1708464708399359) bumping the 'more slow alerts' group to 300 alerts. 
This is a temporary fix and will need to be addressed properly (potentially via [inc-666](https://getsentry.atlassian.net/browse/INC-666), but there may be other solutions here as well) --- src/sentry/conf/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 63344a5753d3c5..3e57b14eb181a4 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -3836,7 +3836,7 @@ def build_cdc_postgres_init_db_volume(settings: Any) -> dict[str, dict[str, str] ENABLE_ANALYTICS = False MAX_SLOW_CONDITION_ISSUE_ALERTS = 100 -MAX_MORE_SLOW_CONDITION_ISSUE_ALERTS = 200 +MAX_MORE_SLOW_CONDITION_ISSUE_ALERTS = 300 MAX_FAST_CONDITION_ISSUE_ALERTS = 500 MAX_QUERY_SUBSCRIPTIONS_PER_ORG = 1000 From 442f7e4ddeb9b0f7cc5923348352eb3db3d135a5 Mon Sep 17 00:00:00 2001 From: Nathan Hsieh <6186377+nhsiehgit@users.noreply.github.com> Date: Tue, 5 Mar 2024 14:47:18 -0800 Subject: [PATCH 069/145] Activated alert rule apis feature flag (#66361) Includes feature flag and monitor type validator in the alert rule api serializer Serializer converts int value into enum --- src/sentry/conf/server.py | 2 ++ src/sentry/features/__init__.py | 1 + src/sentry/incidents/serializers/alert_rule.py | 17 ++++++++++++++++- 3 files changed, 19 insertions(+), 1 deletion(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 3e57b14eb181a4..2864079be93516 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1455,6 +1455,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "auth:enterprise-superuser-read-write": False, # Enables user registration. "auth:register": True, + # Enables activated alert rules + "organizations:activated-alert-rules": False, # Enable advanced search features, like negation and wildcard matching. 
"organizations:advanced-search": True, # Enables alert creation on indexed events in UI (use for PoC/testing only) diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 4382478417bb32..52c016fd4e31d8 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -70,6 +70,7 @@ default_manager.add("relocation:enabled", SystemFeature, FeatureHandlerStrategy.INTERNAL) # Organization scoped features that are in development or in customer trials. +default_manager.add("organizations:activated-alert-rules", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:alert-allow-indexed", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:alert-crash-free-metrics", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:alert-filters", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) diff --git a/src/sentry/incidents/serializers/alert_rule.py b/src/sentry/incidents/serializers/alert_rule.py index 8d3d009fb87a4e..fc134e371cbf14 100644 --- a/src/sentry/incidents/serializers/alert_rule.py +++ b/src/sentry/incidents/serializers/alert_rule.py @@ -26,7 +26,12 @@ translate_aggregate_field, update_alert_rule, ) -from sentry.incidents.models import AlertRule, AlertRuleThresholdType, AlertRuleTrigger +from sentry.incidents.models import ( + AlertRule, + AlertRuleMonitorType, + AlertRuleThresholdType, + AlertRuleTrigger, +) from sentry.snuba.dataset import Dataset from sentry.snuba.entity_subscription import ( ENTITY_TIME_COLUMNS, @@ -196,6 +201,16 @@ def validate_threshold_type(self, threshold_type): % [item.value for item in AlertRuleThresholdType] ) + def validate_monitor_type(self, monitor_type): + if monitor_type > 0 and not features.has( + "organizations:activated-alert-rules", + self.context["organization"], + actor=self.context.get("user", None), + ): + raise serializers.ValidationError("Invalid monitor type") + + return 
AlertRuleMonitorType(monitor_type) + def validate(self, data): """ Performs validation on an alert rule's data. From 8631dadc6871cd0d5c8d77ec55eac8eb5e03607d Mon Sep 17 00:00:00 2001 From: Colleen O'Rourke Date: Tue, 5 Mar 2024 14:52:39 -0800 Subject: [PATCH 070/145] ref(daily summary): Add logger when sending summary (#66369) Add a log line when we send the notification as I'm doing internal testing. --- src/sentry/tasks/summaries/daily_summary.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/sentry/tasks/summaries/daily_summary.py b/src/sentry/tasks/summaries/daily_summary.py index 46b55784c81923..3f8cfa1d7086fb 100644 --- a/src/sentry/tasks/summaries/daily_summary.py +++ b/src/sentry/tasks/summaries/daily_summary.py @@ -267,6 +267,10 @@ def deliver_summary(ctx: OrganizationReportContext, users: list[int]): for user_id in user_ids: top_projects_context_map = build_top_projects_map(ctx, user_id) user = cast(RpcActor, user_service.get_user(user_id=user_id)) + logger.info( + "daily_summary.delivering_summary", + extra={"user": user_id, "organization": ctx.organization.id}, + ) DailySummaryNotification( organization=ctx.organization, recipient=user, From b9d94b821eb70819e34b4fa78dcc498f4554e424 Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Tue, 5 Mar 2024 15:09:55 -0800 Subject: [PATCH 071/145] feat(issue-priority): Add last edited by info and feedback button to dropdown (#66362) --- fixtures/js-stubs/group.ts | 1 + static/app/components/dropdownMenu/footer.tsx | 15 ++ static/app/components/dropdownMenu/list.tsx | 6 + .../components/group/groupPriority.spec.tsx | 54 +++++++ .../group/groupPriority.stories.tsx | 9 +- static/app/components/group/groupPriority.tsx | 134 +++++++++++++++++- static/app/styles/text.tsx | 4 +- static/app/types/group.tsx | 1 + .../app/views/issueDetails/groupPriority.tsx | 6 + 9 files changed, 220 insertions(+), 10 deletions(-) create mode 100644 static/app/components/dropdownMenu/footer.tsx create mode 100644 
static/app/components/group/groupPriority.spec.tsx diff --git a/fixtures/js-stubs/group.ts b/fixtures/js-stubs/group.ts index 6da2c6da9c79b0..27662363a478ee 100644 --- a/fixtures/js-stubs/group.ts +++ b/fixtures/js-stubs/group.ts @@ -39,6 +39,7 @@ export function GroupFixture(params: Partial = {}): Group { pluginContexts: [], pluginIssues: [], priority: PriorityLevel.MEDIUM, + priorityLockedAt: null, project: ProjectFixture({ platform: 'javascript', }), diff --git a/static/app/components/dropdownMenu/footer.tsx b/static/app/components/dropdownMenu/footer.tsx new file mode 100644 index 00000000000000..de1c7d14cd5e54 --- /dev/null +++ b/static/app/components/dropdownMenu/footer.tsx @@ -0,0 +1,15 @@ +import styled from '@emotion/styled'; + +import {space} from 'sentry/styles/space'; + +/** + * Provides default styling for custom footer content in a `DropdownMenu`. + */ +export const DropdownMenuFooter = styled('div')` + border-top: solid 1px ${p => p.theme.innerBorder}; + padding: ${space(1)} ${space(1.5)}; + font-size: ${p => p.theme.fontSizeSmall}; + color: ${p => p.theme.subText}; + display: flex; + align-items: center; +`; diff --git a/static/app/components/dropdownMenu/list.tsx b/static/app/components/dropdownMenu/list.tsx index 95f68b54d20b95..d1907b352230bc 100644 --- a/static/app/components/dropdownMenu/list.tsx +++ b/static/app/components/dropdownMenu/list.tsx @@ -57,6 +57,10 @@ export interface DropdownMenuListProps * Whether the menu should close when an item has been clicked/selected */ closeOnSelect?: boolean; + /** + * To be displayed below the menu items + */ + menuFooter?: React.ReactChild; /** * Title to display on top of the menu */ @@ -74,6 +78,7 @@ function DropdownMenuList({ minMenuWidth, size, menuTitle, + menuFooter, overlayState, overlayPositionProps, ...props @@ -249,6 +254,7 @@ function DropdownMenuList({ > {renderCollection(stateCollection)} + {menuFooter} diff --git a/static/app/components/group/groupPriority.spec.tsx 
b/static/app/components/group/groupPriority.spec.tsx new file mode 100644 index 00000000000000..06ac034c6a8a84 --- /dev/null +++ b/static/app/components/group/groupPriority.spec.tsx @@ -0,0 +1,54 @@ +import {ActivityFeedFixture} from 'sentry-fixture/activityFeed'; +import {UserFixture} from 'sentry-fixture/user'; + +import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary'; +import {textWithMarkupMatcher} from 'sentry-test/utils'; + +import {GroupPriorityDropdown} from 'sentry/components/group/groupPriority'; +import {GroupActivityType, PriorityLevel} from 'sentry/types'; + +describe('GroupPriority', function () { + describe('GroupPriorityDropdown', function () { + const defaultProps = { + groupId: '1', + onChange: jest.fn(), + value: PriorityLevel.HIGH, + }; + + it('skips request when sent lastEditedBy', async function () { + render(); + + await userEvent.click(screen.getByRole('button', {name: 'Modify issue priority'})); + + expect( + screen.getByText(textWithMarkupMatcher('Last edited by Sentry')) + ).toBeInTheDocument(); + }); + + it('fetches the last priority edit when not passed in', async function () { + MockApiClient.addMockResponse({ + url: '/issues/1/activities/', + body: { + activity: [ + ActivityFeedFixture({ + type: GroupActivityType.SET_PRIORITY, + user: UserFixture({name: 'John Doe'}), + }), + ActivityFeedFixture({ + type: GroupActivityType.SET_PRIORITY, + user: UserFixture({name: 'Other User'}), + }), + ], + }, + }); + + render(); + + await userEvent.click(screen.getByRole('button', {name: 'Modify issue priority'})); + + expect( + await screen.findByText(textWithMarkupMatcher('Last edited by John Doe')) + ).toBeInTheDocument(); + }); + }); +}); diff --git a/static/app/components/group/groupPriority.stories.tsx b/static/app/components/group/groupPriority.stories.tsx index 990a8f5b47d092..6cd95561a725a1 100644 --- a/static/app/components/group/groupPriority.stories.tsx +++ b/static/app/components/group/groupPriority.stories.tsx @@ 
-24,6 +24,13 @@ export const Dropdown = storyBook(GroupPriorityDropdown, story => { story('Default', () => { const [value, setValue] = useState(PriorityLevel.MEDIUM); - return ; + return ( + + ); }); }); diff --git a/static/app/components/group/groupPriority.tsx b/static/app/components/group/groupPriority.tsx index f15c90aa699132..3b686aa9b6bbf8 100644 --- a/static/app/components/group/groupPriority.tsx +++ b/static/app/components/group/groupPriority.tsx @@ -1,18 +1,31 @@ -import {useMemo} from 'react'; +import {useMemo, useRef} from 'react'; import type {Theme} from '@emotion/react'; import styled from '@emotion/styled'; import {Button} from 'sentry/components/button'; -import {DropdownMenu, type MenuItemProps} from 'sentry/components/dropdownMenu'; +import type {MenuItemProps} from 'sentry/components/dropdownMenu'; +import {DropdownMenu} from 'sentry/components/dropdownMenu'; +import {DropdownMenuFooter} from 'sentry/components/dropdownMenu/footer'; +import useFeedbackWidget from 'sentry/components/feedback/widget/useFeedbackWidget'; +import Placeholder from 'sentry/components/placeholder'; import Tag from 'sentry/components/tag'; import {IconChevron} from 'sentry/icons'; -import {t} from 'sentry/locale'; +import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import {PriorityLevel} from 'sentry/types'; +import { + type Activity, + type AvatarUser, + GroupActivityType, + PriorityLevel, +} from 'sentry/types'; +import {defined} from 'sentry/utils'; +import {useApiQuery} from 'sentry/utils/queryClient'; type GroupPriorityDropdownProps = { + groupId: string; onChange: (value: PriorityLevel) => void; value: PriorityLevel; + lastEditedBy?: 'system' | AvatarUser; }; type GroupPriorityBadgeProps = { @@ -40,6 +53,33 @@ function getTagTypeForPriority(priority: string): keyof Theme['tag'] { } } +function useLastEditedBy({ + groupId, + lastEditedBy: incomingLastEditedBy, +}: Pick) { + const {data} = useApiQuery<{activity: 
Activity[]}>([`/issues/${groupId}/activities/`], { + enabled: !defined(incomingLastEditedBy), + staleTime: 0, + }); + + const lastEditedBy = useMemo(() => { + if (incomingLastEditedBy) { + return incomingLastEditedBy; + } + + if (!data) { + return null; + } + + return ( + data?.activity?.find(activity => activity.type === GroupActivityType.SET_PRIORITY) + ?.user ?? 'system' + ); + }, [data, incomingLastEditedBy]); + + return lastEditedBy; +} + export function GroupPriorityBadge({priority, children}: GroupPriorityBadgeProps) { return ( @@ -49,7 +89,49 @@ export function GroupPriorityBadge({priority, children}: GroupPriorityBadgeProps ); } -export function GroupPriorityDropdown({value, onChange}: GroupPriorityDropdownProps) { +function PriorityChangeActor({ + groupId, + lastEditedBy, +}: Pick) { + const resolvedLastEditedBy = useLastEditedBy({groupId, lastEditedBy}); + + if (!resolvedLastEditedBy) { + return ; + } + + if (resolvedLastEditedBy === 'system') { + return Sentry; + } + + return {resolvedLastEditedBy.name}; +} + +function GroupPriorityFeedback() { + const buttonRef = useRef(null); + const feedback = useFeedbackWidget({buttonRef}); + + if (!feedback) { + return null; + } + + return ( + e.stopPropagation()} + > + {t('Give Feedback')} + + ); +} + +export function GroupPriorityDropdown({ + groupId, + value, + onChange, + lastEditedBy, +}: GroupPriorityDropdownProps) { const options: MenuItemProps[] = useMemo(() => { return PRIORITY_OPTIONS.map(priority => ({ textValue: PRIORITY_KEY_TO_LABEL[priority], @@ -62,8 +144,13 @@ export function GroupPriorityDropdown({value, onChange}: GroupPriorityDropdownPr return ( +
{t('Set Priority')}
+ + + } + minMenuWidth={210} trigger={triggerProps => ( )} items={options} + menuFooter={ + +
+ {tct('Last edited by [name]', { + name: , + })} +
+
+ } + position="bottom-end" /> ); } @@ -95,3 +192,26 @@ const StyledTag = styled(Tag)` gap: ${space(0.5)}; } `; + +const InlinePlaceholder = styled(Placeholder)` + display: inline-block; + vertical-align: middle; +`; + +const MenuTitleContainer = styled('div')` + display: flex; + align-items: flex-end; + justify-content: space-between; +`; + +const StyledButton = styled(Button)` + font-size: ${p => p.theme.fontSizeSmall}; + color: ${p => p.theme.subText}; + font-weight: normal; + padding: 0; + border: none; + + &:hover { + color: ${p => p.theme.subText}; + } +`; diff --git a/static/app/styles/text.tsx b/static/app/styles/text.tsx index a054fbff23a7d2..65780a063e38b3 100644 --- a/static/app/styles/text.tsx +++ b/static/app/styles/text.tsx @@ -10,8 +10,8 @@ const textStyles = () => css` h6, p, /* Exclude ol/ul elements inside interactive selectors/menus */ - ul:not([role='listbox'], [role='grid']), - ol:not([role='listbox'], [role='grid']), + ul:not([role='listbox'], [role='grid'], [role='menu']), + ol:not([role='listbox'], [role='grid'], [role='menu']), table, dl, blockquote, diff --git a/static/app/types/group.tsx b/static/app/types/group.tsx index f60d6e5e914571..0f04cf1260e054 100644 --- a/static/app/types/group.tsx +++ b/static/app/types/group.tsx @@ -784,6 +784,7 @@ export interface BaseGroup { pluginContexts: any[]; // TODO(ts) pluginIssues: TitledPlugin[]; priority: PriorityLevel; + priorityLockedAt: string | null; project: Project; seenBy: User[]; shareId: string; diff --git a/static/app/views/issueDetails/groupPriority.tsx b/static/app/views/issueDetails/groupPriority.tsx index 4242620c252221..18c5af1ec7756c 100644 --- a/static/app/views/issueDetails/groupPriority.tsx +++ b/static/app/views/issueDetails/groupPriority.tsx @@ -44,10 +44,16 @@ function GroupPriority({group}: GroupDetailsPriorityProps) { ); }; + // We can assume that when there is not `priorityLockedAt`, there were no + // user edits to the priority. 
+ const lastEditedBy = !group.priorityLockedAt ? 'system' : undefined; + return ( ); } From 1a3dbf660ae88f582a104d48cad410d8b410e93b Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Tue, 5 Mar 2024 18:12:59 -0500 Subject: [PATCH 072/145] ref(crons): Clean up mark_failed logic (#66372) Just some minor re-organiztion to make it a bit more readible --- src/sentry/monitors/logic/mark_failed.py | 39 ++++++++++++------------ 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/src/sentry/monitors/logic/mark_failed.py b/src/sentry/monitors/logic/mark_failed.py index 0d97ba621595b1..57829aa25f2f29 100644 --- a/src/sentry/monitors/logic/mark_failed.py +++ b/src/sentry/monitors/logic/mark_failed.py @@ -86,12 +86,11 @@ def mark_failed( monitor_env.refresh_from_db() # Create incidents + issues - use_issue_platform = False try: organization = Organization.objects.get_from_cache(id=monitor_env.monitor.organization_id) use_issue_platform = features.has("organizations:issue-platform", organization=organization) except Organization.DoesNotExist: - pass + use_issue_platform = False if use_issue_platform: return mark_failed_threshold(failed_checkin, failure_issue_threshold) @@ -110,24 +109,7 @@ def mark_failed_threshold(failed_checkin: MonitorCheckIn, failure_issue_threshol # check to see if we need to update the status if monitor_env.status in [MonitorStatus.OK, MonitorStatus.ACTIVE]: - # evaluation logic for multiple check-ins - if failure_issue_threshold > 1: - # reverse the list after slicing in order to start with oldest check-in - # use .values() to speed up query - previous_checkins = list( - reversed( - MonitorCheckIn.objects.filter( - monitor_environment=monitor_env, date_added__lte=failed_checkin.date_added - ) - .order_by("-date_added") - .values("id", "date_added", "status")[:failure_issue_threshold] - ) - ) - # check for any successful previous check-in - if any([checkin["status"] == CheckInStatus.OK for checkin in previous_checkins]): - return False - # if 
threshold is 1, just use the most recent check-in - else: + if failure_issue_threshold == 1: previous_checkins = [ { "id": failed_checkin.id, @@ -135,6 +117,23 @@ def mark_failed_threshold(failed_checkin: MonitorCheckIn, failure_issue_threshol "status": failed_checkin.status, } ] + else: + previous_checkins = ( + # Using .values for performance reasons + MonitorCheckIn.objects.filter( + monitor_environment=monitor_env, date_added__lte=failed_checkin.date_added + ) + .order_by("-date_added") + .values("id", "date_added", "status") + ) + + # reverse the list after slicing in order to start with oldest check-in + previous_checkins = list(reversed(previous_checkins[:failure_issue_threshold])) + + # If we have any successful check-ins within the threshold of + # commits we have NOT reached an incident state + if any([checkin["status"] == CheckInStatus.OK for checkin in previous_checkins]): + return False # change monitor status + update fingerprint timestamp monitor_env.status = MonitorStatus.ERROR From 7445433a67ef110b1368f1646d014a11abde35cc Mon Sep 17 00:00:00 2001 From: Ryan Albrecht Date: Tue, 5 Mar 2024 15:20:03 -0800 Subject: [PATCH 073/145] docs: Wrap stories with OrganizationContainer (#66323) SCR-20240305-ihne --- static/app/utils/useOrganization.stories.tsx | 16 +++++ static/app/views/stories/index.tsx | 73 ++++++++++---------- 2 files changed, 54 insertions(+), 35 deletions(-) create mode 100644 static/app/utils/useOrganization.stories.tsx diff --git a/static/app/utils/useOrganization.stories.tsx b/static/app/utils/useOrganization.stories.tsx new file mode 100644 index 00000000000000..ce58ee699f9700 --- /dev/null +++ b/static/app/utils/useOrganization.stories.tsx @@ -0,0 +1,16 @@ +import ObjectInspector from 'sentry/components/objectInspector'; +import StructuredEventData from 'sentry/components/structuredEventData'; +import storyBook from 'sentry/stories/storyBook'; +import useOrganization from 'sentry/utils/useOrganization'; + +export default 
storyBook('useOrganization', story => { + story('useOrganization - via ObjectInspector', () => { + const org = useOrganization(); + return ; + }); + + story('useOrganization - via StructuredEventData', () => { + const org = useOrganization(); + return ; + }); +}); diff --git a/static/app/views/stories/index.tsx b/static/app/views/stories/index.tsx index 1144ecb8e2ed83..37e1f8f2400413 100644 --- a/static/app/views/stories/index.tsx +++ b/static/app/views/stories/index.tsx @@ -4,6 +4,7 @@ import styled from '@emotion/styled'; import Input from 'sentry/components/input'; import {space} from 'sentry/styles/space'; +import OrganizationContainer from 'sentry/views/organizationContainer'; import EmptyStory from 'sentry/views/stories/emptyStory'; import ErrorStory from 'sentry/views/stories/errorStory'; import storiesContext from 'sentry/views/stories/storiesContext'; @@ -20,43 +21,45 @@ export default function Stories({location}: Props) { const [searchTerm, setSearchTerm] = useState(''); return ( - -
- setSearchTerm(e.target.value.toLowerCase())} - /> - - s.toLowerCase().includes(searchTerm))} + + +
+ setSearchTerm(e.target.value.toLowerCase())} /> - -
- + + s.toLowerCase().includes(searchTerm))} + /> + +
+ - {story.error ? ( - - - - ) : story.resolved ? ( -
- -
- ) : ( - - - - )} -
+ {story.error ? ( + + + + ) : story.resolved ? ( +
+ +
+ ) : ( + + + + )} + + ); } From 9e85a140c0b7c8afda911d299dc9523890b08c15 Mon Sep 17 00:00:00 2001 From: Stephen Cefali Date: Tue, 5 Mar 2024 15:33:32 -0800 Subject: [PATCH 074/145] logging(group-owner): adds logging for create group owners (#66371) Used for debugging race conditions of when we create the row vs when we check for it --- src/sentry/tasks/post_process.py | 36 ++++++++++++++++++++++---------- 1 file changed, 25 insertions(+), 11 deletions(-) diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index 9becab9da18d32..8b589fdcfac95b 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -358,9 +358,11 @@ def handle_group_owners( lock = locks.get(f"groupowner-bulk:{group.id}", duration=10, name="groupowner_bulk") try: - with metrics.timer("post_process.handle_group_owners"), sentry_sdk.start_span( - op="post_process.handle_group_owners" - ), lock.acquire(): + with ( + metrics.timer("post_process.handle_group_owners"), + sentry_sdk.start_span(op="post_process.handle_group_owners"), + lock.acquire(), + ): current_group_owners = GroupOwner.objects.filter( group=group, type__in=[GroupOwnerType.OWNERSHIP_RULE.value, GroupOwnerType.CODEOWNERS.value], @@ -439,6 +441,15 @@ def handle_group_owners( instance=go, created=True, ) + logger.info( + "group_owners.bulk_create", + extra={ + "group_id": group.id, + "project_id": project.id, + "organization_id": project.organization_id, + "count": len(new_group_owners), + }, + ) except UnableToAcquireLock: pass @@ -737,14 +748,17 @@ def run_post_process_job(job: PostProcessJob): for pipeline_step in pipeline: try: - with metrics.timer( - "tasks.post_process.run_post_process_job.pipeline.duration", - tags={ - "pipeline": pipeline_step.__name__, - "issue_category": issue_category_metric, - "is_reprocessed": job["is_reprocessed"], - }, - ), sentry_sdk.start_span(op=f"tasks.post_process_group.{pipeline_step.__name__}"): + with ( + metrics.timer( + 
"tasks.post_process.run_post_process_job.pipeline.duration", + tags={ + "pipeline": pipeline_step.__name__, + "issue_category": issue_category_metric, + "is_reprocessed": job["is_reprocessed"], + }, + ), + sentry_sdk.start_span(op=f"tasks.post_process_group.{pipeline_step.__name__}"), + ): pipeline_step(job) except Exception: metrics.incr( From 7d16cb4570e4f29717a48afc81e8c6a1f1307aa6 Mon Sep 17 00:00:00 2001 From: Dan Fuller Date: Tue, 5 Mar 2024 16:01:42 -0800 Subject: [PATCH 075/145] fix(crons): Fix backfill script to handle the case where a monitor with a slug already exists in the target org (#66340) There are 4 monitors that exist twice in the same project, but in different orgs. The ones that have a different org than the project's org are totally broken and can't receive checkins. We'll just delete them, they're inaccessible at the moment. --- .../0660_fix_cron_monitor_invalid_orgs.py | 9 +++++++-- ...test_0660_fix_cron_monitor_invalid_orgs.py | 19 +++++++++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/src/sentry/migrations/0660_fix_cron_monitor_invalid_orgs.py b/src/sentry/migrations/0660_fix_cron_monitor_invalid_orgs.py index b7b49e554bf5b0..a23e9a672ef6a1 100644 --- a/src/sentry/migrations/0660_fix_cron_monitor_invalid_orgs.py +++ b/src/sentry/migrations/0660_fix_cron_monitor_invalid_orgs.py @@ -17,8 +17,13 @@ def fix_cron_monitor_invalid_orgs(apps, schema_editor) -> None: continue if project.organization_id != monitor.organization_id: - monitor.organization_id = project.organization_id - monitor.save(update_fields=["organization_id"]) + if Monitor.objects.filter(organization_id=project.organization_id, slug=monitor.slug): + # There are a small number of these and due to the way ingest works they can't + # receive checkins, so they're totally broken. Just delete. 
+ monitor.delete() + else: + monitor.organization_id = project.organization_id + monitor.save(update_fields=["organization_id", "slug"]) class Migration(CheckedMigration): diff --git a/tests/sentry/migrations/test_0660_fix_cron_monitor_invalid_orgs.py b/tests/sentry/migrations/test_0660_fix_cron_monitor_invalid_orgs.py index 61cf5de0f76c94..9696f2305f7d7a 100644 --- a/tests/sentry/migrations/test_0660_fix_cron_monitor_invalid_orgs.py +++ b/tests/sentry/migrations/test_0660_fix_cron_monitor_invalid_orgs.py @@ -1,3 +1,5 @@ +import pytest + from sentry.monitors.models import Monitor from sentry.testutils.cases import TestMigrations @@ -20,9 +22,26 @@ def setup_initial_state(self): slug="invalid-monitor", name="invalid-monitor", ) + self.slug_already_exists = Monitor.objects.create( + organization_id=self.other_org.id, + project_id=self.project.id, + slug="already-exists", + name="already-exists", + ) + self.existing_monitor = Monitor.objects.create( + organization_id=self.project.organization_id, + project_id=self.project.id, + slug="already-exists", + name="already-exists", + ) def test(self): self.valid_monitor.refresh_from_db() self.invalid_monitor.refresh_from_db() + self.existing_monitor.refresh_from_db() assert self.valid_monitor.organization_id == self.project.organization_id assert self.invalid_monitor.organization_id == self.project.organization_id + assert self.existing_monitor.organization_id == self.project.organization_id + assert self.existing_monitor.organization_id == self.project.organization_id + with pytest.raises(Monitor.DoesNotExist): + self.slug_already_exists.refresh_from_db() From 5005304175029a89535ddf4491e0c7adc7c2628e Mon Sep 17 00:00:00 2001 From: Stephen Cefali Date: Tue, 5 Mar 2024 16:19:35 -0800 Subject: [PATCH 076/145] logging(codeowners): adds logging when we delete code group owners (#66358) Debug logging --- src/sentry/tasks/groupowner.py | 18 ++++++++++++++++++ src/sentry/tasks/post_process.py | 18 ++++++++++++++++++ 2 files 
changed, 36 insertions(+) diff --git a/src/sentry/tasks/groupowner.py b/src/sentry/tasks/groupowner.py index aa0796af38ab21..a89838eb097c58 100644 --- a/src/sentry/tasks/groupowner.py +++ b/src/sentry/tasks/groupowner.py @@ -92,6 +92,15 @@ def _process_suspect_commits( pass else: owner.delete() + logger.info( + "process_suspect_commits.group_owner_removed", + extra={ + "event": event_id, + "group": group_id, + "owner_id": owner.user_id, + "project": project_id, + }, + ) except GroupOwner.MultipleObjectsReturned: GroupOwner.objects.filter( group_id=group_id, @@ -100,6 +109,15 @@ def _process_suspect_commits( project=project, organization_id=project.organization_id, )[0].delete() + logger.info( + "process_suspect_commits.multiple_owners_removed", + extra={ + "event": event_id, + "group": group_id, + "owner_id": owner_id, + "project": project_id, + }, + ) cache.set( cache_key, True, PREFERRED_GROUP_OWNER_AGE.total_seconds() diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index 8b589fdcfac95b..c53204fc7a4044 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -339,6 +339,10 @@ def handle_invalid_group_owners(group): ) for owner in invalid_group_owners: owner.delete() + logger.info( + "handle_invalid_group_owners.delete_group_owner", + extra={"group": group.id, "group_owner_id": owner.id, "project": group.project_id}, + ) def handle_group_owners( @@ -379,6 +383,12 @@ def handle_group_owners( # Owners already in the database that we'll keep keeping_owners = set() for group_owner in current_group_owners: + logging_params = { + "group": group.id, + "project": project.id, + "organization": project.organization_id, + "group_owner_id": group_owner.id, + } owner_rule_type = ( OwnerRuleType.CODEOWNERS.value if group_owner.type == GroupOwnerType.CODEOWNERS.value @@ -393,6 +403,10 @@ def handle_group_owners( lookup_key_value = None if lookup_key not in new_owners: group_owner.delete() + logger.info( + 
"handle_group_owners.delete_group_owner", + extra={**logging_params, "reason": "assignment_deleted"}, + ) else: lookup_key_value = new_owners.get(lookup_key) # Old groupowner assignment from outdated rules get deleted @@ -401,6 +415,10 @@ def handle_group_owners( and (group_owner.context or {}).get("rule") not in lookup_key_value ): group_owner.delete() + logger.info( + "handle_group_owners.delete_group_owner", + extra={**logging_params, "reason": "outdated_rule"}, + ) else: keeping_owners.add(lookup_key) From ef8ec5705f533cf4a48f0ade71ea9e8542a7ee12 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Tue, 5 Mar 2024 16:23:21 -0800 Subject: [PATCH 077/145] ref(feedback): remove duplicate native platform (#66384) accidentally had two `native` platforms in this list --- static/app/data/platformCategories.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/static/app/data/platformCategories.tsx b/static/app/data/platformCategories.tsx index bc05082e6507de..8737e46e1c8e96 100644 --- a/static/app/data/platformCategories.tsx +++ b/static/app/data/platformCategories.tsx @@ -424,7 +424,6 @@ export const feedbackWebApiPlatforms: readonly PlatformKey[] = [ 'rust', 'native', 'native-qt', - 'native', 'node-awslambda', 'node-azurefunctions', 'node-connect', From 57a37c3b496b15583f869e3fc2301b7cee884bff Mon Sep 17 00:00:00 2001 From: Malachi Willey Date: Tue, 5 Mar 2024 16:30:20 -0800 Subject: [PATCH 078/145] feat(issue-details): Improved UI for viewing object/array values (#66153) Fixes https://github.com/getsentry/sentry/issues/58573, https://github.com/getsentry/sentry/issues/48480 Previously, objects/arrays were only collapsible after they reached maxDefaultDepth. The major change here is that now each level is collapsible - Each level is now always collapsible. 
Before, objects/arrays were only collapsible after a certain depth - Arrays/objects begin collapsed when there are more than 5 items, in addition to when they are at a certain depth - Changed the appearance and location of the toggle buttons to better match commonly-used json viewers - Added `n items` preview text for collapsed objects/arrays (which is clickable!) --- .../events/eventExtraData/index.spec.tsx | 1 + .../events/interfaces/request/index.spec.tsx | 2 + .../structuredEventData/collapsibleValue.tsx | 112 +++++++++++ .../structuredEventData/index.spec.tsx | 40 +++- .../structuredEventData/index.stories.tsx | 4 +- .../components/structuredEventData/index.tsx | 185 +++++++++++------- .../components/structuredEventData/toggle.tsx | 75 ------- 7 files changed, 275 insertions(+), 144 deletions(-) create mode 100644 static/app/components/structuredEventData/collapsibleValue.tsx delete mode 100644 static/app/components/structuredEventData/toggle.tsx diff --git a/static/app/components/events/eventExtraData/index.spec.tsx b/static/app/components/events/eventExtraData/index.spec.tsx index d724e4fa70f8a6..0038ce1445fa49 100644 --- a/static/app/components/events/eventExtraData/index.spec.tsx +++ b/static/app/components/events/eventExtraData/index.spec.tsx @@ -178,6 +178,7 @@ describe('EventExtraData', function () { }, }); + await userEvent.click(screen.getByRole('button', {name: 'Expand'})); expect(await screen.findAllByText(/redacted/)).toHaveLength(10); await userEvent.hover(screen.getAllByText(/redacted/)[0]); diff --git a/static/app/components/events/interfaces/request/index.spec.tsx b/static/app/components/events/interfaces/request/index.spec.tsx index 349775b91cb910..df27c7b78df44f 100644 --- a/static/app/components/events/interfaces/request/index.spec.tsx +++ b/static/app/components/events/interfaces/request/index.spec.tsx @@ -174,6 +174,8 @@ describe('Request entry', function () { expect(screen.getAllByText(/redacted/)).toHaveLength(5); + // Expand two levels 
down + await userEvent.click(await screen.findByLabelText('Expand')); await userEvent.click(await screen.findByLabelText('Expand')); expect(screen.getAllByText(/redacted/)).toHaveLength(7); diff --git a/static/app/components/structuredEventData/collapsibleValue.tsx b/static/app/components/structuredEventData/collapsibleValue.tsx new file mode 100644 index 00000000000000..a979682f878fbc --- /dev/null +++ b/static/app/components/structuredEventData/collapsibleValue.tsx @@ -0,0 +1,112 @@ +import {Children, useState} from 'react'; +import {css} from '@emotion/react'; +import styled from '@emotion/styled'; + +import {Button} from 'sentry/components/button'; +import {IconChevron} from 'sentry/icons'; +import {t, tn} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; + +type CollapsibleValueProps = { + children: React.ReactNode; + closeTag: string; + depth: number; + maxDefaultDepth: number; + openTag: string; + prefix?: React.ReactNode; +}; + +const MAX_ITEMS_BEFORE_AUTOCOLLAPSE = 5; + +export function CollapsibleValue({ + children, + openTag, + closeTag, + prefix = null, + depth, + maxDefaultDepth, +}: CollapsibleValueProps) { + const numChildren = Children.count(children); + const [isExpanded, setIsExpanded] = useState( + numChildren <= MAX_ITEMS_BEFORE_AUTOCOLLAPSE && depth < maxDefaultDepth + ); + + const shouldShowToggleButton = numChildren > 0; + const isBaseLevel = depth === 0; + + // Toggle buttons get placed to the left of the open tag, but if this is the + // base level there is no room for it. So we add padding in this case. + const baseLevelPadding = isBaseLevel && shouldShowToggleButton; + + return ( + + {numChildren > 0 ? ( + setIsExpanded(oldValue => !oldValue)} + icon={ + + } + borderless + baseLevelPadding={baseLevelPadding} + /> + ) : null} + {prefix} + {openTag} + {shouldShowToggleButton && !isExpanded ? ( + setIsExpanded(true)}> + {tn('%s item', '%s items', numChildren)} + + ) : null} + {shouldShowToggleButton && isExpanded ? 
( + {children} + ) : null} + {closeTag} + + ); +} + +const CollapsibleDataContainer = styled('span')<{baseLevelPadding: boolean}>` + position: relative; + + ${p => + p.baseLevelPadding && + css` + display: block; + padding-left: ${space(3)}; + `} +`; + +const IndentedValues = styled('div')` + padding-left: ${space(1.5)}; +`; + +const NumItemsButton = styled(Button)` + background: none; + border: none; + padding: 0 2px; + border-radius: 2px; + font-weight: normal; + box-shadow: none; + font-size: ${p => p.theme.fontSizeSmall}; + color: ${p => p.theme.subText}; + margin: 0 ${space(0.5)}; +`; + +const ToggleButton = styled(Button)<{baseLevelPadding: boolean}>` + position: absolute; + left: -${space(3)}; + top: 2px; + border-radius: 2px; + align-items: center; + justify-content: center; + background: none; + border: none; + + ${p => + p.baseLevelPadding && + css` + left: 0; + `} +`; diff --git a/static/app/components/structuredEventData/index.spec.tsx b/static/app/components/structuredEventData/index.spec.tsx index b86cc8803a5ffd..513a1cabbad853 100644 --- a/static/app/components/structuredEventData/index.spec.tsx +++ b/static/app/components/structuredEventData/index.spec.tsx @@ -1,4 +1,4 @@ -import {render, screen, within} from 'sentry-test/reactTestingLibrary'; +import {render, screen, userEvent, within} from 'sentry-test/reactTestingLibrary'; import StructuredEventData from 'sentry/components/structuredEventData'; @@ -60,4 +60,42 @@ describe('ContextData', function () { ).toBeInTheDocument(); }); }); + + describe('collpasible values', function () { + it('auto-collapses objects/arrays with more than 5 items', async function () { + render( + + ); + + expect(screen.getByText('one_child_value')).toBeInTheDocument(); + expect(screen.queryByText('two_child_value')).not.toBeInTheDocument(); + + // Click the "6 items" button to expand the object + await userEvent.click(screen.getByRole('button', {name: '6 items'})); + 
expect(screen.getByText('two_child_value')).toBeInTheDocument(); + }); + }); + + it('auto-collapses objects/arrays after max depth', async function () { + render(); + + expect(screen.getByText('1')).toBeInTheDocument(); + expect(screen.queryByText('2')).not.toBeInTheDocument(); + + // Click the "2 items" button to expand the array + await userEvent.click(screen.getByRole('button', {name: '2 items'})); + expect(screen.getByText('3')).toBeInTheDocument(); + }); }); diff --git a/static/app/components/structuredEventData/index.stories.tsx b/static/app/components/structuredEventData/index.stories.tsx index 94278575cd6490..eb74ee93f1b602 100644 --- a/static/app/components/structuredEventData/index.stories.tsx +++ b/static/app/components/structuredEventData/index.stories.tsx @@ -18,8 +18,8 @@ export default storyBook(StructuredEventData, story => { - - + + ); }); diff --git a/static/app/components/structuredEventData/index.tsx b/static/app/components/structuredEventData/index.tsx index a2f826443ef6f3..9420f539c8eecd 100644 --- a/static/app/components/structuredEventData/index.tsx +++ b/static/app/components/structuredEventData/index.tsx @@ -3,11 +3,11 @@ import styled from '@emotion/styled'; import {AnnotatedText} from 'sentry/components/events/meta/annotatedText'; import ExternalLink from 'sentry/components/links/externalLink'; +import {CollapsibleValue} from 'sentry/components/structuredEventData/collapsibleValue'; import {IconOpen} from 'sentry/icons'; import {t} from 'sentry/locale'; import {isUrl} from 'sentry/utils'; -import Toggle from './toggle'; import { looksLikeMultiLineString, looksLikeStrippedValue, @@ -75,30 +75,50 @@ function StructuredData({ maxDefaultDepth, withAnnotatedText, meta, + objectKey, }: { config: StructedEventDataConfig | undefined; depth: number; maxDefaultDepth: number; meta: Record | undefined; withAnnotatedText: boolean; + objectKey?: string; // TODO(TS): What possible types can `value` be? 
value?: any; }) { let i = 0; - const children: React.ReactNode[] = []; + const formattedObjectKey = objectKey ? ( + + + {config?.renderObjectKeys?.(objectKey) ?? objectKey} + + {': '} + + ) : null; + + function Wrapper({children}: {children: React.ReactNode}) { + return ( + + {formattedObjectKey} + {children} + + ); + } if (config?.isNull?.(value) || value === null) { const nullValue = config?.renderNull?.(value) ?? String(value); return ( - - - + + + + + ); } @@ -106,21 +126,29 @@ function StructuredData({ const booleanValue = config?.renderBoolean?.(value) ?? String(value); return ( - - - + + + + + ); } if (typeof value === 'number' || config?.isNumber?.(value)) { return ( - - - + + + + + ); } @@ -129,45 +157,63 @@ function StructuredData({ const stringValue = config.renderString?.(value) ?? value; return ( - - {'"'} - - {'"'} - - + + + {'"'} + + {'"'} + + + ); } if (looksLikeStrippedValue(value)) { return ( - + + + + + + ); + } + + if (looksLikeMultiLineString(value)) { + + - - ); - } - - if (looksLikeMultiLineString(value)) { - - - ; + + ; } return ( - - - - + + + + + + ); } + const children: React.ReactNode[] = []; + if (Array.isArray(value)) { for (i = 0; i < value.length; i++) { children.push( @@ -180,21 +226,27 @@ function StructuredData({ meta={meta?.[i]} maxDefaultDepth={maxDefaultDepth} /> - {i < value.length - 1 ? {', '} : null} + {i < value.length - 1 ? {','} : null}
); } return ( - - {'['} - {children} - {']'} - + + {children} + ); } + if (isValidElement(value)) { return value; } + const keys = Object.keys(value); keys.sort(naturalCaseInsensitiveSort); for (i = 0; i < keys.length; i++) { @@ -202,29 +254,30 @@ function StructuredData({ children.push(
- {config?.renderObjectKeys?.(key) ?? key} - {': '} - - - {i < keys.length - 1 ? {', '} : null} - + + {i < keys.length - 1 ? {','} : null}
); } return ( - - {'{'} - {children} - {'}'} - + + {children} + ); } diff --git a/static/app/components/structuredEventData/toggle.tsx b/static/app/components/structuredEventData/toggle.tsx deleted file mode 100644 index 3cff6e17a57a17..00000000000000 --- a/static/app/components/structuredEventData/toggle.tsx +++ /dev/null @@ -1,75 +0,0 @@ -import {Children, useState} from 'react'; -import styled from '@emotion/styled'; - -import {IconAdd, IconSubtract} from 'sentry/icons'; -import {t} from 'sentry/locale'; - -type Props = { - children: React.ReactNode; - highUp: boolean; -}; - -function Toggle({highUp, children}: Props) { - const [isExpanded, setIsExpanded] = useState(false); - - if (Children.count(children) === 0) { - return null; - } - - const wrappedChildren = {children}; - - if (highUp) { - return wrappedChildren; - } - - return ( - - { - setIsExpanded(!isExpanded); - evt.preventDefault(); - }} - > - {isExpanded ? ( - - ) : ( - - )} - - {isExpanded && wrappedChildren} - - ); -} - -export default Toggle; - -const IconWrapper = styled('div')<{isExpanded: boolean}>` - border-radius: 2px; - display: inline-flex; - align-items: center; - justify-content: center; - cursor: pointer; - ${p => - p.isExpanded - ? ` - background: ${p.theme.gray300}; - border: 1px solid ${p.theme.gray300}; - &:hover { - background: ${p.theme.gray400}; - } - ` - : ` - background: ${p.theme.blue300}; - border: 1px solid ${p.theme.blue300}; - &:hover { - background: ${p.theme.blue200}; - } - `} -`; - -const ValueWrapper = styled('span')` - display: block; - padding: 0 0 0 15px; -`; From 75f021cfafc666334b942975429de0e2bc00370f Mon Sep 17 00:00:00 2001 From: Lyn Nagara Date: Tue, 5 Mar 2024 16:34:17 -0800 Subject: [PATCH 079/145] ref: Move code to new-style kafka topics - take 2 (#66381) Brings back https://github.com/getsentry/sentry/pull/66283 with fixes to the indexer strategy. It crashed on the prior deploy as we were not producing to the correctly resolved topic. 
--- src/sentry/conf/server.py | 10 ++++++++-- src/sentry/conf/types/kafka_definition.py | 2 +- src/sentry/conf/types/topic_definition.py | 2 ++ src/sentry/consumers/__init__.py | 9 +++++---- src/sentry/eventstream/kafka/backend.py | 18 ++++++++++-------- src/sentry/issues/attributes.py | 7 ++++--- src/sentry/issues/producer.py | 7 ++++--- src/sentry/monitors/tasks.py | 14 ++++++++------ src/sentry/replays/lib/kafka.py | 5 ++--- .../replays/usecases/ingest/dom_index.py | 5 ++--- src/sentry/runner/commands/devserver.py | 7 +++---- src/sentry/sentry_metrics/configuration.py | 8 +++++--- .../consumers/indexer/multiprocess.py | 5 +++-- src/sentry/usage_accountant/accountant.py | 4 ++-- src/sentry/utils/kafka_config.py | 12 ++++++------ src/sentry/utils/outcomes.py | 5 +++-- tests/sentry/utils/test_outcomes.py | 17 ++++++----------- tests/snuba/incidents/test_tasks.py | 17 ++++++++++------- 18 files changed, 84 insertions(+), 70 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 2864079be93516..6be7b064214ccc 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -21,7 +21,6 @@ from sentry.conf.types.logging_config import LoggingConfig from sentry.conf.types.role_dict import RoleDict from sentry.conf.types.sdk_config import ServerSdkConfig -from sentry.conf.types.topic_definition import TopicDefinition from sentry.utils import json # NOQA (used in getsentry config) from sentry.utils.celery import crontab_with_minute_jitter from sentry.utils.types import Type, type_from_value @@ -3531,9 +3530,16 @@ def build_cdc_postgres_init_db_volume(settings: Any) -> dict[str, dict[str, str] "shared-resources-usage": "default", } +from typing import TypedDict + + +class LegacyTopicDefinition(TypedDict): + cluster: str + + # Cluster configuration for each Kafka topic by name. 
# DEPRECATED -KAFKA_TOPICS: Mapping[str, TopicDefinition] = { +KAFKA_TOPICS: Mapping[str, LegacyTopicDefinition] = { KAFKA_EVENTS: {"cluster": "default"}, KAFKA_EVENTS_COMMIT_LOG: {"cluster": "default"}, KAFKA_TRANSACTIONS: {"cluster": "default"}, diff --git a/src/sentry/conf/types/kafka_definition.py b/src/sentry/conf/types/kafka_definition.py index f9f38687453b7b..019850320ec941 100644 --- a/src/sentry/conf/types/kafka_definition.py +++ b/src/sentry/conf/types/kafka_definition.py @@ -71,7 +71,7 @@ class ConsumerDefinition(TypedDict, total=False): synchronize_commit_group_default: str synchronize_commit_log_topic_default: str - dlq_topic: str + dlq_topic: Topic dlq_max_invalid_ratio: float | None dlq_max_consecutive_count: int | None diff --git a/src/sentry/conf/types/topic_definition.py b/src/sentry/conf/types/topic_definition.py index 41992b74d9ad78..bc5aaa44ddef80 100644 --- a/src/sentry/conf/types/topic_definition.py +++ b/src/sentry/conf/types/topic_definition.py @@ -5,3 +5,5 @@ class TopicDefinition(TypedDict): cluster: str + # The topic name may be overridden from the default via KAFKA_TOPIC_OVERRIDES + real_topic_name: str diff --git a/src/sentry/consumers/__init__.py b/src/sentry/consumers/__init__.py index 16ecf2ca371a7d..5ab163a28f1b27 100644 --- a/src/sentry/consumers/__init__.py +++ b/src/sentry/consumers/__init__.py @@ -306,7 +306,7 @@ def ingest_events_options() -> list[click.Option]: "static_args": { "ingest_profile": "release-health", }, - "dlq_topic": settings.KAFKA_INGEST_METRICS_DLQ, + "dlq_topic": Topic.INGEST_METRICS_DLQ, "dlq_max_invalid_ratio": 0.01, "dlq_max_consecutive_count": 1000, }, @@ -317,7 +317,7 @@ def ingest_events_options() -> list[click.Option]: "static_args": { "ingest_profile": "performance", }, - "dlq_topic": settings.KAFKA_INGEST_GENERIC_METRICS_DLQ, + "dlq_topic": Topic.INGEST_GENERIC_METRICS_DLQ, "dlq_max_invalid_ratio": 0.01, "dlq_max_consecutive_count": 1000, }, @@ -525,7 +525,8 @@ def build_consumer_config(group_id: 
str): f"Cannot enable DLQ for consumer: {consumer_name}, no DLQ topic has been defined for it" ) from e try: - cluster_setting = get_topic_definition(dlq_topic)["cluster"] + dlq_topic_defn = get_topic_definition(dlq_topic) + cluster_setting = dlq_topic_defn["cluster"] except ValueError as e: raise click.BadParameter( f"Cannot enable DLQ for consumer: {consumer_name}, DLQ topic {dlq_topic} is not configured in this environment" @@ -535,7 +536,7 @@ def build_consumer_config(group_id: str): dlq_producer = KafkaProducer(producer_config) dlq_policy = DlqPolicy( - KafkaDlqProducer(dlq_producer, ArroyoTopic(dlq_topic)), + KafkaDlqProducer(dlq_producer, ArroyoTopic(dlq_topic_defn["real_topic_name"])), DlqLimit( max_invalid_ratio=consumer_definition["dlq_max_invalid_ratio"], max_consecutive_count=consumer_definition["dlq_max_consecutive_count"], diff --git a/src/sentry/eventstream/kafka/backend.py b/src/sentry/eventstream/kafka/backend.py index 4ec2aa728aa37f..97fe9b98de8435 100644 --- a/src/sentry/eventstream/kafka/backend.py +++ b/src/sentry/eventstream/kafka/backend.py @@ -7,9 +7,9 @@ from confluent_kafka import KafkaError from confluent_kafka import Message as KafkaMessage from confluent_kafka import Producer -from django.conf import settings from sentry import options +from sentry.conf.types.kafka_definition import Topic from sentry.eventstream.base import EventStreamEventType, GroupStates from sentry.eventstream.snuba import KW_SKIP_SEMANTIC_PARTITIONING, SnubaProtocolEventStream from sentry.killswitches import killswitch_matches_context @@ -24,15 +24,15 @@ class KafkaEventStream(SnubaProtocolEventStream): def __init__(self, **options: Any) -> None: - self.topic = settings.KAFKA_EVENTS - self.transactions_topic = settings.KAFKA_TRANSACTIONS - self.issue_platform_topic = settings.KAFKA_EVENTSTREAM_GENERIC - self.__producers: MutableMapping[str, Producer] = {} + self.topic = Topic.EVENTS + self.transactions_topic = Topic.TRANSACTIONS + self.issue_platform_topic = 
Topic.EVENTSTREAM_GENERIC + self.__producers: MutableMapping[Topic, Producer] = {} - def get_transactions_topic(self, project_id: int) -> str: + def get_transactions_topic(self, project_id: int) -> Topic: return self.transactions_topic - def get_producer(self, topic: str) -> Producer: + def get_producer(self, topic: Topic) -> Producer: if topic not in self.__producers: cluster_name = get_topic_definition(topic)["cluster"] cluster_options = get_kafka_producer_cluster_options(cluster_name) @@ -202,9 +202,11 @@ def _send( assert isinstance(extra_data, tuple) + real_topic = get_topic_definition(topic)["real_topic_name"] + try: producer.produce( - topic=topic, + topic=real_topic, key=str(project_id).encode("utf-8") if not skip_semantic_partitioning else None, value=json.dumps((self.EVENT_PROTOCOL_VERSION, _type) + extra_data), on_delivery=self.delivery_callback, diff --git a/src/sentry/issues/attributes.py b/src/sentry/issues/attributes.py index 5e0e699662f779..9b15f831558564 100644 --- a/src/sentry/issues/attributes.py +++ b/src/sentry/issues/attributes.py @@ -6,7 +6,7 @@ import requests import urllib3 -from arroyo import Topic +from arroyo import Topic as ArroyoTopic from arroyo.backends.kafka import KafkaPayload, KafkaProducer, build_kafka_configuration from django.conf import settings from django.db.models import F, Window @@ -16,6 +16,7 @@ from sentry_kafka_schemas.schema_types.group_attributes_v1 import GroupAttributesSnapshot from sentry import options +from sentry.conf.types.kafka_definition import Topic from sentry.models.group import Group from sentry.models.groupassignee import GroupAssignee from sentry.models.groupowner import GroupOwner, GroupOwnerType @@ -44,7 +45,7 @@ class GroupValues: def _get_attribute_snapshot_producer() -> KafkaProducer: - cluster_name = get_topic_definition(settings.KAFKA_GROUP_ATTRIBUTES)["cluster"] + cluster_name = get_topic_definition(Topic.GROUP_ATTRIBUTES)["cluster"] producer_config = 
get_kafka_producer_cluster_options(cluster_name) producer_config.pop("compression.type", None) producer_config.pop("message.max.bytes", None) @@ -122,7 +123,7 @@ def produce_snapshot_to_kafka(snapshot: GroupAttributesSnapshot) -> None: raise snuba.SnubaError(err) else: payload = KafkaPayload(None, json.dumps(snapshot).encode("utf-8"), []) - _attribute_snapshot_producer.produce(Topic(settings.KAFKA_GROUP_ATTRIBUTES), payload) + _attribute_snapshot_producer.produce(ArroyoTopic(settings.KAFKA_GROUP_ATTRIBUTES), payload) def _retrieve_group_values(group_id: int) -> GroupValues: diff --git a/src/sentry/issues/producer.py b/src/sentry/issues/producer.py index 5acfef85adcf18..8cd499bbecae72 100644 --- a/src/sentry/issues/producer.py +++ b/src/sentry/issues/producer.py @@ -4,11 +4,12 @@ from collections.abc import MutableMapping from typing import Any, cast -from arroyo import Topic +from arroyo import Topic as ArroyoTopic from arroyo.backends.kafka import KafkaPayload, KafkaProducer, build_kafka_configuration from arroyo.types import Message, Value from django.conf import settings +from sentry.conf.types.kafka_definition import Topic from sentry.issues.issue_occurrence import IssueOccurrence from sentry.issues.run import process_message from sentry.issues.status_change_message import StatusChangeMessage @@ -33,7 +34,7 @@ class PayloadType(ValueEqualityEnum): def _get_occurrence_producer() -> KafkaProducer: - cluster_name = get_topic_definition(settings.KAFKA_INGEST_OCCURRENCES)["cluster"] + cluster_name = get_topic_definition(Topic.INGEST_OCCURRENCES)["cluster"] producer_config = get_kafka_producer_cluster_options(cluster_name) producer_config.pop("compression.type", None) producer_config.pop("message.max.bytes", None) @@ -68,7 +69,7 @@ def produce_occurrence_to_kafka( process_message(Message(Value(payload=payload, committable={}))) return - _occurrence_producer.produce(Topic(settings.KAFKA_INGEST_OCCURRENCES), payload) + 
_occurrence_producer.produce(ArroyoTopic(settings.KAFKA_INGEST_OCCURRENCES), payload) def _prepare_occurrence_message( diff --git a/src/sentry/monitors/tasks.py b/src/sentry/monitors/tasks.py index 79f86b62a7f404..82fd558235d5a5 100644 --- a/src/sentry/monitors/tasks.py +++ b/src/sentry/monitors/tasks.py @@ -7,11 +7,13 @@ import msgpack import sentry_sdk -from arroyo import Partition, Topic +from arroyo import Partition +from arroyo import Topic as ArroyoTopic from arroyo.backends.kafka import KafkaPayload, KafkaProducer, build_kafka_configuration from confluent_kafka.admin import AdminClient, PartitionMetadata from django.conf import settings +from sentry.conf.types.kafka_definition import Topic from sentry.constants import ObjectStatus from sentry.monitors.logic.mark_failed import mark_failed from sentry.monitors.schedule import get_prev_schedule @@ -50,7 +52,7 @@ def _get_producer() -> KafkaProducer: - cluster_name = get_topic_definition(settings.KAFKA_INGEST_MONITORS)["cluster"] + cluster_name = get_topic_definition(Topic.INGEST_MONITORS)["cluster"] producer_config = get_kafka_producer_cluster_options(cluster_name) producer_config.pop("compression.type", None) producer_config.pop("message.max.bytes", None) @@ -62,10 +64,10 @@ def _get_producer() -> KafkaProducer: @lru_cache(maxsize=None) def _get_partitions() -> Mapping[int, PartitionMetadata]: - topic = settings.KAFKA_INGEST_MONITORS - cluster_name = get_topic_definition(topic)["cluster"] + topic_defn = get_topic_definition(Topic.INGEST_MONITORS) + topic = topic_defn["real_topic_name"] - conf = get_kafka_admin_cluster_options(cluster_name) + conf = get_kafka_admin_cluster_options(topic_defn["cluster"]) admin_client = AdminClient(conf) result = admin_client.list_topics(topic) topic_metadata = result.topics.get(topic) @@ -203,7 +205,7 @@ def clock_pulse(current_datetime=None): # topic. This is a requirement to ensure that none of the partitions stall, # since the global clock is tied to the slowest partition. 
for partition in _get_partitions().values(): - dest = Partition(Topic(settings.KAFKA_INGEST_MONITORS), partition.id) + dest = Partition(ArroyoTopic(settings.KAFKA_INGEST_MONITORS), partition.id) _checkin_producer.produce(dest, payload) diff --git a/src/sentry/replays/lib/kafka.py b/src/sentry/replays/lib/kafka.py index 26ab2368e649cc..2bde967b5faf01 100644 --- a/src/sentry/replays/lib/kafka.py +++ b/src/sentry/replays/lib/kafka.py @@ -1,5 +1,4 @@ -from django.conf import settings - +from sentry.conf.types.kafka_definition import Topic from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition from sentry.utils.pubsub import KafkaPublisher @@ -10,7 +9,7 @@ def initialize_replays_publisher(is_async=False) -> KafkaPublisher: global replay_publisher if replay_publisher is None: - config = get_topic_definition(settings.KAFKA_INGEST_REPLAY_EVENTS) + config = get_topic_definition(Topic.INGEST_REPLAY_EVENTS) replay_publisher = KafkaPublisher( get_kafka_producer_cluster_options(config["cluster"]), asynchronous=is_async, diff --git a/src/sentry/replays/usecases/ingest/dom_index.py b/src/sentry/replays/usecases/ingest/dom_index.py index 80b626bb20c987..763162f521e3fe 100644 --- a/src/sentry/replays/usecases/ingest/dom_index.py +++ b/src/sentry/replays/usecases/ingest/dom_index.py @@ -8,9 +8,8 @@ from hashlib import md5 from typing import Any, Literal, TypedDict, cast -from django.conf import settings - from sentry import features +from sentry.conf.types.kafka_definition import Topic from sentry.models.project import Project from sentry.replays.usecases.ingest.events import SentryEvent from sentry.replays.usecases.ingest.issue_creation import ( @@ -219,7 +218,7 @@ def _initialize_publisher() -> KafkaPublisher: global replay_publisher if replay_publisher is None: - config = kafka_config.get_topic_definition(settings.KAFKA_INGEST_REPLAY_EVENTS) + config = kafka_config.get_topic_definition(Topic.INGEST_REPLAY_EVENTS) replay_publisher = 
KafkaPublisher( kafka_config.get_kafka_producer_cluster_options(config["cluster"]) ) diff --git a/src/sentry/runner/commands/devserver.py b/src/sentry/runner/commands/devserver.py index 038a66bfa72238..6e00c6890af293 100644 --- a/src/sentry/runner/commands/devserver.py +++ b/src/sentry/runner/commands/devserver.py @@ -366,12 +366,11 @@ def devserver( from sentry.conf.types.kafka_definition import Topic from sentry.utils.batching_kafka_consumer import create_topics + from sentry.utils.kafka_config import get_topic_definition for topic in Topic: - default_name = topic.value - physical_name = settings.KAFKA_TOPIC_OVERRIDES.get(default_name, default_name) - cluster_name = settings.KAFKA_TOPIC_TO_CLUSTER[default_name] - create_topics(cluster_name, [physical_name]) + topic_defn = get_topic_definition(topic) + create_topics(topic_defn["cluster"], [topic_defn["real_topic_name"]]) if dev_consumer: daemons.append( diff --git a/src/sentry/sentry_metrics/configuration.py b/src/sentry/sentry_metrics/configuration.py index eddebed13a3220..a885712f379d64 100644 --- a/src/sentry/sentry_metrics/configuration.py +++ b/src/sentry/sentry_metrics/configuration.py @@ -10,6 +10,8 @@ import sentry_sdk +from sentry.conf.types.kafka_definition import Topic + # The maximum length of a column that is indexed in postgres. 
It is important to keep this in # sync between the consumers and the models defined in src/sentry/sentry_metrics/models.py MAX_INDEXED_COLUMN_LENGTH = 200 @@ -46,7 +48,7 @@ class MetricsIngestConfiguration: db_backend: IndexerStorage db_backend_options: Mapping[str, Any] input_topic: str - output_topic: str + output_topic: Topic use_case_id: UseCaseKey internal_metrics_tag: str | None writes_limiter_cluster_options: Mapping[str, Any] @@ -79,7 +81,7 @@ def get_ingest_config( db_backend=IndexerStorage.POSTGRES, db_backend_options={}, input_topic=settings.KAFKA_INGEST_METRICS, - output_topic=settings.KAFKA_SNUBA_METRICS, + output_topic=Topic.SNUBA_METRICS, use_case_id=UseCaseKey.RELEASE_HEALTH, internal_metrics_tag="release-health", writes_limiter_cluster_options=settings.SENTRY_METRICS_INDEXER_WRITES_LIMITER_OPTIONS, @@ -96,7 +98,7 @@ def get_ingest_config( db_backend=IndexerStorage.POSTGRES, db_backend_options={}, input_topic=settings.KAFKA_INGEST_PERFORMANCE_METRICS, - output_topic=settings.KAFKA_SNUBA_GENERIC_METRICS, + output_topic=Topic.SNUBA_GENERIC_METRICS, use_case_id=UseCaseKey.PERFORMANCE, internal_metrics_tag="perf", writes_limiter_cluster_options=settings.SENTRY_METRICS_INDEXER_WRITES_LIMITER_OPTIONS_PERFORMANCE, diff --git a/src/sentry/sentry_metrics/consumers/indexer/multiprocess.py b/src/sentry/sentry_metrics/consumers/indexer/multiprocess.py index dd56520a20f521..4dbd6a27f54d01 100644 --- a/src/sentry/sentry_metrics/consumers/indexer/multiprocess.py +++ b/src/sentry/sentry_metrics/consumers/indexer/multiprocess.py @@ -10,6 +10,7 @@ from arroyo.types import Commit, FilteredPayload, Message, Partition from confluent_kafka import Producer +from sentry.conf.types.kafka_definition import Topic from sentry.utils import kafka_config, metrics logger = logging.getLogger(__name__) @@ -18,7 +19,7 @@ class SimpleProduceStep(ProcessingStep[KafkaPayload]): def __init__( self, - output_topic: str, + output_topic: Topic, commit_function: Commit, producer: 
AbstractProducer[KafkaPayload] | None = None, ) -> None: @@ -26,7 +27,7 @@ def __init__( self.__producer = Producer( kafka_config.get_kafka_producer_cluster_options(snuba_metrics["cluster"]), ) - self.__producer_topic = output_topic + self.__producer_topic = snuba_metrics["real_topic_name"] self.__commit_function = commit_function self.__closed = False diff --git a/src/sentry/usage_accountant/accountant.py b/src/sentry/usage_accountant/accountant.py index 2ecf3c49f75c03..ee1e98a8c9cc8f 100644 --- a/src/sentry/usage_accountant/accountant.py +++ b/src/sentry/usage_accountant/accountant.py @@ -12,9 +12,9 @@ from arroyo.backends.abstract import Producer from arroyo.backends.kafka import KafkaPayload, KafkaProducer, build_kafka_configuration -from django.conf import settings from usageaccountant import UsageAccumulator, UsageUnit +from sentry.conf.types.kafka_definition import Topic from sentry.options import get from sentry.utils.kafka_config import get_kafka_producer_cluster_options, get_topic_definition @@ -71,7 +71,7 @@ def record( if _accountant_backend is None: cluster_name = get_topic_definition( - settings.KAFKA_SHARED_RESOURCES_USAGE, + Topic.SHARED_RESOURCES_USAGE, )["cluster"] producer_config = get_kafka_producer_cluster_options(cluster_name) producer = KafkaProducer( diff --git a/src/sentry/utils/kafka_config.py b/src/sentry/utils/kafka_config.py index 2ca53a67bf3a47..93e3c4fc87a126 100644 --- a/src/sentry/utils/kafka_config.py +++ b/src/sentry/utils/kafka_config.py @@ -3,6 +3,7 @@ from django.conf import settings +from sentry.conf.types.kafka_definition import Topic from sentry.conf.types.topic_definition import TopicDefinition SUPPORTED_KAFKA_CONFIGURATION = ( @@ -96,9 +97,8 @@ def get_kafka_admin_cluster_options( ) -def get_topic_definition(topic: str) -> TopicDefinition: - defn = settings.KAFKA_TOPICS.get(topic) - if defn is not None: - return defn - else: - raise ValueError(f"Unknown {topic=}") +def get_topic_definition(topic: Topic) -> TopicDefinition: 
+ return { + "cluster": settings.KAFKA_TOPIC_TO_CLUSTER[topic.value], + "real_topic_name": settings.KAFKA_TOPIC_OVERRIDES.get(topic.value, topic.value), + } diff --git a/src/sentry/utils/outcomes.py b/src/sentry/utils/outcomes.py index 4aa2951b4bc0aa..19774c0a294a0a 100644 --- a/src/sentry/utils/outcomes.py +++ b/src/sentry/utils/outcomes.py @@ -6,6 +6,7 @@ from django.conf import settings +from sentry.conf.types.kafka_definition import Topic from sentry.constants import DataCategory from sentry.utils import json, kafka_config, metrics from sentry.utils.dates import to_datetime @@ -72,8 +73,8 @@ def track_outcome( assert isinstance(category, (type(None), DataCategory)) assert isinstance(quantity, int) - outcomes_config = kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES) - billing_config = kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES_BILLING) + outcomes_config = kafka_config.get_topic_definition(Topic.OUTCOMES) + billing_config = kafka_config.get_topic_definition(Topic.OUTCOMES_BILLING) use_billing = outcome.is_billing() diff --git a/tests/sentry/utils/test_outcomes.py b/tests/sentry/utils/test_outcomes.py index 65a476143d05e9..c7f6a479c47f06 100644 --- a/tests/sentry/utils/test_outcomes.py +++ b/tests/sentry/utils/test_outcomes.py @@ -4,6 +4,7 @@ import pytest from django.conf import settings +from sentry.conf.types.kafka_definition import Topic from sentry.utils import json, kafka_config, outcomes from sentry.utils.outcomes import Outcome, track_outcome @@ -79,9 +80,7 @@ def test_track_outcome_default(setup): ) cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args - assert cluster_args == ( - kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES)["cluster"], - ) + assert cluster_args == (kafka_config.get_topic_definition(Topic.OUTCOMES)["cluster"],) assert outcomes.outcomes_publisher (topic_name, payload), _ = setup.mock_publisher.return_value.publish.call_args @@ -117,7 +116,7 @@ def test_track_outcome_billing(setup): 
) cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args - assert cluster_args == (kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES)["cluster"],) + assert cluster_args == (kafka_config.get_topic_definition(Topic.OUTCOMES)["cluster"],) assert outcomes.outcomes_publisher (topic_name, _), _ = setup.mock_publisher.return_value.publish.call_args @@ -136,7 +135,7 @@ def test_track_outcome_billing_topic(setup): settings.KAFKA_TOPICS, { settings.KAFKA_OUTCOMES_BILLING: { - "cluster": kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES)["cluster"], + "cluster": kafka_config.get_topic_definition(Topic.OUTCOMES)["cluster"], } }, ): @@ -148,9 +147,7 @@ def test_track_outcome_billing_topic(setup): ) cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args - assert cluster_args == ( - kafka_config.get_topic_definition(settings.KAFKA_OUTCOMES)["cluster"], - ) + assert cluster_args == (kafka_config.get_topic_definition(Topic.OUTCOMES)["cluster"],) assert outcomes.outcomes_publisher (topic_name, _), _ = setup.mock_publisher.return_value.publish.call_args @@ -164,9 +161,7 @@ def test_track_outcome_billing_cluster(settings, setup): Checks that outcomes are routed to the dedicated cluster and topic. 
""" - with mock.patch.dict( - settings.KAFKA_TOPICS, {settings.KAFKA_OUTCOMES_BILLING: {"cluster": "different"}} - ): + with mock.patch.dict(settings.KAFKA_TOPIC_TO_CLUSTER, {"outcomes-billing": "different"}): track_outcome( org_id=1, project_id=1, diff --git a/tests/snuba/incidents/test_tasks.py b/tests/snuba/incidents/test_tasks.py index 9354d9bb79c0e9..3bb5185f072757 100644 --- a/tests/snuba/incidents/test_tasks.py +++ b/tests/snuba/incidents/test_tasks.py @@ -8,6 +8,7 @@ from django.conf import settings from django.core import mail +from sentry.conf.types.kafka_definition import Topic from sentry.incidents.action_handlers import ( EmailActionHandler, generate_incident_trigger_email_context, @@ -40,7 +41,7 @@ class HandleSnubaQueryUpdateTest(TestCase): def setUp(self): super().setUp() - self.topic = "metrics-subscription-results" + self.topic = Topic.METRICS_SUBSCRIPTIONS_RESULTS self.orig_registry = deepcopy(subscriber_registry) cluster_options = kafka_config.get_kafka_admin_cluster_options( @@ -48,15 +49,18 @@ def setUp(self): ) self.admin_client = AdminClient(cluster_options) - kafka_cluster = kafka_config.get_topic_definition(self.topic)["cluster"] - create_topics(kafka_cluster, [self.topic]) + topic_defn = kafka_config.get_topic_definition(self.topic) + self.real_topic = topic_defn["real_topic_name"] + self.cluster = topic_defn["cluster"] + + create_topics(self.cluster, [self.real_topic]) def tearDown(self): super().tearDown() subscriber_registry.clear() subscriber_registry.update(self.orig_registry) - self.admin_client.delete_topics([self.topic]) + self.admin_client.delete_topics([self.real_topic]) metrics._metrics_backend = None @cached_property @@ -93,9 +97,8 @@ def action(self): @cached_property def producer(self): - cluster_name = kafka_config.get_topic_definition(self.topic)["cluster"] conf = { - "bootstrap.servers": settings.KAFKA_CLUSTERS[cluster_name]["common"][ + "bootstrap.servers": settings.KAFKA_CLUSTERS[self.cluster]["common"][ 
"bootstrap.servers" ], "session.timeout.ms": 6000, @@ -129,7 +132,7 @@ def run_test(self, consumer): "timestamp": "2020-01-01T01:23:45.1234", }, } - self.producer.produce(self.topic, json.dumps(message)) + self.producer.produce(self.real_topic, json.dumps(message)) self.producer.flush() def active_incident(): From 8fd098382e265329a39cb2e3c2e53bb8f8f690bb Mon Sep 17 00:00:00 2001 From: Scott Cooper Date: Tue, 5 Mar 2024 16:44:17 -0800 Subject: [PATCH 080/145] test(ui): Add mock date test helpers, remove fake timers (#66342) --- .../modals/widgetViewerModal.spec.tsx | 5 +++-- .../replays/playerDOMAlert.spec.tsx | 20 +++++++++++-------- static/app/utils/metrics/index.spec.tsx | 14 ++++++------- static/app/utils/useDismissAlert.spec.tsx | 19 +++++++++--------- .../rules/metric/details/utils.spec.tsx | 12 +++++------ .../widgetBuilderDataset.spec.tsx | 15 +++++++------- .../widgetCard/releaseWidgetQueries.spec.tsx | 10 +++++----- .../groupReplays/groupReplays.spec.tsx | 7 ++++++- .../transactionReplays/index.spec.tsx | 4 +++- tests/js/sentry-test/utils.tsx | 15 ++++++++++++++ tests/js/setup.ts | 6 +++--- 11 files changed, 78 insertions(+), 49 deletions(-) diff --git a/static/app/components/modals/widgetViewerModal.spec.tsx b/static/app/components/modals/widgetViewerModal.spec.tsx index 867fbcf35706f9..5438574fdb5c90 100644 --- a/static/app/components/modals/widgetViewerModal.spec.tsx +++ b/static/app/components/modals/widgetViewerModal.spec.tsx @@ -4,6 +4,7 @@ import {ProjectFixture} from 'sentry-fixture/project'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {act, render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary'; +import {resetMockDate, setMockDate} from 'sentry-test/utils'; import type {ModalRenderProps} from 'sentry/actionCreators/modal'; import WidgetViewerModal from 'sentry/components/modals/widgetViewerModal'; @@ -1269,7 +1270,7 @@ describe('Modals -> WidgetViewerModal', function () { widgetType: WidgetType.RELEASE, }; 
beforeEach(function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); metricsMock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/metrics/data/', body: MetricsTotalCountByReleaseIn24h(), @@ -1281,7 +1282,7 @@ describe('Modals -> WidgetViewerModal', function () { }); }); afterEach(() => { - jest.useRealTimers(); + resetMockDate(); }); it('does a sessions query', async function () { diff --git a/static/app/components/replays/playerDOMAlert.spec.tsx b/static/app/components/replays/playerDOMAlert.spec.tsx index cd8ed860b59810..6e31c67fde9325 100644 --- a/static/app/components/replays/playerDOMAlert.spec.tsx +++ b/static/app/components/replays/playerDOMAlert.spec.tsx @@ -1,20 +1,23 @@ -import {render, screen} from 'sentry-test/reactTestingLibrary'; +import {render, screen, waitFor} from 'sentry-test/reactTestingLibrary'; +import {resetMockDate, setMockDate} from 'sentry-test/utils'; import localStorage from 'sentry/utils/localStorage'; import PlayerDOMAlert from './playerDOMAlert'; jest.mock('sentry/utils/localStorage'); -jest.useFakeTimers(); const mockGetItem = jest.mocked(localStorage.getItem); const now = new Date('2020-01-01'); -jest.setSystemTime(now); describe('PlayerDOMAlert', () => { beforeEach(() => { mockGetItem.mockReset(); + setMockDate(now); + }); + afterEach(() => { + resetMockDate(); }); it('should render the alert when local storage key is not set', () => { @@ -30,18 +33,19 @@ describe('PlayerDOMAlert', () => { expect(screen.queryByTestId('player-dom-alert')).not.toBeInTheDocument(); }); - it('should be dismissable', () => { + it('should be dismissable', async () => { render(); expect(screen.getByTestId('player-dom-alert')).toBeVisible(); screen.getByLabelText('Close Alert').click(); - jest.runAllTicks(); expect(screen.queryByTestId('player-dom-alert')).not.toBeInTheDocument(); - expect(localStorage.setItem).toHaveBeenCalledWith( - 'replay-player-dom-alert-dismissed', - 
'"1577836800000"' + await waitFor(() => + expect(localStorage.setItem).toHaveBeenCalledWith( + 'replay-player-dom-alert-dismissed', + '"1577836800000"' + ) ); }); }); diff --git a/static/app/utils/metrics/index.spec.tsx b/static/app/utils/metrics/index.spec.tsx index 421e3b4abcaef1..d5bce4a368dea5 100644 --- a/static/app/utils/metrics/index.spec.tsx +++ b/static/app/utils/metrics/index.spec.tsx @@ -1,3 +1,5 @@ +import {resetMockDate, setMockDate} from 'sentry-test/utils'; + import type {MetricsOperation} from 'sentry/types'; import { getAbsoluteDateTimeRange, @@ -121,9 +123,11 @@ describe('isFormattedMQL', () => { }); describe('getAbsoluteDateTimeRange', () => { - beforeAll(() => { - jest.useFakeTimers(); - jest.setSystemTime(new Date('2024-01-01T00:00:00Z')); + beforeEach(() => { + setMockDate(new Date('2024-01-01T00:00:00Z')); + }); + afterEach(() => { + resetMockDate(); }); it('should return the correct object with "start" and "end" when period is not provided', () => { @@ -150,8 +154,4 @@ describe('getAbsoluteDateTimeRange', () => { end: '2024-01-01T00:00:00.000Z', }); }); - - afterAll(() => { - jest.useRealTimers(); - }); }); diff --git a/static/app/utils/useDismissAlert.spec.tsx b/static/app/utils/useDismissAlert.spec.tsx index 00757ab374adea..ee63c3fc5ba1bf 100644 --- a/static/app/utils/useDismissAlert.spec.tsx +++ b/static/app/utils/useDismissAlert.spec.tsx @@ -1,10 +1,10 @@ import {reactHooks} from 'sentry-test/reactTestingLibrary'; +import {setMockDate} from 'sentry-test/utils'; import localStorage from 'sentry/utils/localStorage'; import useDismissAlert from 'sentry/utils/useDismissAlert'; jest.mock('sentry/utils/localStorage'); -jest.useFakeTimers(); const mockSetItem = jest.mocked(localStorage.setItem); const mockGetItem = jest.mocked(localStorage.getItem); @@ -14,7 +14,7 @@ const now = new Date('2020-01-01'); describe('useDismissAlert', () => { beforeEach(() => { - jest.setSystemTime(now); + setMockDate(now); mockSetItem.mockReset(); 
mockGetItem.mockReset(); @@ -53,25 +53,26 @@ describe('useDismissAlert', () => { expect(result.current.isDismissed).toBeTruthy(); }); - it('should set the current timestamp into localstorage when an alert is dismissed', () => { - const {result} = reactHooks.renderHook(useDismissAlert, { + it('should set the current timestamp into localstorage when an alert is dismissed', async () => { + const {result, waitFor} = reactHooks.renderHook(useDismissAlert, { initialProps: {key}, }); reactHooks.act(() => { result.current.dismiss(); - jest.runAllTicks(); }); - expect(mockSetItem).toHaveBeenCalledWith( - key, - JSON.stringify(now.getTime().toString()) + await waitFor(() => + expect(mockSetItem).toHaveBeenCalledWith( + key, + JSON.stringify(now.getTime().toString()) + ) ); }); it('should be dismissed if the timestamp in localStorage is older than the expiration', () => { const today = new Date('2020-01-01'); - jest.setSystemTime(today); + setMockDate(today); // Dismissed on christmas const christmas = new Date('2019-12-25').getTime(); diff --git a/static/app/views/alerts/rules/metric/details/utils.spec.tsx b/static/app/views/alerts/rules/metric/details/utils.spec.tsx index f013d088220bf3..f4a71c1e6850e0 100644 --- a/static/app/views/alerts/rules/metric/details/utils.spec.tsx +++ b/static/app/views/alerts/rules/metric/details/utils.spec.tsx @@ -1,18 +1,18 @@ -import MockDate from 'mockdate'; import moment from 'moment'; import {IncidentFixture} from 'sentry-fixture/incident'; import {MetricRuleFixture} from 'sentry-fixture/metricRule'; +import {resetMockDate, setMockDate} from 'sentry-test/utils'; + import {buildMetricGraphDateRange} from 'sentry/views/alerts/rules/metric/details/utils'; describe('buildMetricGraphDateRange', () => { const now = '2022-05-16T20:00:00'; - beforeAll(() => { - MockDate.set(`${now}Z`); + beforeEach(() => { + setMockDate(new Date(`${now}Z`)); }); - afterAll(() => { - // reset mock date - MockDate.set(new Date(1508208080000)); + afterEach(() => { + 
resetMockDate(); }); it('should use current date for an active alert', () => { diff --git a/static/app/views/dashboards/widgetBuilder/widgetBuilderDataset.spec.tsx b/static/app/views/dashboards/widgetBuilder/widgetBuilderDataset.spec.tsx index bf345373c17f65..a538c7e999bf17 100644 --- a/static/app/views/dashboards/widgetBuilder/widgetBuilderDataset.spec.tsx +++ b/static/app/views/dashboards/widgetBuilder/widgetBuilderDataset.spec.tsx @@ -12,6 +12,7 @@ import { waitFor, within, } from 'sentry-test/reactTestingLibrary'; +import {resetMockDate, setMockDate} from 'sentry-test/utils'; import ProjectsStore from 'sentry/stores/projectsStore'; import TagStore from 'sentry/stores/tagStore'; @@ -270,7 +271,7 @@ describe('WidgetBuilder', function () { afterEach(function () { MockApiClient.clearMockResponses(); jest.clearAllMocks(); - jest.useRealTimers(); + resetMockDate(); }); describe('Release Widgets', function () { @@ -362,7 +363,7 @@ describe('WidgetBuilder', function () { }); it('does not allow sort on tags except release', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); renderTestComponent(); expect( @@ -403,7 +404,7 @@ describe('WidgetBuilder', function () { }); it('makes the appropriate sessions call', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); renderTestComponent(); expect( @@ -435,7 +436,7 @@ describe('WidgetBuilder', function () { }); it('calls the session endpoint with the right limit', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); renderTestComponent(); expect( @@ -473,7 +474,7 @@ describe('WidgetBuilder', function () { }); it('calls sessions api when session.status is selected as a groupby', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); renderTestComponent(); 
expect( @@ -532,7 +533,7 @@ describe('WidgetBuilder', function () { }); it('sets widgetType to release', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); renderTestComponent(); await userEvent.click(await screen.findByText('Releases (Sessions, Crash rates)'), { @@ -604,7 +605,7 @@ describe('WidgetBuilder', function () { }); it('adds a function when the only column chosen in a table is a tag', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); + setMockDate(new Date('2022-08-02')); renderTestComponent(); await userEvent.click(await screen.findByText('Releases (Sessions, Crash rates)'), { diff --git a/static/app/views/dashboards/widgetCard/releaseWidgetQueries.spec.tsx b/static/app/views/dashboards/widgetCard/releaseWidgetQueries.spec.tsx index cb6fcd8ad8fc02..f3aeac2860e8f6 100644 --- a/static/app/views/dashboards/widgetCard/releaseWidgetQueries.spec.tsx +++ b/static/app/views/dashboards/widgetCard/releaseWidgetQueries.spec.tsx @@ -6,6 +6,7 @@ import {SessionsFieldFixture} from 'sentry-fixture/sessions'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {render, screen, waitFor} from 'sentry-test/reactTestingLibrary'; +import {resetMockDate, setMockDate} from 'sentry-test/utils'; import { DashboardFilterKeys, @@ -72,12 +73,15 @@ describe('Dashboards > ReleaseWidgetQueries', function () { const api = new MockApiClient(); + beforeEach(function () { + setMockDate(new Date('2022-08-02')); + }); afterEach(function () { MockApiClient.clearMockResponses(); + resetMockDate(); }); it('can send chart requests', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); const mock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/metrics/data/', body: MetricsFieldFixture(`session.all`), @@ -459,7 +463,6 @@ describe('Dashboards > ReleaseWidgetQueries', function () { }); it('can send table requests', async function () { - 
jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); const mock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/metrics/data/', body: MetricsSessionUserCountByStatusByReleaseFixture(), @@ -559,7 +562,6 @@ describe('Dashboards > ReleaseWidgetQueries', function () { }); it('can send big number requests', async function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); const mock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/metrics/data/', body: MetricsFieldFixture(`count_unique(sentry.sessions.user)`), @@ -605,7 +607,6 @@ describe('Dashboards > ReleaseWidgetQueries', function () { }); it('can send multiple API requests', function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); const metricsMock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/metrics/data/', body: SessionsFieldFixture(`session.all`), @@ -700,7 +701,6 @@ describe('Dashboards > ReleaseWidgetQueries', function () { }); it('adjusts interval based on date window', function () { - jest.useFakeTimers().setSystemTime(new Date('2022-08-02')); const mock = MockApiClient.addMockResponse({ url: '/organizations/org-slug/metrics/data/', body: SessionsFieldFixture(`session.all`), diff --git a/static/app/views/issueDetails/groupReplays/groupReplays.spec.tsx b/static/app/views/issueDetails/groupReplays/groupReplays.spec.tsx index 4d157dedc3f8dc..85e489eca47bfc 100644 --- a/static/app/views/issueDetails/groupReplays/groupReplays.spec.tsx +++ b/static/app/views/issueDetails/groupReplays/groupReplays.spec.tsx @@ -22,6 +22,8 @@ type InitializeOrgProps = { }; import {ReplayListFixture} from 'sentry-fixture/replayList'; +import {resetMockDate, setMockDate} from 'sentry-test/utils'; + const REPLAY_ID_1 = '346789a703f6454384f1de473b8b9fcc'; const REPLAY_ID_2 = 'b05dae9b6be54d21a4d5ad9f8f02b780'; @@ -61,6 +63,9 @@ describe('GroupReplays', () => { body: [], }); }); + afterEach(() => { + resetMockDate(); + }); describe('Replay Feature 
Disabled', () => { const mockGroup = GroupFixture(); @@ -330,7 +335,7 @@ describe('GroupReplays', () => { }); // Mock the system date to be 2022-09-28 - jest.useFakeTimers().setSystemTime(new Date('Sep 28, 2022 11:29:13 PM UTC')); + setMockDate(new Date('Sep 28, 2022 11:29:13 PM UTC')); render(, { context: routerContext, diff --git a/static/app/views/performance/transactionSummary/transactionReplays/index.spec.tsx b/static/app/views/performance/transactionSummary/transactionReplays/index.spec.tsx index 604f32d8c11d42..211c8d52113be6 100644 --- a/static/app/views/performance/transactionSummary/transactionReplays/index.spec.tsx +++ b/static/app/views/performance/transactionSummary/transactionReplays/index.spec.tsx @@ -3,6 +3,7 @@ import {ReplayListFixture} from 'sentry-fixture/replayList'; import {initializeOrg} from 'sentry-test/initializeOrg'; import {render, screen, waitFor} from 'sentry-test/reactTestingLibrary'; +import {resetMockDate, setMockDate} from 'sentry-test/utils'; import ProjectsStore from 'sentry/stores/projectsStore'; import { @@ -126,6 +127,7 @@ describe('TransactionReplays', () => { afterEach(() => { MockApiClient.clearMockResponses(); + resetMockDate(); }); it('should query the events endpoint for replayIds of a transaction', async () => { @@ -239,7 +241,7 @@ describe('TransactionReplays', () => { }); // Mock the system date to be 2022-09-28 - jest.useFakeTimers().setSystemTime(new Date('Sep 28, 2022 11:29:13 PM UTC')); + setMockDate(new Date('Sep 28, 2022 11:29:13 PM UTC')); renderComponent(); diff --git a/tests/js/sentry-test/utils.tsx b/tests/js/sentry-test/utils.tsx index 09055650135780..17a1489e7552c4 100644 --- a/tests/js/sentry-test/utils.tsx +++ b/tests/js/sentry-test/utils.tsx @@ -1,3 +1,5 @@ +import MockDate from 'mockdate'; + // Taken from https://stackoverflow.com/a/56859650/1015027 function findTextWithMarkup(contentNode: null | Element, textMatch: string | RegExp) { const hasText = (node: Element): boolean => { @@ -27,3 +29,16 @@ 
export function textWithMarkupMatcher(textMatch: string | RegExp) { return findTextWithMarkup(element, textMatch); }; } + +export function setMockDate(date: Date | number) { + MockDate.set(date); +} + +/** + * Mock (current) date to always be National Pasta Day + * 2017-10-17T02:41:20.000Z + */ +export function resetMockDate() { + const constantDate = new Date(1508208080000); + MockDate.set(constantDate); +} diff --git a/tests/js/setup.ts b/tests/js/setup.ts index f733da1a46d83e..b3195f7e5a6bb5 100644 --- a/tests/js/setup.ts +++ b/tests/js/setup.ts @@ -1,10 +1,11 @@ /* eslint-env node */ import type {ReactElement} from 'react'; import {configure as configureRtl} from '@testing-library/react'; // eslint-disable-line no-restricted-imports -import MockDate from 'mockdate'; import {TextDecoder, TextEncoder} from 'node:util'; import {ConfigFixture} from 'sentry-fixture/config'; +import {resetMockDate} from 'sentry-test/utils'; + // eslint-disable-next-line jest/no-mocks-import import type {Client} from 'sentry/__mocks__/api'; import ConfigStore from 'sentry/stores/configStore'; @@ -30,8 +31,7 @@ configureRtl({testIdAttribute: 'data-test-id'}); * Mock (current) date to always be National Pasta Day * 2017-10-17T02:41:20.000Z */ -const constantDate = new Date(1508208080000); -MockDate.set(constantDate); +resetMockDate(); /** * Global testing configuration From dde199c78fc81f288e18c9818a224451b821c71f Mon Sep 17 00:00:00 2001 From: Dan Fuller Date: Tue, 5 Mar 2024 16:57:02 -0800 Subject: [PATCH 081/145] chore(crons): Return a better message from deprecated crons ingest apis (#66388) --- src/sentry/monitors/endpoints/base.py | 2 ++ .../monitors/endpoints/monitor_ingest_checkin_details.py | 4 ++-- src/sentry/monitors/endpoints/monitor_ingest_checkin_index.py | 4 ++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/sentry/monitors/endpoints/base.py b/src/sentry/monitors/endpoints/base.py index 7fad81aa44ccc7..7a219b63cb10f0 100644 --- 
a/src/sentry/monitors/endpoints/base.py +++ b/src/sentry/monitors/endpoints/base.py @@ -24,6 +24,8 @@ from sentry.monitors.models import CheckInStatus, Monitor, MonitorCheckIn, MonitorEnvironment from sentry.utils.sdk import bind_organization_context, configure_scope +DEPRECATED_INGEST_API_MESSAGE = "We have removed this deprecated API. Please migrate to using DSN instead: https://docs.sentry.io/product/crons/legacy-endpoint-migration/#am-i-using-legacy-endpoints" + class OrganizationMonitorPermission(OrganizationPermission): scope_map = { diff --git a/src/sentry/monitors/endpoints/monitor_ingest_checkin_details.py b/src/sentry/monitors/endpoints/monitor_ingest_checkin_details.py index da7486bb298845..3918e78013dd1d 100644 --- a/src/sentry/monitors/endpoints/monitor_ingest_checkin_details.py +++ b/src/sentry/monitors/endpoints/monitor_ingest_checkin_details.py @@ -27,7 +27,7 @@ from ... import features from ...api.exceptions import ResourceDoesNotExist -from .base import MonitorIngestEndpoint +from .base import DEPRECATED_INGEST_API_MESSAGE, MonitorIngestEndpoint @region_silo_endpoint @@ -72,7 +72,7 @@ def put( the most recent (by creation date) check-in which is still mutable (not marked as finished). 
""" if features.has("organizations:crons-disable-ingest-endpoints", project.organization): - raise ResourceDoesNotExist + raise ResourceDoesNotExist(detail=DEPRECATED_INGEST_API_MESSAGE) if checkin.status in CheckInStatus.FINISHED_VALUES: return self.respond(status=400) diff --git a/src/sentry/monitors/endpoints/monitor_ingest_checkin_index.py b/src/sentry/monitors/endpoints/monitor_ingest_checkin_index.py index fff161979939e0..9c4898790e77f1 100644 --- a/src/sentry/monitors/endpoints/monitor_ingest_checkin_index.py +++ b/src/sentry/monitors/endpoints/monitor_ingest_checkin_index.py @@ -39,7 +39,7 @@ from sentry.utils import metrics from ...api.exceptions import ResourceDoesNotExist -from .base import MonitorIngestEndpoint +from .base import DEPRECATED_INGEST_API_MESSAGE, MonitorIngestEndpoint logger = logging.getLogger(__name__) @@ -104,7 +104,7 @@ def post( Note: If a DSN is utilized for authentication, the response will be limited in details. """ if features.has("organizations:crons-disable-ingest-endpoints", project.organization): - raise ResourceDoesNotExist + raise ResourceDoesNotExist(detail=DEPRECATED_INGEST_API_MESSAGE) if monitor and monitor.status in [ ObjectStatus.PENDING_DELETION, From 1525c6b0a0721b66ae1b1600d900f9ceaf47c33e Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Wed, 6 Mar 2024 06:04:51 +0100 Subject: [PATCH 082/145] ref(ddm): Remove multiple metrics meta queries (#66305) Remove request duplication logic for metrics meta queries to utilize parallelization on the BE and unblock other requests in the browser. 
--- static/app/types/metrics.tsx | 1 + .../utils/metrics/dashboardImport.spec.tsx | 1 + static/app/utils/metrics/useBlockMetric.tsx | 22 ++--- static/app/utils/metrics/useMetricsMeta.tsx | 80 ++++--------------- 4 files changed, 30 insertions(+), 74 deletions(-) diff --git a/static/app/types/metrics.tsx b/static/app/types/metrics.tsx index c7b342a6744ab1..42648fb5e5bfb1 100644 --- a/static/app/types/metrics.tsx +++ b/static/app/types/metrics.tsx @@ -100,6 +100,7 @@ export type MetricMeta = { // name is returned by the API but should not be used, use parseMRI(mri).name instead // name: string; operations: MetricsOperation[]; + projectIds: number[]; type: MetricType; unit: string; }; diff --git a/static/app/utils/metrics/dashboardImport.spec.tsx b/static/app/utils/metrics/dashboardImport.spec.tsx index 41c2012c6ef828..b711c6b8b76fdf 100644 --- a/static/app/utils/metrics/dashboardImport.spec.tsx +++ b/static/app/utils/metrics/dashboardImport.spec.tsx @@ -47,6 +47,7 @@ const mockAvailableMetrics = (mris: MRI[]): MetricMeta[] => { mri, operations: [], blockingStatus: [], + projectIds: [], })) as MetricMeta[]; }; diff --git a/static/app/utils/metrics/useBlockMetric.tsx b/static/app/utils/metrics/useBlockMetric.tsx index 2ea24fe1ca2db5..72cbe8a31f591d 100644 --- a/static/app/utils/metrics/useBlockMetric.tsx +++ b/static/app/utils/metrics/useBlockMetric.tsx @@ -1,7 +1,6 @@ import {addErrorMessage, addSuccessMessage} from 'sentry/actionCreators/indicator'; import {t} from 'sentry/locale'; import type {MetricMeta, MRI, Project} from 'sentry/types'; -import {getUseCaseFromMRI} from 'sentry/utils/metrics/mri'; import {useMutation, useQueryClient} from 'sentry/utils/queryClient'; import useApi from 'sentry/utils/useApi'; import useOrganization from 'sentry/utils/useOrganization'; @@ -46,21 +45,22 @@ export const useBlockMetric = (project: Project) => { }); }, onSuccess: data => { - const useCase = getUseCaseFromMRI(data.metricMri); - const metaQueryKey = getMetricsMetaQueryKey( 
- slug, - {projects: [parseInt(project.id, 10)]}, - useCase ?? 'custom' - ); - queryClient.setQueryData( - metaQueryKey, + const metaQueryKey = getMetricsMetaQueryKey(slug, {}); + + // Only match the endpoint, to search in all insances of the query + const queryKeyFilter = {queryKey: [metaQueryKey[0]]}; + + queryClient.setQueriesData( + queryKeyFilter, (oldData: BlockMetricResponse): BlockMetricResponse => { if (!oldData) { return undefined; } const oldMeta = oldData[0]; const index = oldMeta.findIndex( - (metric: {mri: MRI}) => metric.mri === data.metricMri + metric => + metric.mri === data.metricMri && + metric.projectIds.includes(Number(project.id)) ); if (index !== undefined && index !== -1) { @@ -78,7 +78,7 @@ export const useBlockMetric = (project: Project) => { addSuccessMessage(t('Metric updated')); - queryClient.invalidateQueries(metaQueryKey); + queryClient.invalidateQueries(queryKeyFilter); }, onError: () => { addErrorMessage(t('An error occurred while updating the metric')); diff --git a/static/app/utils/metrics/useMetricsMeta.tsx b/static/app/utils/metrics/useMetricsMeta.tsx index aa8655444aed94..dd473684600f69 100644 --- a/static/app/utils/metrics/useMetricsMeta.tsx +++ b/static/app/utils/metrics/useMetricsMeta.tsx @@ -1,6 +1,6 @@ import type {PageFilters} from 'sentry/types'; import {formatMRI, getUseCaseFromMRI} from 'sentry/utils/metrics/mri'; -import type {ApiQueryKey, UseApiQueryOptions} from 'sentry/utils/queryClient'; +import type {ApiQueryKey} from 'sentry/utils/queryClient'; import {useApiQuery} from 'sentry/utils/queryClient'; import useOrganization from 'sentry/utils/useOrganization'; @@ -9,22 +9,12 @@ import type {MetricMeta, MRI, UseCase} from '../../types/metrics'; import {getMetaDateTimeParams} from './index'; const EMPTY_ARRAY: MetricMeta[] = []; -const DEFAULT_USE_CASES = ['sessions', 'transactions', 'custom', 'spans']; - -export function getMetricsMetaQueryKeys( - orgSlug: string, - projects: PageFilters['projects'], - useCases?: 
UseCase[] -): ApiQueryKey[] { - return ( - useCases?.map(useCase => getMetricsMetaQueryKey(orgSlug, {projects}, useCase)) ?? [] - ); -} +const DEFAULT_USE_CASES: UseCase[] = ['sessions', 'transactions', 'custom', 'spans']; export function getMetricsMetaQueryKey( orgSlug: string, {projects, datetime}: Partial, - useCase: UseCase + useCase?: UseCase[] ): ApiQueryKey { const queryParams = projects?.length ? {useCase, project: projects, ...getMetaDateTimeParams(datetime)} @@ -32,72 +22,36 @@ export function getMetricsMetaQueryKey( return [`/organizations/${orgSlug}/metrics/meta/`, {query: queryParams}]; } -function useMetaUseCase( - useCase: UseCase, - pageFilters: Partial, - options: Omit, 'staleTime'> -) { - const {slug} = useOrganization(); - - const apiQueryResult = useApiQuery( - getMetricsMetaQueryKey(slug, pageFilters, useCase), - { - ...options, - staleTime: 2000, // 2 seconds to cover page load - } - ); - - return apiQueryResult; -} - export function useMetricsMeta( pageFilters: Partial, - useCases?: UseCase[], + useCases: UseCase[] = DEFAULT_USE_CASES, filterBlockedMetrics = true, enabled: boolean = true ): {data: MetricMeta[]; isLoading: boolean} { - const enabledUseCases = useCases ?? 
DEFAULT_USE_CASES; + const {slug} = useOrganization(); - const {data: sessionMeta = [], ...sessionsReq} = useMetaUseCase( - 'sessions', - pageFilters, + const {data, isLoading} = useApiQuery( + getMetricsMetaQueryKey(slug, pageFilters, useCases), { - enabled: enabled && enabledUseCases.includes('sessions'), + enabled, + staleTime: 2000, // 2 seconds to cover page load } ); - const {data: txnsMeta = [], ...txnsReq} = useMetaUseCase('transactions', pageFilters, { - enabled: enabled && enabledUseCases.includes('transactions'), - }); - const {data: customMeta = [], ...customReq} = useMetaUseCase('custom', pageFilters, { - enabled: enabled && enabledUseCases.includes('custom'), - }); - const {data: spansMeta = [], ...spansReq} = useMetaUseCase('spans', pageFilters, { - enabled: enabled && enabledUseCases.includes('spans'), - }); - const isLoading = - (sessionsReq.isLoading && sessionsReq.fetchStatus !== 'idle') || - (txnsReq.isLoading && txnsReq.fetchStatus !== 'idle') || - (customReq.isLoading && customReq.fetchStatus !== 'idle') || - (spansReq.isLoading && spansReq.fetchStatus !== 'idle'); + if (!data) { + return {data: EMPTY_ARRAY, isLoading}; + } - const data = [ - ...(enabledUseCases.includes('sessions') ? sessionMeta : []), - ...(enabledUseCases.includes('transactions') ? txnsMeta : []), - ...(enabledUseCases.includes('custom') ? customMeta : []), - ...(enabledUseCases.includes('spans') ? spansMeta : []), - ].sort((a, b) => formatMRI(a.mri).localeCompare(formatMRI(b.mri))); + const meta = data.sort((a, b) => formatMRI(a.mri).localeCompare(formatMRI(b.mri))); if (!filterBlockedMetrics) { - return {data, isLoading}; + return {data: meta, isLoading}; } return { - data: isLoading - ? EMPTY_ARRAY - : data.filter(meta => { - return meta.blockingStatus?.every(({isBlocked}) => !isBlocked) ?? true; - }), + data: data.filter(entry => { + return entry.blockingStatus?.every(({isBlocked}) => !isBlocked) ?? 
true; + }), isLoading, }; } From 7ff66f2f4a646a92fc4483c3d4368910ee398d89 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Wed, 6 Mar 2024 08:39:03 +0100 Subject: [PATCH 083/145] fix(loader-script-setting): Cache update (#66396) --- .../app/views/settings/project/projectKeys/details/index.tsx | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/static/app/views/settings/project/projectKeys/details/index.tsx b/static/app/views/settings/project/projectKeys/details/index.tsx index 4f4423bc8cab75..84c1a692e6da99 100644 --- a/static/app/views/settings/project/projectKeys/details/index.tsx +++ b/static/app/views/settings/project/projectKeys/details/index.tsx @@ -43,9 +43,7 @@ export default function ProjectKeyDetails({organization, params, project}: Props setApiQueryData( queryClient, [`/projects/${organization.slug}/${projectId}/keys/${keyId}/`], - oldData => { - return {...oldData, data}; - } + data ); } From f143d170cf12f9fbe82be4c3193109535c6de30c Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 6 Mar 2024 09:49:46 +0100 Subject: [PATCH 084/145] Bump Sentry Python SDK to version 2.0.0a2 (#66397) New PR for another load test to check performance of the new Python SDK 2.0 --------- Co-authored-by: getsantry[bot] <66042841+getsantry[bot]@users.noreply.github.com> --- .github/actions/setup-sentry/action.yml | 20 ++++----- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 4 +- requirements-dev.txt | 3 +- requirements-frozen.txt | 2 +- src/sentry/auth/helper.py | 2 +- src/sentry/cache/base.py | 12 +++--- src/sentry/data_export/tasks.py | 4 +- src/sentry/scim/endpoints/utils.py | 2 +- src/sentry/utils/sdk.py | 29 ++++++------- src/sentry/utils/snuba.py | 6 +-- .../organization_integration_setup.py | 4 +- tests/relay_integration/test_sdk.py | 43 +++++++------------ tests/sentry/utils/test_sdk.py | 6 +-- 14 files changed, 62 insertions(+), 77 deletions(-) diff --git a/.github/actions/setup-sentry/action.yml 
b/.github/actions/setup-sentry/action.yml index 85a42a7c864949..0bad1db75bd8f5 100644 --- a/.github/actions/setup-sentry/action.yml +++ b/.github/actions/setup-sentry/action.yml @@ -81,16 +81,16 @@ runs: echo "PYTEST_ADDOPTS=--reruns=5 --durations=10 --fail-slow=60s" >> $GITHUB_ENV ### pytest-sentry configuration ### - if [ "$GITHUB_REPOSITORY" = "getsentry/sentry" ]; then - echo "PYTEST_SENTRY_DSN=https://6fd5cfea2d4d46b182ad214ac7810508@sentry.io/2423079" >> $GITHUB_ENV - echo "PYTEST_SENTRY_TRACES_SAMPLE_RATE=0" >> $GITHUB_ENV - - # This records failures on master to sentry in order to detect flakey tests, as it's - # expected that people have failing tests on their PRs - if [ "$GITHUB_REF" = "refs/heads/master" ]; then - echo "PYTEST_SENTRY_ALWAYS_REPORT=1" >> $GITHUB_ENV - fi - fi + # if [ "$GITHUB_REPOSITORY" = "getsentry/sentry" ]; then + # echo "PYTEST_SENTRY_DSN=https://6fd5cfea2d4d46b182ad214ac7810508@sentry.io/2423079" >> $GITHUB_ENV + # echo "PYTEST_SENTRY_TRACES_SAMPLE_RATE=0" >> $GITHUB_ENV + + # # This records failures on master to sentry in order to detect flakey tests, as it's + # # expected that people have failing tests on their PRs + # if [ "$GITHUB_REF" = "refs/heads/master" ]; then + # echo "PYTEST_SENTRY_ALWAYS_REPORT=1" >> $GITHUB_ENV + # fi + # fi # Configure a different release version, otherwise it defaults to the # commit sha which will conflict with our actual prod releases. 
This is a diff --git a/requirements-base.txt b/requirements-base.txt index fa6f922654d677..888773f89c4617 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -66,7 +66,7 @@ sentry-kafka-schemas>=0.1.58 sentry-ophio==0.1.5 sentry-redis-tools>=0.1.7 sentry-relay>=0.8.45 -sentry-sdk>=1.39.2 +sentry-sdk>=2.0.0a2 snuba-sdk>=2.0.29 simplejson>=3.17.6 sqlparse>=0.4.4 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index bde98736cf313d..eaa90e40118aa6 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -145,7 +145,6 @@ pytest-cov==4.0.0 pytest-django==4.8.0 pytest-fail-slow==0.3.0 pytest-rerunfailures==11.0 -pytest-sentry==0.1.11 pytest-xdist==3.0.2 python-dateutil==2.8.2 python-rapidjson==1.8 @@ -181,7 +180,7 @@ sentry-kafka-schemas==0.1.58 sentry-ophio==0.1.5 sentry-redis-tools==0.1.7 sentry-relay==0.8.45 -sentry-sdk==1.39.2 +sentry-sdk==2.0.0a2 sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.16.0 @@ -230,7 +229,6 @@ wcwidth==0.2.10 websocket-client==1.3.2 werkzeug==3.0.1 wheel==0.38.4 -wrapt==1.14.1 wsproto==1.1.0 xmlsec==1.3.13 zstandard==0.18.0 diff --git a/requirements-dev.txt b/requirements-dev.txt index c6339a4e093c43..a8843b659bff0c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -12,7 +12,8 @@ pytest-cov>=4.0.0 pytest-django>=4.8.0 pytest-fail-slow>=0.3.0 pytest-rerunfailures>=11 -pytest-sentry>=0.1.11 +# Removed because of Python SDK 2.0 (this package does not work with SDK 2.0) +# pytest-sentry>=0.1.11 pytest-xdist>=3 responses>=0.23.1 selenium>=4.16.0 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index b0ef79fddf89a1..513693989bf004 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -123,7 +123,7 @@ sentry-kafka-schemas==0.1.58 sentry-ophio==0.1.5 sentry-redis-tools==0.1.7 sentry-relay==0.8.45 -sentry-sdk==1.39.2 +sentry-sdk==2.0.0a2 sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.16.0 diff --git a/src/sentry/auth/helper.py 
b/src/sentry/auth/helper.py index ffb386346dfdd4..10991f311cbd12 100644 --- a/src/sentry/auth/helper.py +++ b/src/sentry/auth/helper.py @@ -134,7 +134,7 @@ def user(self) -> User | AnonymousUser: @staticmethod def warn_about_ambiguous_email(email: str, users: Collection[User], chosen_user: User) -> None: with sentry_sdk.push_scope() as scope: - scope.level = "warning" + scope.set_level("warning") scope.set_tag("email", email) scope.set_extra("user_ids", [user.id for user in users]) scope.set_extra("chosen_user", chosen_user.id) diff --git a/src/sentry/cache/base.py b/src/sentry/cache/base.py index 4bb161d6668921..bbbf28ca961316 100644 --- a/src/sentry/cache/base.py +++ b/src/sentry/cache/base.py @@ -46,9 +46,9 @@ def _mark_transaction(self, op): if not self.is_default_cache: return - with sentry_sdk.configure_scope() as scope: - # Do not set this tag if we're in the global scope (which roughly - # equates to having a transaction). - if scope.transaction: - scope.set_tag(f"{op}_default_cache", "true") - scope.set_tag("used_default_cache", "true") + scope = sentry_sdk.Scope.get_current_scope() + # Do not set this tag if we're in the global scope (which roughly + # equates to having a transaction). 
+ if scope.transaction: + sentry_sdk.set_tag(f"{op}_default_cache", "true") + sentry_sdk.set_tag("used_default_cache", "true") diff --git a/src/sentry/data_export/tasks.py b/src/sentry/data_export/tasks.py index 1736202f51845b..4ba8e93735cc84 100644 --- a/src/sentry/data_export/tasks.py +++ b/src/sentry/data_export/tasks.py @@ -77,7 +77,7 @@ def assemble_download( with sentry_sdk.configure_scope() as scope: if data_export.user_id: user = dict(id=data_export.user_id) - scope.user = user + scope.set_user(user) scope.set_tag("organization.slug", data_export.organization.slug) scope.set_tag("export.type", ExportQueryType.as_str(data_export.query_type)) scope.set_extra("export.query", data_export.query_info) @@ -309,7 +309,7 @@ def merge_export_blobs(data_export_id, **kwargs): with sentry_sdk.configure_scope() as scope: if data_export.user_id: user = dict(id=data_export.user_id) - scope.user = user + scope.set_user(user) scope.set_tag("organization.slug", data_export.organization.slug) scope.set_tag("export.type", ExportQueryType.as_str(data_export.query_type)) scope.set_extra("export.query", data_export.query_info) diff --git a/src/sentry/scim/endpoints/utils.py b/src/sentry/scim/endpoints/utils.py index 9c0bfce38250c4..838a09245daa54 100644 --- a/src/sentry/scim/endpoints/utils.py +++ b/src/sentry/scim/endpoints/utils.py @@ -20,7 +20,7 @@ class SCIMApiError(APIException): def __init__(self, detail, status_code=400): - transaction = sentry_sdk.Hub.current.scope.transaction + transaction = sentry_sdk.Scope.get_current_scope().transaction if transaction is not None: transaction.set_tag("http.status_code", status_code) self.status_code = status_code diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 5dbba56f16ba72..2fb012eccd1a9c 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -562,21 +562,20 @@ def check_current_scope_transaction( Note: Ignores scope `transaction` values with `source = "custom"`, indicating a value which has been 
set maunually. """ + scope = sentry_sdk.Scope.get_current_scope() + transaction_from_request = get_transaction_name_from_request(request) - with configure_scope() as scope: - transaction_from_request = get_transaction_name_from_request(request) - - if ( - scope._transaction is not None - and scope._transaction != transaction_from_request - and scope._transaction_info.get("source") != "custom" - ): - return { - "scope_transaction": scope._transaction, - "request_transaction": transaction_from_request, - } - else: - return None + if ( + scope._transaction is not None + and scope._transaction != transaction_from_request + and scope._transaction_info.get("source") != "custom" + ): + return { + "scope_transaction": scope._transaction, + "request_transaction": transaction_from_request, + } + else: + return None def capture_exception_with_scope_check( @@ -674,7 +673,7 @@ def parse_org_slug(x: Organization | RpcOrganization | str) -> str: def set_measurement(measurement_name, value, unit=None): try: - transaction = sentry_sdk.Hub.current.scope.transaction + transaction = sentry_sdk.Scope.get_current_scope().transaction if transaction is not None: transaction.set_measurement(measurement_name, value, unit) except Exception: diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index 337e3d25eecf30..a61ce5fdd0b14d 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -976,9 +976,9 @@ def _bulk_snuba_query( sentry_sdk.set_tag("query.referrer", query_referrer) parent_api: str = "" - with sentry_sdk.configure_scope() as scope: - if scope.transaction: - parent_api = scope.transaction.name + scope = sentry_sdk.Scope.get_current_scope() + if scope.transaction: + parent_api = scope.transaction.name if len(snuba_param_list) > 1: query_results = list( diff --git a/src/sentry/web/frontend/organization_integration_setup.py b/src/sentry/web/frontend/organization_integration_setup.py index 83f6465c44321f..61a5c867e8467d 100644 --- 
a/src/sentry/web/frontend/organization_integration_setup.py +++ b/src/sentry/web/frontend/organization_integration_setup.py @@ -21,8 +21,8 @@ class OrganizationIntegrationSetupView(ControlSiloOrganizationView): csrf_protect = False def handle(self, request: Request, organization, provider_id) -> HttpResponseBase: - with sentry_sdk.configure_scope() as scope: - scope.set_transaction_name(f"integration.{provider_id}", source=TRANSACTION_SOURCE_VIEW) + scope = sentry_sdk.Scope.get_current_scope() + scope.set_transaction_name(f"integration.{provider_id}", source=TRANSACTION_SOURCE_VIEW) pipeline = IntegrationPipeline( request=request, organization=organization, provider_key=provider_id diff --git a/tests/relay_integration/test_sdk.py b/tests/relay_integration/test_sdk.py index e09817076f5362..31a6b3d99cc1d7 100644 --- a/tests/relay_integration/test_sdk.py +++ b/tests/relay_integration/test_sdk.py @@ -2,8 +2,9 @@ from unittest import mock import pytest +import sentry_sdk from django.test.utils import override_settings -from sentry_sdk import Hub, push_scope +from sentry_sdk import Hub from sentry import eventstore from sentry.eventstore.models import Event @@ -34,38 +35,22 @@ def post_event_with_sdk(settings, relay_server, wait_for_ingest_consumer): settings.SENTRY_PROJECT = 1 configure_sdk() - hub = Hub.current # XXX: Hub.current gets reset, this is a workaround - def bind_client(self, new, *, _orig=Hub.bind_client): - if new is None: - import sys - import traceback + wait_for_ingest_consumer = wait_for_ingest_consumer(settings) - print("!!! 
Hub client was reset to None !!!", file=sys.stderr) # noqa: S002 - traceback.print_stack() - print("!!!", file=sys.stderr) # noqa: S002 + def inner(*args, **kwargs): + event_id = sentry_sdk.capture_event(*args, **kwargs) + sentry_sdk.Scope.get_client().flush() - return _orig(self, new) + with sentry_sdk.new_scope(): + return wait_for_ingest_consumer( + lambda: eventstore.backend.get_event_by_id(settings.SENTRY_PROJECT, event_id) + ) - # XXX: trying to figure out why it gets reset - with mock.patch.object(Hub, "bind_client", bind_client): - wait_for_ingest_consumer = wait_for_ingest_consumer(settings) - - def inner(*args, **kwargs): - assert Hub.current.client is not None - - event_id = hub.capture_event(*args, **kwargs) - assert hub.client is not None - hub.client.flush() - - with push_scope(): - return wait_for_ingest_consumer( - lambda: eventstore.backend.get_event_by_id(settings.SENTRY_PROJECT, event_id) - ) - - yield inner + yield inner +@pytest.mark.skip(reason="Deactivate to test SDK 2.0") @no_silo_test @override_settings(SENTRY_PROJECT=1) @django_db_all @@ -77,6 +62,7 @@ def test_simple(settings, post_event_with_sdk): assert event.data["logentry"]["formatted"] == "internal client test" +@pytest.mark.skip(reason="Deactivate to test SDK 2.0") @no_silo_test @override_settings(SENTRY_PROJECT=1) @django_db_all @@ -95,6 +81,7 @@ def test_recursion_breaker(settings, post_event_with_sdk): assert_mock_called_once_with_partial(save, settings.SENTRY_PROJECT, cache_key=f"e:{event_id}:1") +@pytest.mark.skip(reason="Deactivate to test SDK 2.0") @no_silo_test @django_db_all @override_settings(SENTRY_PROJECT=1) @@ -102,7 +89,7 @@ def test_encoding(settings, post_event_with_sdk): class NotJSONSerializable: pass - with push_scope() as scope: + with sentry_sdk.new_scope() as scope: scope.set_extra("request", NotJSONSerializable()) event = post_event_with_sdk({"message": "check the req"}) diff --git a/tests/sentry/utils/test_sdk.py b/tests/sentry/utils/test_sdk.py index 
e296c859ba108a..010639b20d1f1d 100644 --- a/tests/sentry/utils/test_sdk.py +++ b/tests/sentry/utils/test_sdk.py @@ -223,7 +223,7 @@ def test_scope_has_correct_transaction(self, mock_resolve: MagicMock): mock_scope = Scope() mock_scope._transaction = "/dogs/{name}/" - with patch_configure_scope_with_scope("sentry.utils.sdk.configure_scope", mock_scope): + with patch("sentry.utils.sdk.sentry_sdk.Scope.get_current_scope", return_value=mock_scope): mismatch = check_current_scope_transaction(Request(HttpRequest())) assert mismatch is None @@ -232,7 +232,7 @@ def test_scope_has_wrong_transaction(self, mock_resolve: MagicMock): mock_scope = Scope() mock_scope._transaction = "/tricks/{trick_name}/" - with patch_configure_scope_with_scope("sentry.utils.sdk.configure_scope", mock_scope): + with patch("sentry.utils.sdk.sentry_sdk.Scope.get_current_scope", return_value=mock_scope): mismatch = check_current_scope_transaction(Request(HttpRequest())) assert mismatch == { "scope_transaction": "/tricks/{trick_name}/", @@ -302,7 +302,7 @@ def test_no_scope_data_passed(self, mock_sdk_capture_exception: MagicMock): capture_exception_with_scope_check(Exception()) passed_scope = mock_sdk_capture_exception.call_args.kwargs["scope"] - empty_scope = Scope() + empty_scope = Scope(client=passed_scope.client) for entry in empty_scope.__slots__: # _propagation_context is generated on __init__ for tracing without performance From 83faf436bb911133e5f4800fe9c36b40253cb5ab Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Wed, 6 Mar 2024 08:53:50 +0000 Subject: [PATCH 085/145] Revert "Bump Sentry Python SDK to version 2.0.0a2 (#66397)" This reverts commit 51ae02e46645b11c7d2463d8ef0bc6c60e542981. 
Co-authored-by: antonpirker <202325+antonpirker@users.noreply.github.com> --- .github/actions/setup-sentry/action.yml | 20 ++++----- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 4 +- requirements-dev.txt | 3 +- requirements-frozen.txt | 2 +- src/sentry/auth/helper.py | 2 +- src/sentry/cache/base.py | 12 +++--- src/sentry/data_export/tasks.py | 4 +- src/sentry/scim/endpoints/utils.py | 2 +- src/sentry/utils/sdk.py | 29 +++++++------ src/sentry/utils/snuba.py | 6 +-- .../organization_integration_setup.py | 4 +- tests/relay_integration/test_sdk.py | 43 ++++++++++++------- tests/sentry/utils/test_sdk.py | 6 +-- 14 files changed, 77 insertions(+), 62 deletions(-) diff --git a/.github/actions/setup-sentry/action.yml b/.github/actions/setup-sentry/action.yml index 0bad1db75bd8f5..85a42a7c864949 100644 --- a/.github/actions/setup-sentry/action.yml +++ b/.github/actions/setup-sentry/action.yml @@ -81,16 +81,16 @@ runs: echo "PYTEST_ADDOPTS=--reruns=5 --durations=10 --fail-slow=60s" >> $GITHUB_ENV ### pytest-sentry configuration ### - # if [ "$GITHUB_REPOSITORY" = "getsentry/sentry" ]; then - # echo "PYTEST_SENTRY_DSN=https://6fd5cfea2d4d46b182ad214ac7810508@sentry.io/2423079" >> $GITHUB_ENV - # echo "PYTEST_SENTRY_TRACES_SAMPLE_RATE=0" >> $GITHUB_ENV - - # # This records failures on master to sentry in order to detect flakey tests, as it's - # # expected that people have failing tests on their PRs - # if [ "$GITHUB_REF" = "refs/heads/master" ]; then - # echo "PYTEST_SENTRY_ALWAYS_REPORT=1" >> $GITHUB_ENV - # fi - # fi + if [ "$GITHUB_REPOSITORY" = "getsentry/sentry" ]; then + echo "PYTEST_SENTRY_DSN=https://6fd5cfea2d4d46b182ad214ac7810508@sentry.io/2423079" >> $GITHUB_ENV + echo "PYTEST_SENTRY_TRACES_SAMPLE_RATE=0" >> $GITHUB_ENV + + # This records failures on master to sentry in order to detect flakey tests, as it's + # expected that people have failing tests on their PRs + if [ "$GITHUB_REF" = "refs/heads/master" ]; then + echo 
"PYTEST_SENTRY_ALWAYS_REPORT=1" >> $GITHUB_ENV + fi + fi # Configure a different release version, otherwise it defaults to the # commit sha which will conflict with our actual prod releases. This is a diff --git a/requirements-base.txt b/requirements-base.txt index 888773f89c4617..fa6f922654d677 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -66,7 +66,7 @@ sentry-kafka-schemas>=0.1.58 sentry-ophio==0.1.5 sentry-redis-tools>=0.1.7 sentry-relay>=0.8.45 -sentry-sdk>=2.0.0a2 +sentry-sdk>=1.39.2 snuba-sdk>=2.0.29 simplejson>=3.17.6 sqlparse>=0.4.4 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index eaa90e40118aa6..bde98736cf313d 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -145,6 +145,7 @@ pytest-cov==4.0.0 pytest-django==4.8.0 pytest-fail-slow==0.3.0 pytest-rerunfailures==11.0 +pytest-sentry==0.1.11 pytest-xdist==3.0.2 python-dateutil==2.8.2 python-rapidjson==1.8 @@ -180,7 +181,7 @@ sentry-kafka-schemas==0.1.58 sentry-ophio==0.1.5 sentry-redis-tools==0.1.7 sentry-relay==0.8.45 -sentry-sdk==2.0.0a2 +sentry-sdk==1.39.2 sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.16.0 @@ -229,6 +230,7 @@ wcwidth==0.2.10 websocket-client==1.3.2 werkzeug==3.0.1 wheel==0.38.4 +wrapt==1.14.1 wsproto==1.1.0 xmlsec==1.3.13 zstandard==0.18.0 diff --git a/requirements-dev.txt b/requirements-dev.txt index a8843b659bff0c..c6339a4e093c43 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -12,8 +12,7 @@ pytest-cov>=4.0.0 pytest-django>=4.8.0 pytest-fail-slow>=0.3.0 pytest-rerunfailures>=11 -# Removed because of Python SDK 2.0 (this package does not work with SDK 2.0) -# pytest-sentry>=0.1.11 +pytest-sentry>=0.1.11 pytest-xdist>=3 responses>=0.23.1 selenium>=4.16.0 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 513693989bf004..b0ef79fddf89a1 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -123,7 +123,7 @@ sentry-kafka-schemas==0.1.58 sentry-ophio==0.1.5 
sentry-redis-tools==0.1.7 sentry-relay==0.8.45 -sentry-sdk==2.0.0a2 +sentry-sdk==1.39.2 sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.16.0 diff --git a/src/sentry/auth/helper.py b/src/sentry/auth/helper.py index 10991f311cbd12..ffb386346dfdd4 100644 --- a/src/sentry/auth/helper.py +++ b/src/sentry/auth/helper.py @@ -134,7 +134,7 @@ def user(self) -> User | AnonymousUser: @staticmethod def warn_about_ambiguous_email(email: str, users: Collection[User], chosen_user: User) -> None: with sentry_sdk.push_scope() as scope: - scope.set_level("warning") + scope.level = "warning" scope.set_tag("email", email) scope.set_extra("user_ids", [user.id for user in users]) scope.set_extra("chosen_user", chosen_user.id) diff --git a/src/sentry/cache/base.py b/src/sentry/cache/base.py index bbbf28ca961316..4bb161d6668921 100644 --- a/src/sentry/cache/base.py +++ b/src/sentry/cache/base.py @@ -46,9 +46,9 @@ def _mark_transaction(self, op): if not self.is_default_cache: return - scope = sentry_sdk.Scope.get_current_scope() - # Do not set this tag if we're in the global scope (which roughly - # equates to having a transaction). - if scope.transaction: - sentry_sdk.set_tag(f"{op}_default_cache", "true") - sentry_sdk.set_tag("used_default_cache", "true") + with sentry_sdk.configure_scope() as scope: + # Do not set this tag if we're in the global scope (which roughly + # equates to having a transaction). 
+ if scope.transaction: + scope.set_tag(f"{op}_default_cache", "true") + scope.set_tag("used_default_cache", "true") diff --git a/src/sentry/data_export/tasks.py b/src/sentry/data_export/tasks.py index 4ba8e93735cc84..1736202f51845b 100644 --- a/src/sentry/data_export/tasks.py +++ b/src/sentry/data_export/tasks.py @@ -77,7 +77,7 @@ def assemble_download( with sentry_sdk.configure_scope() as scope: if data_export.user_id: user = dict(id=data_export.user_id) - scope.set_user(user) + scope.user = user scope.set_tag("organization.slug", data_export.organization.slug) scope.set_tag("export.type", ExportQueryType.as_str(data_export.query_type)) scope.set_extra("export.query", data_export.query_info) @@ -309,7 +309,7 @@ def merge_export_blobs(data_export_id, **kwargs): with sentry_sdk.configure_scope() as scope: if data_export.user_id: user = dict(id=data_export.user_id) - scope.set_user(user) + scope.user = user scope.set_tag("organization.slug", data_export.organization.slug) scope.set_tag("export.type", ExportQueryType.as_str(data_export.query_type)) scope.set_extra("export.query", data_export.query_info) diff --git a/src/sentry/scim/endpoints/utils.py b/src/sentry/scim/endpoints/utils.py index 838a09245daa54..9c0bfce38250c4 100644 --- a/src/sentry/scim/endpoints/utils.py +++ b/src/sentry/scim/endpoints/utils.py @@ -20,7 +20,7 @@ class SCIMApiError(APIException): def __init__(self, detail, status_code=400): - transaction = sentry_sdk.Scope.get_current_scope().transaction + transaction = sentry_sdk.Hub.current.scope.transaction if transaction is not None: transaction.set_tag("http.status_code", status_code) self.status_code = status_code diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 2fb012eccd1a9c..5dbba56f16ba72 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -562,20 +562,21 @@ def check_current_scope_transaction( Note: Ignores scope `transaction` values with `source = "custom"`, indicating a value which has been set 
maunually. """ - scope = sentry_sdk.Scope.get_current_scope() - transaction_from_request = get_transaction_name_from_request(request) - if ( - scope._transaction is not None - and scope._transaction != transaction_from_request - and scope._transaction_info.get("source") != "custom" - ): - return { - "scope_transaction": scope._transaction, - "request_transaction": transaction_from_request, - } - else: - return None + with configure_scope() as scope: + transaction_from_request = get_transaction_name_from_request(request) + + if ( + scope._transaction is not None + and scope._transaction != transaction_from_request + and scope._transaction_info.get("source") != "custom" + ): + return { + "scope_transaction": scope._transaction, + "request_transaction": transaction_from_request, + } + else: + return None def capture_exception_with_scope_check( @@ -673,7 +674,7 @@ def parse_org_slug(x: Organization | RpcOrganization | str) -> str: def set_measurement(measurement_name, value, unit=None): try: - transaction = sentry_sdk.Scope.get_current_scope().transaction + transaction = sentry_sdk.Hub.current.scope.transaction if transaction is not None: transaction.set_measurement(measurement_name, value, unit) except Exception: diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index a61ce5fdd0b14d..337e3d25eecf30 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -976,9 +976,9 @@ def _bulk_snuba_query( sentry_sdk.set_tag("query.referrer", query_referrer) parent_api: str = "" - scope = sentry_sdk.Scope.get_current_scope() - if scope.transaction: - parent_api = scope.transaction.name + with sentry_sdk.configure_scope() as scope: + if scope.transaction: + parent_api = scope.transaction.name if len(snuba_param_list) > 1: query_results = list( diff --git a/src/sentry/web/frontend/organization_integration_setup.py b/src/sentry/web/frontend/organization_integration_setup.py index 61a5c867e8467d..83f6465c44321f 100644 --- 
a/src/sentry/web/frontend/organization_integration_setup.py +++ b/src/sentry/web/frontend/organization_integration_setup.py @@ -21,8 +21,8 @@ class OrganizationIntegrationSetupView(ControlSiloOrganizationView): csrf_protect = False def handle(self, request: Request, organization, provider_id) -> HttpResponseBase: - scope = sentry_sdk.Scope.get_current_scope() - scope.set_transaction_name(f"integration.{provider_id}", source=TRANSACTION_SOURCE_VIEW) + with sentry_sdk.configure_scope() as scope: + scope.set_transaction_name(f"integration.{provider_id}", source=TRANSACTION_SOURCE_VIEW) pipeline = IntegrationPipeline( request=request, organization=organization, provider_key=provider_id diff --git a/tests/relay_integration/test_sdk.py b/tests/relay_integration/test_sdk.py index 31a6b3d99cc1d7..e09817076f5362 100644 --- a/tests/relay_integration/test_sdk.py +++ b/tests/relay_integration/test_sdk.py @@ -2,9 +2,8 @@ from unittest import mock import pytest -import sentry_sdk from django.test.utils import override_settings -from sentry_sdk import Hub +from sentry_sdk import Hub, push_scope from sentry import eventstore from sentry.eventstore.models import Event @@ -35,22 +34,38 @@ def post_event_with_sdk(settings, relay_server, wait_for_ingest_consumer): settings.SENTRY_PROJECT = 1 configure_sdk() + hub = Hub.current # XXX: Hub.current gets reset, this is a workaround - wait_for_ingest_consumer = wait_for_ingest_consumer(settings) + def bind_client(self, new, *, _orig=Hub.bind_client): + if new is None: + import sys + import traceback - def inner(*args, **kwargs): - event_id = sentry_sdk.capture_event(*args, **kwargs) - sentry_sdk.Scope.get_client().flush() + print("!!! 
Hub client was reset to None !!!", file=sys.stderr) # noqa: S002 + traceback.print_stack() + print("!!!", file=sys.stderr) # noqa: S002 - with sentry_sdk.new_scope(): - return wait_for_ingest_consumer( - lambda: eventstore.backend.get_event_by_id(settings.SENTRY_PROJECT, event_id) - ) + return _orig(self, new) - yield inner + # XXX: trying to figure out why it gets reset + with mock.patch.object(Hub, "bind_client", bind_client): + wait_for_ingest_consumer = wait_for_ingest_consumer(settings) + + def inner(*args, **kwargs): + assert Hub.current.client is not None + + event_id = hub.capture_event(*args, **kwargs) + assert hub.client is not None + hub.client.flush() + + with push_scope(): + return wait_for_ingest_consumer( + lambda: eventstore.backend.get_event_by_id(settings.SENTRY_PROJECT, event_id) + ) + + yield inner -@pytest.mark.skip(reason="Deactivate to test SDK 2.0") @no_silo_test @override_settings(SENTRY_PROJECT=1) @django_db_all @@ -62,7 +77,6 @@ def test_simple(settings, post_event_with_sdk): assert event.data["logentry"]["formatted"] == "internal client test" -@pytest.mark.skip(reason="Deactivate to test SDK 2.0") @no_silo_test @override_settings(SENTRY_PROJECT=1) @django_db_all @@ -81,7 +95,6 @@ def test_recursion_breaker(settings, post_event_with_sdk): assert_mock_called_once_with_partial(save, settings.SENTRY_PROJECT, cache_key=f"e:{event_id}:1") -@pytest.mark.skip(reason="Deactivate to test SDK 2.0") @no_silo_test @django_db_all @override_settings(SENTRY_PROJECT=1) @@ -89,7 +102,7 @@ def test_encoding(settings, post_event_with_sdk): class NotJSONSerializable: pass - with sentry_sdk.new_scope() as scope: + with push_scope() as scope: scope.set_extra("request", NotJSONSerializable()) event = post_event_with_sdk({"message": "check the req"}) diff --git a/tests/sentry/utils/test_sdk.py b/tests/sentry/utils/test_sdk.py index 010639b20d1f1d..e296c859ba108a 100644 --- a/tests/sentry/utils/test_sdk.py +++ b/tests/sentry/utils/test_sdk.py @@ -223,7 +223,7 @@ 
def test_scope_has_correct_transaction(self, mock_resolve: MagicMock): mock_scope = Scope() mock_scope._transaction = "/dogs/{name}/" - with patch("sentry.utils.sdk.sentry_sdk.Scope.get_current_scope", return_value=mock_scope): + with patch_configure_scope_with_scope("sentry.utils.sdk.configure_scope", mock_scope): mismatch = check_current_scope_transaction(Request(HttpRequest())) assert mismatch is None @@ -232,7 +232,7 @@ def test_scope_has_wrong_transaction(self, mock_resolve: MagicMock): mock_scope = Scope() mock_scope._transaction = "/tricks/{trick_name}/" - with patch("sentry.utils.sdk.sentry_sdk.Scope.get_current_scope", return_value=mock_scope): + with patch_configure_scope_with_scope("sentry.utils.sdk.configure_scope", mock_scope): mismatch = check_current_scope_transaction(Request(HttpRequest())) assert mismatch == { "scope_transaction": "/tricks/{trick_name}/", @@ -302,7 +302,7 @@ def test_no_scope_data_passed(self, mock_sdk_capture_exception: MagicMock): capture_exception_with_scope_check(Exception()) passed_scope = mock_sdk_capture_exception.call_args.kwargs["scope"] - empty_scope = Scope(client=passed_scope.client) + empty_scope = Scope() for entry in empty_scope.__slots__: # _propagation_context is generated on __init__ for tracing without performance From 770de43f85602e5fc993c59dd0da5b86ba2f60f2 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Wed, 6 Mar 2024 10:27:28 +0100 Subject: [PATCH 086/145] feat(ddm): Filter empty series from summary table (#66400) Workaround as the endpoint returns empty series for groups that are excluded by a filter. 
--- static/app/views/ddm/summaryTable.tsx | 2 ++ 1 file changed, 2 insertions(+) diff --git a/static/app/views/ddm/summaryTable.tsx b/static/app/views/ddm/summaryTable.tsx index bb4ae5e3784aa7..11f96699acbf0a 100644 --- a/static/app/views/ddm/summaryTable.tsx +++ b/static/app/views/ddm/summaryTable.tsx @@ -118,6 +118,8 @@ export const SummaryTable = memo(function SummaryTable({ ...getValues(s.data), }; }) + // Filter series with no data + .filter(s => s.min !== Infinity) .sort((a, b) => { const {name, order} = sort; if (!name) { From e8cedc2607b2599fe65d06e90f73a045d43ce029 Mon Sep 17 00:00:00 2001 From: Arpad Borsos Date: Wed, 6 Mar 2024 10:38:18 +0100 Subject: [PATCH 087/145] Clean up `ArtifactBundle(FlatFile)Index`, step 3 (#66208) This is the third step, following https://github.com/getsentry/sentry/pull/66206 and https://github.com/getsentry/sentry/pull/66207: In step 3, we fully drop the obsolete columns and tables after they have been removed from the migration state in prior steps. 
--- migrations_lockfile.txt | 2 +- .../0663_artifactbundleindex_cleanup_step3.py | 55 +++++++++++++++++++ 2 files changed, 56 insertions(+), 1 deletion(-) create mode 100644 src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt index dd7cfa2517ed34..4dcff89b5c6903 100644 --- a/migrations_lockfile.txt +++ b/migrations_lockfile.txt @@ -9,5 +9,5 @@ feedback: 0004_index_together hybridcloud: 0013_add_orgauthtokenreplica_token_index nodestore: 0002_nodestore_no_dictfield replays: 0004_index_together -sentry: 0662_monitor_drop_last_state_change +sentry: 0663_artifactbundleindex_cleanup_step3 social_auth: 0002_default_auto_field diff --git a/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py b/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py new file mode 100644 index 00000000000000..895c1324341777 --- /dev/null +++ b/src/sentry/migrations/0663_artifactbundleindex_cleanup_step3.py @@ -0,0 +1,55 @@ +# Generated by Django 5.0.2 on 2024-03-04 10:50 + +from django.db import migrations + +from sentry.new_migrations.migrations import CheckedMigration + + +class Migration(CheckedMigration): + # This flag is used to mark that a migration shouldn't be automatically run in production. For + # the most part, this should only be used for operations where it's safe to run the migration + # after your code has deployed. So this should not be used for most operations that alter the + # schema of a table. + # Here are some things that make sense to mark as dangerous: + # - Large data migrations. Typically we want these to be run manually by ops so that they can + # be monitored and not block the deploy for a long period of time while they run. + # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to + # have ops run this and not block the deploy. 
Note that while adding an index is a schema + # change, it's completely safe to run the operation after the code has deployed. + is_dangerous = False + + dependencies = [ + ("sentry", "0662_monitor_drop_last_state_change"), + ] + + operations = [ + migrations.RunSQL( + """ + DROP TABLE "sentry_artifactbundleflatfileindex"; + """, + # We just create a fake table here so that the DROP will work if we roll back the migration. + reverse_sql="CREATE TABLE sentry_artifactbundleflatfileindex (fake_col int);", + hints={"tables": ["sentry_artifactbundleflatfileindex"]}, + ), + migrations.RunSQL( + """ + DROP TABLE "sentry_flatfileindexstate"; + """, + # We just create a fake table here so that the DROP will work if we roll back the migration. + reverse_sql="CREATE TABLE sentry_flatfileindexstate (fake_col int);", + hints={"tables": ["sentry_flatfileindexstate"]}, + ), + migrations.RunSQL( + """ + ALTER TABLE "sentry_artifactbundleindex" DROP COLUMN "release_name"; + ALTER TABLE "sentry_artifactbundleindex" DROP COLUMN "dist_name"; + ALTER TABLE "sentry_artifactbundleindex" DROP COLUMN "date_last_modified"; + """, + reverse_sql=""" + ALTER TABLE "sentry_artifactbundleindex" ADD COLUMN "release_name" varchar(250) NULL; + ALTER TABLE "sentry_artifactbundleindex" ADD COLUMN "dist_name" varchar(64) NULL; + ALTER TABLE "sentry_artifactbundleindex" ADD COLUMN "date_last_modified" timestamptz NULL; + """, + hints={"tables": ["sentry_artifactbundleindex"]}, + ), + ] From e51094f77ef39a49fef7ffdbad0ab4a96e3b4175 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Wed, 6 Mar 2024 11:21:48 +0100 Subject: [PATCH 088/145] feat(ddm): Hiding queries (#66398) Allow toggling queries from being displayed by clicking their query symbol. 
- closes https://github.com/getsentry/sentry/issues/66201 --- static/app/utils/metrics/constants.tsx | 2 + static/app/utils/metrics/types.tsx | 1 + static/app/views/ddm/context.tsx | 37 +- static/app/views/ddm/pageHeaderActions.tsx | 1 + static/app/views/ddm/queries.tsx | 170 +++-- static/app/views/ddm/querySymbol.tsx | 41 +- static/app/views/ddm/scratchpad.tsx | 76 +- .../parseMetricWidgetsQueryParam.spec.tsx | 678 ++++++++++-------- .../utils/parseMetricWidgetsQueryParam.tsx | 1 + .../views/ddm/utils/useStructuralSharing.tsx | 10 +- 10 files changed, 581 insertions(+), 436 deletions(-) diff --git a/static/app/utils/metrics/constants.tsx b/static/app/utils/metrics/constants.tsx index 45d5844d2bc098..2a7a290166b134 100644 --- a/static/app/utils/metrics/constants.tsx +++ b/static/app/utils/metrics/constants.tsx @@ -40,6 +40,7 @@ export const emptyMetricsQueryWidget: MetricQueryWidgetParams = { groupBy: [], sort: DEFAULT_SORT_STATE, displayType: MetricDisplayType.LINE, + isHidden: false, }; export const emptyMetricsFormulaWidget: MetricFormulaWidgetParams = { @@ -48,4 +49,5 @@ export const emptyMetricsFormulaWidget: MetricFormulaWidgetParams = { formula: '', sort: DEFAULT_SORT_STATE, displayType: MetricDisplayType.LINE, + isHidden: false, }; diff --git a/static/app/utils/metrics/types.tsx b/static/app/utils/metrics/types.tsx index 962928a3c52fd8..8b0b561739c481 100644 --- a/static/app/utils/metrics/types.tsx +++ b/static/app/utils/metrics/types.tsx @@ -35,6 +35,7 @@ export enum MetricQueryType { export interface BaseWidgetParams { displayType: MetricDisplayType; id: number; + isHidden: boolean; type: MetricQueryType; focusedSeries?: FocusedMetricsSeries[]; sort?: SortState; diff --git a/static/app/views/ddm/context.tsx b/static/app/views/ddm/context.tsx index a74218c1d3c68d..615db3a0af07ee 100644 --- a/static/app/views/ddm/context.tsx +++ b/static/app/views/ddm/context.tsx @@ -52,6 +52,7 @@ interface DDMContextValue { >; setSelectedWidgetIndex: (index: number) => 
void; showQuerySymbols: boolean; + toggleWidgetVisibility: (index: number) => void; updateWidget: ( index: number, data: Partial> @@ -80,6 +81,7 @@ export const DDMContext = createContext({ showQuerySymbols: false, updateWidget: () => {}, widgets: [], + toggleWidgetVisibility: () => {}, }); export function useDDMContext() { @@ -159,8 +161,13 @@ export function useMetricWidgets() { const removeWidget = useCallback( (index: number) => { setWidgets(currentWidgets => { - const newWidgets = [...currentWidgets]; + let newWidgets = [...currentWidgets]; newWidgets.splice(index, 1); + + // Ensure that a visible widget remains + if (!newWidgets.find(w => !w.isHidden)) { + newWidgets = newWidgets.map(w => ({...w, isHidden: false})); + } return newWidgets; }); }, @@ -186,6 +193,7 @@ export function useMetricWidgets() { addWidget, removeWidget, duplicateWidget, + setWidgets, }; } @@ -334,16 +342,32 @@ export function DDMContextProvider({children}: {children: React.ReactNode}) { (value: boolean) => { updateQuery({multiChartMode: value ? 1 : 0}, {replace: true}); updateWidget(0, {focusedSeries: undefined}); - setSelectedWidgetIndex(0); + const firstVisibleWidgetIndex = widgets.findIndex(w => !w.isHidden); + setSelectedWidgetIndex(firstVisibleWidgetIndex); }, - [updateQuery, updateWidget] + [updateQuery, updateWidget, widgets] ); + const toggleWidgetVisibility = useCallback( + (index: number) => { + if (index === selectedWidgetIndex) { + const firstVisibleWidgetIndex = widgets.findIndex(w => !w.isHidden); + setSelectedWidgetIndex(firstVisibleWidgetIndex); + } + updateWidget(index, {isHidden: !widgets[index].isHidden}); + }, + [selectedWidgetIndex, updateWidget, widgets] + ); + + const selectedWidget = widgets[selectedWidgetIndex]; + const isSelectionValid = selectedWidget && !selectedWidget.isHidden; + const contextValue = useMemo( () => ({ addWidget: handleAddWidget, - selectedWidgetIndex: - selectedWidgetIndex > widgets.length - 1 ? 
0 : selectedWidgetIndex, + selectedWidgetIndex: isSelectionValid + ? selectedWidgetIndex + : widgets.findIndex(w => !w.isHidden), setSelectedWidgetIndex: handleSetSelectedWidgetIndex, updateWidget: handleUpdateWidget, removeWidget, @@ -360,9 +384,11 @@ export function DDMContextProvider({children}: {children: React.ReactNode}) { setIsMultiChartMode: handleSetIsMultiChartMode, metricsSamples, setMetricsSamples, + toggleWidgetVisibility, }), [ handleAddWidget, + isSelectionValid, selectedWidgetIndex, widgets, handleSetSelectedWidgetIndex, @@ -377,6 +403,7 @@ export function DDMContextProvider({children}: {children: React.ReactNode}) { isMultiChartMode, handleSetIsMultiChartMode, metricsSamples, + toggleWidgetVisibility, ] ); diff --git a/static/app/views/ddm/pageHeaderActions.tsx b/static/app/views/ddm/pageHeaderActions.tsx index a99eb74aa5cbb9..2d5298ef72e51a 100644 --- a/static/app/views/ddm/pageHeaderActions.tsx +++ b/static/app/views/ddm/pageHeaderActions.tsx @@ -119,6 +119,7 @@ export function PageHeaderActions({showCustomMetricButton, addCustomMetric}: Pro , ] diff --git a/static/app/views/ddm/queries.tsx b/static/app/views/ddm/queries.tsx index e31570e41b8265..b891e3ca6a47c5 100644 --- a/static/app/views/ddm/queries.tsx +++ b/static/app/views/ddm/queries.tsx @@ -4,6 +4,7 @@ import * as echarts from 'echarts/core'; import {Button} from 'sentry/components/button'; import SwitchButton from 'sentry/components/switchButton'; +import {Tooltip} from 'sentry/components/tooltip'; import {IconAdd} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; @@ -32,6 +33,7 @@ export function Queries() { isMultiChartMode, setIsMultiChartMode, addWidget, + toggleWidgetVisibility, } = useDDMContext(); const {selection} = usePageFilters(); @@ -62,6 +64,8 @@ export function Queries() { return [querySymbolSet, formulaSymbolSet]; }, [widgets]); + const visibleWidgets = widgets.filter(widget => !widget.isHidden); + return ( @@ -71,53 +75,24 @@ 
export function Queries() { setSelectedWidgetIndex(index)} - role={isMultiChartMode ? 'button' : undefined} - aria-label={t('Select query')} - /> - ) - } - contextMenu={ - - } + showQuerySymbols={showQuerySymbols} + isSelected={index === selectedWidgetIndex} + canBeHidden={visibleWidgets.length > 1} /> ) : ( setSelectedWidgetIndex(index)} - role={isMultiChartMode ? 'button' : undefined} - aria-label={t('Select query')} - /> - ) - } - contextMenu={} + showQuerySymbols={showQuerySymbols} + isSelected={index === selectedWidgetIndex} + canBeHidden={visibleWidgets.length > 1} /> )} @@ -151,21 +126,25 @@ export function Queries() { } interface QueryProps { + canBeHidden: boolean; index: number; + isSelected: boolean; onChange: (index: number, data: Partial) => void; + onToggleVisibility: (index: number) => void; projects: number[]; + showQuerySymbols: boolean; widget: MetricQueryWidgetParams; - contextMenu?: React.ReactNode; - symbol?: React.ReactNode; } -export function Query({ +function Query({ widget, projects, onChange, - contextMenu, - symbol, + onToggleVisibility, index, + isSelected, + showQuerySymbols, + canBeHidden, }: QueryProps) { const metricsQuery = useMemo( () => ({ @@ -177,6 +156,10 @@ export function Query({ [widget.groupBy, widget.mri, widget.op, widget.query] ); + const handleToggle = useCallback(() => { + onToggleVisibility(index); + }, [index, onToggleVisibility]); + const handleChange = useCallback( (data: Partial) => { onChange(index, data); @@ -184,9 +167,19 @@ export function Query({ [index, onChange] ); + const isToggleDisabled = !canBeHidden && !widget.isHidden; + return ( - - {symbol} + + {showQuerySymbols && ( + + )} - {contextMenu} + ); } interface FormulaProps { availableVariables: Set; + canBeHidden: boolean; formulaVariables: Set; index: number; + isSelected: boolean; onChange: (index: number, data: Partial) => void; + onToggleVisibility: (index: number) => void; + showQuerySymbols: boolean; widget: MetricFormulaWidgetParams; - 
contextMenu?: React.ReactNode; - symbol?: React.ReactNode; } -export function Formula({ +function Formula({ availableVariables, formulaVariables, index, widget, onChange, - contextMenu, - symbol, + onToggleVisibility, + canBeHidden, + isSelected, + showQuerySymbols, }: FormulaProps) { + const handleToggle = useCallback(() => { + onToggleVisibility(index); + }, [index, onToggleVisibility]); + const handleChange = useCallback( - (formula: string) => { - onChange(index, {formula}); + (data: Partial) => { + onChange(index, data); }, [index, onChange] ); + + const isToggleDisabled = !canBeHidden && !widget.isHidden; + return ( - - {symbol} + + {showQuerySymbols && ( + + )} handleChange({formula})} /> - {contextMenu} + ); } +interface QueryToggleProps { + disabled: boolean; + isHidden: boolean; + isSelected: boolean; + onChange: (isHidden: boolean) => void; + queryId: number; +} + +function QueryToggle({ + isHidden, + queryId, + disabled, + onChange, + isSelected, +}: QueryToggleProps) { + let tooltipTitle = isHidden ? t('Show query') : t('Hide query'); + if (disabled) { + tooltipTitle = t('At least one query must be visible'); + } + + return ( + + onChange(!isHidden)} + role="button" + aria-label={isHidden ? 
t('Show query') : t('Hide query')} + /> + + ); +} + const QueryWrapper = styled('div')<{hasSymbol: boolean}>` display: grid; gap: ${space(1)}; @@ -248,6 +305,7 @@ const QueryWrapper = styled('div')<{hasSymbol: boolean}>` const StyledQuerySymbol = styled(QuerySymbol)<{isClickable: boolean}>` margin-top: 10px; + cursor: not-allowed; ${p => p.isClickable && `cursor: pointer;`} `; diff --git a/static/app/views/ddm/querySymbol.tsx b/static/app/views/ddm/querySymbol.tsx index b09b5fcc22cecf..b8c4f81d7434ab 100644 --- a/static/app/views/ddm/querySymbol.tsx +++ b/static/app/views/ddm/querySymbol.tsx @@ -1,3 +1,4 @@ +import {forwardRef} from 'react'; import styled from '@emotion/styled'; import {space} from 'sentry/styles/space'; @@ -15,7 +16,7 @@ export const getQuerySymbol = (index: number) => { return result; }; -const Symbol = styled('div')<{isSelected: boolean}>` +const Symbol = styled('span')<{isSelected: boolean; isHidden?: boolean}>` display: flex; width: 16px; height: 16px; @@ -32,24 +33,34 @@ const Symbol = styled('div')<{isSelected: boolean}>` ${p => p.isSelected && + !p.isHidden && ` background: ${p.theme.purple300}; color: ${p.theme.white}; `} + + ${p => + p.isHidden && + ` + background: ${p.theme.gray300}; + color: ${p.theme.white}; + `} `; -export function QuerySymbol({ - queryId, - isSelected, - ...props -}: React.ComponentProps & {isSelected: boolean; queryId: number}) { - const {showQuerySymbols, isMultiChartMode} = useDDMContext(); - if (!showQuerySymbols || queryId < 0) { - return null; - } - return ( - - {getQuerySymbol(queryId)} - - ); +interface QuerySymbolProps extends React.ComponentProps { + queryId: number; } + +export const QuerySymbol = forwardRef( + function QuerySymbol({queryId, isSelected, ...props}, ref) { + const {showQuerySymbols, isMultiChartMode} = useDDMContext(); + if (!showQuerySymbols || queryId < 0) { + return null; + } + return ( + + {getQuerySymbol(queryId)} + + ); + } +); diff --git a/static/app/views/ddm/scratchpad.tsx 
b/static/app/views/ddm/scratchpad.tsx index 6b273b5eb48657..fccbad9255271d 100644 --- a/static/app/views/ddm/scratchpad.tsx +++ b/static/app/views/ddm/scratchpad.tsx @@ -42,7 +42,7 @@ function widgetToQuery( op: widget.op, groupBy: widget.groupBy, query: widget.query, - isQueryOnly: isQueryOnly, + isQueryOnly: isQueryOnly || widget.isHidden, }; } @@ -172,40 +172,42 @@ export function MetricScratchpad() { return ( {isMultiChartMode ? ( - filteredWidgets.map((widget, index) => ( - - {queries => ( - 1} - onChange={handleChange} - filters={selection} - focusAreaProps={focusArea} - showQuerySymbols={showQuerySymbols} - onSampleClick={handleSampleClick} - onSampleClickV2={handleSampleClickV2} - chartHeight={200} - highlightedSampleId={ - selectedWidgetIndex === index ? highlightedSampleId : undefined - } - metricsSamples={metricsSamples} - context="ddm" - /> - )} - - )) + filteredWidgets.map((widget, index) => + widget.isHidden ? null : ( + + {queries => ( + 1} + onChange={handleChange} + filters={selection} + focusAreaProps={focusArea} + showQuerySymbols={showQuerySymbols} + onSampleClick={handleSampleClick} + onSampleClickV2={handleSampleClickV2} + chartHeight={200} + highlightedSampleId={ + selectedWidgetIndex === index ? 
highlightedSampleId : undefined + } + metricsSamples={metricsSamples} + context="ddm" + /> + )} + + ) + ) ) : ( widgetToQuery(w))} + queries={filteredWidgets + .filter(w => !(w.type === MetricQueryType.FORMULA && w.isHidden)) + .map(w => widgetToQuery(w))} isSelected hasSiblings={false} onChange={handleChange} diff --git a/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.spec.tsx b/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.spec.tsx index 79700ff8ad3e4c..7286dfd2714ee0 100644 --- a/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.spec.tsx +++ b/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.spec.tsx @@ -1,358 +1,398 @@ import {emptyMetricsQueryWidget} from 'sentry/utils/metrics/constants'; -import {MetricQueryType} from 'sentry/utils/metrics/types'; +import { + MetricDisplayType, + MetricQueryType, + type MetricWidgetQueryParams, +} from 'sentry/utils/metrics/types'; import {parseMetricWidgetsQueryParam} from 'sentry/views/ddm/utils/parseMetricWidgetsQueryParam'; +function testParsing(input: any, result: MetricWidgetQueryParams[]) { + expect(parseMetricWidgetsQueryParam(JSON.stringify(input))).toStrictEqual(result); +} + describe('parseMetricWidgetQueryParam', () => { const defaultState = [{...emptyMetricsQueryWidget, id: 0}]; it('returns default widget for invalid param', () => { - expect(parseMetricWidgetsQueryParam(undefined)).toStrictEqual(defaultState); - expect(parseMetricWidgetsQueryParam('')).toStrictEqual(defaultState); - expect(parseMetricWidgetsQueryParam('{}')).toStrictEqual(defaultState); - expect(parseMetricWidgetsQueryParam('true')).toStrictEqual(defaultState); - expect(parseMetricWidgetsQueryParam('2')).toStrictEqual(defaultState); - expect(parseMetricWidgetsQueryParam('"test"')).toStrictEqual(defaultState); + testParsing(undefined, defaultState); + testParsing({}, defaultState); + testParsing(true, defaultState); + testParsing(2, defaultState); + testParsing('', defaultState); + testParsing('test', defaultState); 
// empty array is not valid - expect(parseMetricWidgetsQueryParam('[]')).toStrictEqual(defaultState); + testParsing([], defaultState); }); it('returns a single widget', () => { - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {order: 'asc'}, - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {name: undefined, order: 'asc'}, - }, - ]); + testParsing( + [ + // INPUT + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: ['dist'], + displayType: 'line', + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {order: 'asc'}, + isHidden: true, + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: ['dist'], + displayType: MetricDisplayType.LINE, + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {name: undefined, order: 'asc'}, + isHidden: true, + }, + ] + ); }); it('returns multiple widgets', () => { - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {name: 'avg', order: 'desc'}, - }, - { - id: 1, - type: 
MetricQueryType.QUERY, - mri: 'd:custom/sentry.event_manager.save@second', - op: 'avg', - query: '', - groupBy: ['event_type'], - displayType: 'line', - powerUserMode: false, - focusedSeries: [{id: 'default', groupBy: {event_type: 'default'}}], - sort: {name: 'sum', order: 'asc'}, - }, - { - id: 2, - type: MetricQueryType.FORMULA, - formula: 'a + b', - displayType: 'line', - sort: {name: 'avg', order: 'desc'}, - focusedSeries: [], - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {name: 'avg', order: 'desc'}, - }, - { - id: 1, - type: MetricQueryType.QUERY, - mri: 'd:custom/sentry.event_manager.save@second', - op: 'avg', - query: '', - groupBy: ['event_type'], - displayType: 'line', - powerUserMode: false, - focusedSeries: [{id: 'default', groupBy: {event_type: 'default'}}], - sort: {name: 'sum', order: 'asc'}, - }, - { - id: 2, - type: MetricQueryType.FORMULA, - formula: 'a + b', - displayType: 'line', - sort: {name: 'avg', order: 'desc'}, - focusedSeries: [], - }, - ]); + testParsing( + // INPUT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: ['dist'], + displayType: 'line', + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {name: 'avg', order: 'desc'}, + isHidden: true, + }, + { + id: 1, + type: MetricQueryType.QUERY, + mri: 'd:custom/sentry.event_manager.save@second', + op: 'avg', + query: '', + groupBy: ['event_type'], + displayType: 'line', + powerUserMode: false, + focusedSeries: [{id: 'default', groupBy: {event_type: 'default'}}], + sort: {name: 'sum', order: 'asc'}, + isHidden: false, + }, + { + id: 2, + type: MetricQueryType.FORMULA, + formula: 'a + b', + displayType: 'line', 
+ sort: {name: 'avg', order: 'desc'}, + focusedSeries: [], + isHidden: true, + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: ['dist'], + displayType: MetricDisplayType.LINE, + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {name: 'avg', order: 'desc'}, + isHidden: true, + }, + { + id: 1, + type: MetricQueryType.QUERY, + mri: 'd:custom/sentry.event_manager.save@second', + op: 'avg', + query: '', + groupBy: ['event_type'], + displayType: MetricDisplayType.LINE, + powerUserMode: false, + focusedSeries: [{id: 'default', groupBy: {event_type: 'default'}}], + sort: {name: 'sum', order: 'asc'}, + isHidden: false, + }, + { + id: 2, + type: MetricQueryType.FORMULA, + formula: 'a + b', + displayType: MetricDisplayType.LINE, + sort: {name: 'avg', order: 'desc'}, + focusedSeries: [], + isHidden: true, + }, + ] + ); }); it('falls back to defaults', () => { // Missing values - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - mri: 'd:transactions/duration@millisecond', - }, - { - type: MetricQueryType.FORMULA, - formula: 'a * 2', - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'avg', - query: '', - groupBy: [], - displayType: 'line', - focusedSeries: [], - powerUserMode: false, - sort: {name: undefined, order: 'asc'}, - }, - { - id: 1, - type: MetricQueryType.FORMULA, - formula: 'a * 2', - displayType: 'line', - focusedSeries: [], - sort: {name: undefined, order: 'asc'}, - }, - ]); + testParsing( + // INPUT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + }, + { + type: MetricQueryType.FORMULA, + formula: 'a * 2', + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'avg', + query: '', + groupBy: [], + 
displayType: MetricDisplayType.LINE, + focusedSeries: [], + powerUserMode: false, + sort: {name: undefined, order: 'asc'}, + isHidden: false, + }, + { + id: 1, + type: MetricQueryType.FORMULA, + formula: 'a * 2', + displayType: MetricDisplayType.LINE, + focusedSeries: [], + sort: {name: undefined, order: 'asc'}, + isHidden: false, + }, + ] + ); // Invalid values - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - id: 'invalid', - type: 123, - mri: 'd:transactions/duration@millisecond', - op: 1, - query: 12, - groupBy: true, - displayType: 'aasfcsdf', - focusedSeries: {}, - powerUserMode: 1, - sort: {name: 1, order: 'invalid'}, - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'avg', - query: '', - groupBy: [], - displayType: 'line', - focusedSeries: [], - powerUserMode: false, - sort: {name: undefined, order: 'asc'}, - }, - ]); + testParsing( + // INPUT + [ + { + id: 'invalid', + type: 123, + mri: 'd:transactions/duration@millisecond', + op: 1, + query: 12, + groupBy: true, + displayType: 'aasfcsdf', + focusedSeries: {}, + powerUserMode: 1, + sort: {name: 1, order: 'invalid'}, + isHidden: 'foo', + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'avg', + query: '', + groupBy: [], + displayType: MetricDisplayType.LINE, + focusedSeries: [], + powerUserMode: false, + sort: {name: undefined, order: 'asc'}, + isHidden: false, + }, + ] + ); }); it('ignores invalid widgets', () => { - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - id: 0, - mri: 'd:transactions/duration@millisecond', - }, - { - // Missing MRI - }, - { - // Mallformed MRI - mri: 'transactions/duration@millisecond', - }, - { - // Duplicate id - id: 0, - mri: 'd:transactions/duration@second', - }, - { - // Missing formula - type: MetricQueryType.FORMULA, - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: 
MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'avg', - query: '', - groupBy: [], - displayType: 'line', - focusedSeries: [], - powerUserMode: false, - sort: {name: undefined, order: 'asc'}, - }, - ]); + testParsing( + // INPUT + [ + { + id: 0, + mri: 'd:transactions/duration@millisecond', + }, + { + // Missing MRI + }, + { + // Mallformed MRI + mri: 'transactions/duration@millisecond', + }, + { + // Duplicate id + id: 0, + mri: 'd:transactions/duration@second', + }, + { + // Missing formula + type: MetricQueryType.FORMULA, + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'avg', + query: '', + groupBy: [], + displayType: MetricDisplayType.LINE, + focusedSeries: [], + powerUserMode: false, + sort: {name: undefined, order: 'asc'}, + isHidden: false, + }, + ] + ); }); it('returns default widget if there is no valid widget', () => { - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - // Missing MRI - }, - { - // Missing formula - type: MetricQueryType.FORMULA, - }, - ]) - ) - ).toStrictEqual(defaultState); + testParsing( + // INPUT + [ + { + // Missing MRI + }, + { + // Missing formula + type: MetricQueryType.FORMULA, + }, + ], + // RESULT + defaultState + ); }); it('handles missing array in array params', () => { - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: 'dist', - displayType: 'line', - focusedSeries: {id: 'default', groupBy: {dist: 'default'}}, - powerUserMode: true, - sort: {order: 'asc'}, - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', + testParsing( + // INPUT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: 'dist', + 
displayType: 'line', + focusedSeries: {id: 'default', groupBy: {dist: 'default'}}, + powerUserMode: true, + sort: {order: 'asc'}, + isHidden: false, + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: ['dist'], + displayType: MetricDisplayType.LINE, + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {name: undefined, order: 'asc'}, + isHidden: false, + }, + ] + ); + }); + + it('adds missing ids', () => { + function widgetWithId(id: T) { + return { + id, + type: MetricQueryType.QUERY as const, + mri: 'd:transactions/duration@millisecond' as const, + op: 'sum' as const, query: 'test:query', groupBy: ['dist'], - displayType: 'line', + displayType: MetricDisplayType.LINE, focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], powerUserMode: true, - sort: {name: undefined, order: 'asc'}, - }, - ]); - }); + sort: {name: 'avg' as const, order: 'desc' as const}, + isHidden: false, + }; + } - it('adds missing ids', () => { - const widgetWithId = (id: number | undefined) => ({ - id, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {name: 'avg', order: 'desc'}, - }); - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - widgetWithId(0), - widgetWithId(undefined), - widgetWithId(2), - { - // Invalid widget - }, - widgetWithId(undefined), - widgetWithId(3), - ]) - ) - ).toStrictEqual([ - widgetWithId(0), - widgetWithId(1), - widgetWithId(2), - widgetWithId(4), - widgetWithId(3), - ]); + testParsing( + // INPUT + [ + widgetWithId(0), + widgetWithId(undefined), + widgetWithId(2), + { + // Invalid widget + }, + widgetWithId(undefined), + widgetWithId(3), + ], + // RESULT + [ + widgetWithId(0), + widgetWithId(1), + 
widgetWithId(2), + widgetWithId(4), + widgetWithId(3), + ] + ); }); it('resets the id of a single widget to 0', () => { - expect( - parseMetricWidgetsQueryParam( - JSON.stringify([ - { - id: 5, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {name: 'avg', order: 'desc'}, - }, - ]) - ) - ).toStrictEqual([ - { - id: 0, - type: MetricQueryType.QUERY, - mri: 'd:transactions/duration@millisecond', - op: 'sum', - query: 'test:query', - groupBy: ['dist'], - displayType: 'line', - focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], - powerUserMode: true, - sort: {name: 'avg', order: 'desc'}, - }, - ]); + testParsing( + // INPUT + [ + { + id: 5, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: ['dist'], + displayType: 'line', + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {name: 'avg', order: 'desc'}, + isHidden: false, + }, + ], + // RESULT + [ + { + id: 0, + type: MetricQueryType.QUERY, + mri: 'd:transactions/duration@millisecond', + op: 'sum', + query: 'test:query', + groupBy: ['dist'], + displayType: MetricDisplayType.LINE, + focusedSeries: [{id: 'default', groupBy: {dist: 'default'}}], + powerUserMode: true, + sort: {name: 'avg', order: 'desc'}, + isHidden: false, + }, + ] + ); }); }); diff --git a/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.tsx b/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.tsx index a6db62f6a7829a..5246e600c0c8a7 100644 --- a/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.tsx +++ b/static/app/views/ddm/utils/parseMetricWidgetsQueryParam.tsx @@ -214,6 +214,7 @@ export function parseMetricWidgetsQueryParam( : MetricDisplayType.LINE, focusedSeries: parseArrayParam(widget, 'focusedSeries', 
parseFocusedSeries), sort: parseSortParam(widget, 'sort'), + isHidden: parseBooleanParam(widget, 'isHidden') ?? false, }; switch (type) { diff --git a/static/app/views/ddm/utils/useStructuralSharing.tsx b/static/app/views/ddm/utils/useStructuralSharing.tsx index 4f6deb0323b74e..e052fc6d37a52d 100644 --- a/static/app/views/ddm/utils/useStructuralSharing.tsx +++ b/static/app/views/ddm/utils/useStructuralSharing.tsx @@ -56,11 +56,11 @@ export function structuralSharing(oldValue: T, newValue: T): T { return newValue; } -export const useStructuralSharing = (value: any) => { - const previeousValue = useRef(value); +export function useStructuralSharing(value: T): T { + const previousValue = useRef(value); return useMemo(() => { - const newValue = structuralSharing(previeousValue.current, value); - previeousValue.current = newValue; + const newValue = structuralSharing(previousValue.current, value); + previousValue.current = newValue; return newValue; }, [value]); -}; +} From 910ed9566eec5c095eb3e8d0d80ecbd952741414 Mon Sep 17 00:00:00 2001 From: ArthurKnaus Date: Wed, 6 Mar 2024 11:22:04 +0100 Subject: [PATCH 089/145] fix(ddm): Show samples only on selected widget (#66404) --- static/app/views/ddm/widget.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/static/app/views/ddm/widget.tsx b/static/app/views/ddm/widget.tsx index 06f13495e0fafb..ceed27e153459f 100644 --- a/static/app/views/ddm/widget.tsx +++ b/static/app/views/ddm/widget.tsx @@ -247,8 +247,8 @@ export const MetricWidget = memo( getChartPalette={getChartPalette} onChange={handleChange} focusAreaProps={focusAreaProps} - samples={samples} - samplesV2={samplesV2} + samples={isSelected ? samples : undefined} + samplesV2={isSelected ? 
samplesV2 : undefined} chartHeight={chartHeight} chartGroup={DDM_CHART_GROUP} queries={queries} From 58b07d2c12e7503a9c94c3f19c1ef9b6ee8684ed Mon Sep 17 00:00:00 2001 From: Iker Barriocanal <32816711+iker-barriocanal@users.noreply.github.com> Date: Wed, 6 Mar 2024 14:35:02 +0100 Subject: [PATCH 090/145] ref(deps): Bump sentry-relay to v0.8.48 (#66401) Along the way of bumping dependencies, some tests are fixed to cover for the changes in Relay. --- requirements-base.txt | 2 +- requirements-dev-frozen.txt | 2 +- requirements-frozen.txt | 2 +- src/sentry/receivers/features.py | 2 +- .../api/endpoints/test_relay_globalconfig_v3.py | 15 +++++++++++++++ .../test_spans/test_multiple_full.pysnap | 4 +--- .../snapshots/test_spans/test_single_full.pysnap | 4 +--- tests/sentry/event_manager/test_normalization.py | 2 +- tests/sentry/eventstore/test_models.py | 13 ++++++++++++- 9 files changed, 34 insertions(+), 12 deletions(-) diff --git a/requirements-base.txt b/requirements-base.txt index fa6f922654d677..518dda18cf1ce4 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -65,7 +65,7 @@ sentry-arroyo>=2.16.2 sentry-kafka-schemas>=0.1.58 sentry-ophio==0.1.5 sentry-redis-tools>=0.1.7 -sentry-relay>=0.8.45 +sentry-relay>=0.8.48 sentry-sdk>=1.39.2 snuba-sdk>=2.0.29 simplejson>=3.17.6 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index bde98736cf313d..ae4c46b61f9e18 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -180,7 +180,7 @@ sentry-forked-djangorestframework-stubs==3.14.5.post1 sentry-kafka-schemas==0.1.58 sentry-ophio==0.1.5 sentry-redis-tools==0.1.7 -sentry-relay==0.8.45 +sentry-relay==0.8.48 sentry-sdk==1.39.2 sentry-usage-accountant==0.0.10 simplejson==3.17.6 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index b0ef79fddf89a1..004b1d338a4f08 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -122,7 +122,7 @@ sentry-arroyo==2.16.2 sentry-kafka-schemas==0.1.58 
sentry-ophio==0.1.5 sentry-redis-tools==0.1.7 -sentry-relay==0.8.45 +sentry-relay==0.8.48 sentry-sdk==1.39.2 sentry-usage-accountant==0.0.10 simplejson==3.17.6 diff --git a/src/sentry/receivers/features.py b/src/sentry/receivers/features.py index e653644ed170ed..db56bf7f2c7f88 100644 --- a/src/sentry/receivers/features.py +++ b/src/sentry/receivers/features.py @@ -122,7 +122,7 @@ def record_event_processed(project, event, **kwargs): # Check to make sure more the ip address is being sent. # testing for this in test_no_user_tracking_for_ip_address_only # list(d.keys()) pattern is to make this python3 safe - if user_context and list(user_context.keys()) != ["ip_address"]: + if user_context and len(user_context.keys() - {"ip_address", "sentry_user"}) > 0: feature_slugs.append("user_tracking") # Custom Tags diff --git a/tests/sentry/api/endpoints/test_relay_globalconfig_v3.py b/tests/sentry/api/endpoints/test_relay_globalconfig_v3.py index 9000f7bbfd2bb5..d0d696a20b0309 100644 --- a/tests/sentry/api/endpoints/test_relay_globalconfig_v3.py +++ b/tests/sentry/api/endpoints/test_relay_globalconfig_v3.py @@ -33,6 +33,21 @@ def inner(version, global_): @pytest.mark.django_db def test_global_config(): config = get_global_config() + # Set options to Relay's non-default values to avoid Relay skipping deserialization + config["options"]["relay.cardinality-limiter.error-sample-rate"] = 1.0 + config["options"]["profiling.profile_metrics.unsampled_profiles.enabled"] = True + config["options"]["profiling.profile_metrics.unsampled_profiles.platforms"] = ["fake-platform"] + config["options"]["profiling.profile_metrics.unsampled_profiles.sample_rate"] = 1.0 + config["options"]["relay.metric-bucket-encodings"] = { + "sessions": "array", + "transactions": "array", + "spans": "array", + "custom": "array", + "unsupported": "array", + } + config["options"]["relay.span-usage-metric"] = True + config["options"]["relay.cardinality-limiter.mode"] = "passive" + normalized = 
normalize_global_config(config) assert normalized == config diff --git a/tests/sentry/event_manager/interfaces/snapshots/test_spans/test_multiple_full.pysnap b/tests/sentry/event_manager/interfaces/snapshots/test_spans/test_multiple_full.pysnap index 5713fe514ed89a..dab00a21904c12 100644 --- a/tests/sentry/event_manager/interfaces/snapshots/test_spans/test_multiple_full.pysnap +++ b/tests/sentry/event_manager/interfaces/snapshots/test_spans/test_multiple_full.pysnap @@ -1,13 +1,11 @@ --- -created: '2019-07-11T19:30:21.322813Z' -creator: sentry source: tests/sentry/event_manager/interfaces/test_spans.py --- errors: null to_json: - data: + http.response.status_code: 200 reason: OK - status_code: 200 description: GET http://example.com op: http span_id: 8c931f4740435fb8 diff --git a/tests/sentry/event_manager/interfaces/snapshots/test_spans/test_single_full.pysnap b/tests/sentry/event_manager/interfaces/snapshots/test_spans/test_single_full.pysnap index 1b1857ee2c88fa..c7de28597ba2ab 100644 --- a/tests/sentry/event_manager/interfaces/snapshots/test_spans/test_single_full.pysnap +++ b/tests/sentry/event_manager/interfaces/snapshots/test_spans/test_single_full.pysnap @@ -1,13 +1,11 @@ --- -created: '2019-07-11T19:26:32.626601Z' -creator: sentry source: tests/sentry/event_manager/interfaces/test_spans.py --- errors: null to_json: - data: + http.response.status_code: 200 reason: OK - status_code: 200 description: GET http://example.com op: http span_id: 8c931f4740435fb8 diff --git a/tests/sentry/event_manager/test_normalization.py b/tests/sentry/event_manager/test_normalization.py index 5963a286fa72e9..8ab91e1cfb2182 100644 --- a/tests/sentry/event_manager/test_normalization.py +++ b/tests/sentry/event_manager/test_normalization.py @@ -42,7 +42,7 @@ def test_interface_is_relabeled(): manager.normalize() data = manager.get_data() - assert data["user"] == {"id": "1"} + assert data["user"] == {"id": "1", "sentry_user": "id:1"} @pytest.mark.parametrize("user", ["missing", 
None, {}, {"ip_address": None}]) diff --git a/tests/sentry/eventstore/test_models.py b/tests/sentry/eventstore/test_models.py index 42cbc9a7ac7165..637445053599a4 100644 --- a/tests/sentry/eventstore/test_models.py +++ b/tests/sentry/eventstore/test_models.py @@ -8,6 +8,7 @@ from sentry.eventstore.models import Event, GroupEvent from sentry.grouping.api import GroupingConfig from sentry.grouping.enhancer import Enhancements +from sentry.interfaces.user import User from sentry.issues.issue_occurrence import IssueOccurrence from sentry.models.environment import Environment from sentry.snuba.dataset import Dataset @@ -254,7 +255,17 @@ def test_snuba_data(self): assert event_from_nodestore.location == event_from_snuba.location assert event_from_nodestore.culprit == event_from_snuba.culprit - assert event_from_nodestore.get_minimal_user() == event_from_snuba.get_minimal_user() + user_from_nodestore = event_from_nodestore.get_minimal_user() + user_from_nodestore = User.to_python( + { + "id": user_from_nodestore._data.get("id"), + "email": user_from_nodestore._data.get("email"), + "username": user_from_nodestore._data.get("username"), + "ip_address": user_from_nodestore._data.get("ip_address"), + } + ) + assert user_from_nodestore == event_from_snuba.get_minimal_user() + assert event_from_nodestore.ip_address == event_from_snuba.ip_address assert event_from_nodestore.tags == event_from_snuba.tags From 0c285c0377f6378e0c7dcae50825e4a9fd5c5f81 Mon Sep 17 00:00:00 2001 From: Nar Saynorath Date: Wed, 6 Mar 2024 09:28:09 -0500 Subject: [PATCH 091/145] feat(discover): Remove commented out replay ID column (#66409) --- static/app/views/discover/results.tsx | 6 ------ 1 file changed, 6 deletions(-) diff --git a/static/app/views/discover/results.tsx b/static/app/views/discover/results.tsx index c281b73c41e9ef..cf8df209643f6c 100644 --- a/static/app/views/discover/results.tsx +++ b/static/app/views/discover/results.tsx @@ -299,12 +299,6 @@ export class Results extends Component { 
// If the view is not valid, redirect to a known valid state. const {location, organization, selection, isHomepage, savedQuery} = this.props; - // const isReplayEnabled = organization.features.includes('session-replay'); - // const defaultEventView = Object.assign({}, DEFAULT_EVENT_VIEW, { - // fields: isReplayEnabled - // ? DEFAULT_EVENT_VIEW.fields.concat(['replayId']) - // : DEFAULT_EVENT_VIEW.fields, - // }); const query = isHomepage && savedQuery ? omit(savedQuery, 'id') : DEFAULT_EVENT_VIEW; const nextEventView = EventView.fromNewQueryWithLocation(query, location); From 3afdb48b6c21130f30d39edd4e2952744a873bb6 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Wed, 6 Mar 2024 09:31:46 -0500 Subject: [PATCH 092/145] chore(hybridcloud) Remove unused parameter from sent_incident_alert_notification (#66113) Complete work started in #65736, #65791, and #65883 to remove a parameter from the send_incident_alert_notification RPC method. Fixes HC-1123 Fixes SENTRY-2QAG --- src/sentry/services/hybrid_cloud/integration/impl.py | 8 +------- src/sentry/services/hybrid_cloud/integration/service.py | 4 +--- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/src/sentry/services/hybrid_cloud/integration/impl.py b/src/sentry/services/hybrid_cloud/integration/impl.py index 5a02f9c913cdd1..90b8c46c337101 100644 --- a/src/sentry/services/hybrid_cloud/integration/impl.py +++ b/src/sentry/services/hybrid_cloud/integration/impl.py @@ -32,7 +32,6 @@ serialize_integration_external_project, serialize_organization_integration, ) -from sentry.services.hybrid_cloud.organization import RpcOrganizationSummary from sentry.services.hybrid_cloud.pagination import RpcPaginationArgs, RpcPaginationResult from sentry.shared_integrations.exceptions import ApiError from sentry.utils import json, metrics @@ -357,15 +356,10 @@ def send_incident_alert_notification( incident_id: int, new_status: int, incident_attachment_json: str, - organization: RpcOrganizationSummary | None = None, # 
deprecated - organization_id: int | None = None, + organization_id: int, metric_value: str | None = None, notification_uuid: str | None = None, ) -> bool: - if organization_id is None and organization is not None: - organization_id = organization.id - assert organization_id is not None, "organization or organization_id is required" - sentry_app = SentryApp.objects.get(id=sentry_app_id) metrics.incr("notifications.sent", instance=sentry_app.slug, skip_internal=False) diff --git a/src/sentry/services/hybrid_cloud/integration/service.py b/src/sentry/services/hybrid_cloud/integration/service.py index fbe7932ab7be39..6776e7814d8ec0 100644 --- a/src/sentry/services/hybrid_cloud/integration/service.py +++ b/src/sentry/services/hybrid_cloud/integration/service.py @@ -12,7 +12,6 @@ RpcIntegrationExternalProject, RpcIntegrationIdentityContext, ) -from sentry.services.hybrid_cloud.organization import RpcOrganizationSummary from sentry.services.hybrid_cloud.pagination import RpcPaginationArgs, RpcPaginationResult from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method from sentry.silo import SiloMode @@ -238,8 +237,7 @@ def send_incident_alert_notification( incident_id: int, new_status: int, incident_attachment_json: str, - organization: RpcOrganizationSummary | None = None, - organization_id: int | None = None, + organization_id: int, metric_value: str | None = None, notification_uuid: str | None = None, ) -> bool: From 0ca66611fbc27cdb10ba0b5fc5b0b3549f86042b Mon Sep 17 00:00:00 2001 From: Mark Story Date: Wed, 6 Mar 2024 09:34:31 -0500 Subject: [PATCH 093/145] fix(hybridcloud) Make bitbucket uninstall silo safe (#66354) Follow up on a TODO that has triggered an error. Add tests so that future maintenance on the bitbucket integration is a bit easier. 
Fixes SENTRY-2TNS --- .../integrations/bitbucket/uninstalled.py | 18 ++--- .../bitbucket/test_uninstalled.py | 66 +++++++++++++++++++ 2 files changed, 73 insertions(+), 11 deletions(-) create mode 100644 tests/sentry/integrations/bitbucket/test_uninstalled.py diff --git a/src/sentry/integrations/bitbucket/uninstalled.py b/src/sentry/integrations/bitbucket/uninstalled.py index 90ce7563e513c7..071e1faed76df0 100644 --- a/src/sentry/integrations/bitbucket/uninstalled.py +++ b/src/sentry/integrations/bitbucket/uninstalled.py @@ -8,9 +8,8 @@ from sentry.constants import ObjectStatus from sentry.integrations.utils import AtlassianConnectValidationError, get_integration_from_jwt from sentry.models.integrations.integration import Integration -from sentry.models.organization import Organization -from sentry.models.repository import Repository from sentry.services.hybrid_cloud.integration import integration_service +from sentry.services.hybrid_cloud.repository import repository_service @control_silo_endpoint @@ -44,15 +43,12 @@ def post(self, request: Request, *args, **kwargs) -> Response: org_integrations = integration_service.get_organization_integrations( integration_id=integration.id ) - organizations = Organization.objects.filter( - id__in=[oi.organization_id for oi in org_integrations] - ) - # TODO: Replace with repository_service; support status write - Repository.objects.filter( - organization_id__in=organizations.values_list("id", flat=True), - provider="integrations:bitbucket", - integration_id=integration.id, - ).update(status=ObjectStatus.DISABLED) + for oi in org_integrations: + repository_service.disable_repositories_for_integration( + organization_id=oi.organization_id, + integration_id=integration.id, + provider="integrations:bitbucket", + ) return self.respond() diff --git a/tests/sentry/integrations/bitbucket/test_uninstalled.py b/tests/sentry/integrations/bitbucket/test_uninstalled.py new file mode 100644 index 00000000000000..05504a10a4aed0 --- /dev/null 
+++ b/tests/sentry/integrations/bitbucket/test_uninstalled.py @@ -0,0 +1,66 @@ +from __future__ import annotations + +from unittest.mock import patch + +from django.urls import reverse + +from sentry.constants import ObjectStatus +from sentry.integrations.utils.atlassian_connect import AtlassianConnectValidationError +from sentry.services.hybrid_cloud.integration.serial import serialize_integration +from sentry.testutils.cases import TestCase +from sentry.testutils.silo import control_silo_test + + +@control_silo_test +class BitbucketUnistalledEndpointTest(TestCase): + def setUp(self): + super().setUp() + self.integration = self.create_integration( + organization=self.organization, + external_id="connection:123", + provider="bitbucket", + metadata={ + "public_key": "public-key", + "base_url": "https://api.bitbucket.org", + "shared_secret": "a-big-secret", + "domain_name": "bitbucket.org/test-org", + "icon": "https://bitbucket.org/account/test-org/avatar/", + "scopes": ["issue:write", "pullrequest", "webhook", "repository"], + "uuid": "u-u-i-d", + "type": "team", + }, + ) + self.install = self.integration.get_installation(self.organization.id) + self.path = reverse("sentry-extensions-bitbucket-uninstalled") + self.repository = self.create_repo( + project=self.project, + provider="integrations:bitbucket", + integration_id=self.integration.id, + ) + + def test_uninstall_missing_auth_header(self): + response = self.client.post(self.path) + + assert response.status_code == 400 + self.repository.refresh_from_db() + assert self.repository.id + + @patch("sentry.integrations.bitbucket.uninstalled.get_integration_from_jwt") + def test_uninstall_missing_integration(self, mock_jwt): + mock_jwt.side_effect = AtlassianConnectValidationError("missing integration") + response = self.client.post(self.path, HTTP_AUTHORIZATION="JWT fake-jwt") + + assert response.status_code == 400 + self.repository.refresh_from_db() + assert self.repository.id + assert self.repository.status == 
ObjectStatus.ACTIVE + + @patch("sentry.integrations.bitbucket.uninstalled.get_integration_from_jwt") + def test_uninstall_success(self, mock_jwt): + mock_jwt.return_value = serialize_integration(self.integration) + response = self.client.post(self.path, HTTP_AUTHORIZATION="JWT fake-jwt") + + assert response.status_code == 200 + self.repository.refresh_from_db() + assert self.repository.id + assert self.repository.status == ObjectStatus.DISABLED From 2a749cdddd5f12102a2790ea05684ef73c7c7cfe Mon Sep 17 00:00:00 2001 From: Mark Story Date: Wed, 6 Mar 2024 09:35:06 -0500 Subject: [PATCH 094/145] fix(hybridcloud) Add logging for bad RPC requests (#66357) Add logging when we have a bad request from an RPC method. We don't often get sentry errors from these failures, and not being able to see the response of these operations makes debugging harder. --- src/sentry/services/hybrid_cloud/rpc.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/sentry/services/hybrid_cloud/rpc.py b/src/sentry/services/hybrid_cloud/rpc.py index 285d69a9ac1847..13deb3cf10cd1f 100644 --- a/src/sentry/services/hybrid_cloud/rpc.py +++ b/src/sentry/services/hybrid_cloud/rpc.py @@ -518,8 +518,9 @@ def _remote_exception(self, message: str) -> RpcRemoteException: return RpcRemoteException(self.service_name, self.method_name, message) def _raise_from_response_status_error(self, response: requests.Response) -> NoReturn: + rpc_method = f"{self.service_name}.{self.method_name}" with sentry_sdk.configure_scope() as scope: - scope.set_tag("rpc_method", f"{self.service_name}.{self.method_name}") + scope.set_tag("rpc_method", rpc_method) scope.set_tag("rpc_status_code", response.status_code) if in_test_environment(): @@ -535,6 +536,13 @@ def _raise_from_response_status_error(self, response: requests.Response) -> NoRe if response.status_code == 403: raise self._remote_exception("Unauthorized service access") if response.status_code == 400: + logger.warning( + "rpc.bad_request", + 
extra={ + "rpc_method": rpc_method, + "error": response.content.decode("utf8"), + }, + ) raise self._remote_exception("Invalid service request") raise self._remote_exception(f"Service unavailable ({response.status_code} status)") From 01103b74e950d65acb582523af7ed16879b5e107 Mon Sep 17 00:00:00 2001 From: Billy Vong Date: Wed, 6 Mar 2024 11:37:38 -0330 Subject: [PATCH 095/145] feat(replay): Add feature flag for mobile replay player (#66276) This adds a feature flag to allow support of mobile replay player. Currently for internal testing only --- src/sentry/conf/server.py | 2 ++ src/sentry/features/__init__.py | 1 + 2 files changed, 3 insertions(+) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 6be7b064214ccc..75c8be67659d82 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1848,6 +1848,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:session-replay-enable-canvas-replayer": False, # Enable linking from 'new issue' email notifs to the issue replay list "organizations:session-replay-issue-emails": False, + # Enable mobile replay player + "organizations:session-replay-mobile-player": False, # Enable the new event linking columns to be queried "organizations:session-replay-new-event-counts": False, # Enable Rage Click Issue Creation In Recording Consumer diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 52c016fd4e31d8..0e1abfced2258e 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -242,6 +242,7 @@ default_manager.add("organizations:session-replay-enable-canvas-replayer", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-enable-canvas", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-issue-emails", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) 
+default_manager.add("organizations:session-replay-mobile-player", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-new-event-counts", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:session-replay-recording-scrubbing", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:session-replay-rage-click-issue-creation", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) From ce143c165287dfbee32520761ad291e59b69456d Mon Sep 17 00:00:00 2001 From: Shruthi Date: Wed, 6 Mar 2024 10:11:12 -0500 Subject: [PATCH 096/145] feat(spans): Tag perf problems created from standalone spans (#66325) As we set up the application logic to run performance issue detection on standalone spans, we want to collect metrics to check if transaction based and span based perf issue detection are equivalent. Tagging metrics with `is_standalone_spans` to track this. This also updates fingerprint so the standalone span occurrences don't get grouped with existing perf issues during testing. 
--- src/sentry/event_manager.py | 6 +++-- src/sentry/tasks/spans.py | 6 ++++- src/sentry/testutils/cases.py | 8 +++++-- .../performance_detection.py | 24 +++++++++++++++---- tests/sentry/tasks/test_spans.py | 2 +- .../test_m_n_plus_one_db_detector.py | 1 + .../test_performance_detection.py | 3 ++- 7 files changed, 38 insertions(+), 12 deletions(-) diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py index fcac517bf48539..a15200c43cc95c 100644 --- a/src/sentry/event_manager.py +++ b/src/sentry/event_manager.py @@ -2696,10 +2696,12 @@ def _calculate_span_grouping(jobs: Sequence[Job], projects: ProjectsMapping) -> @metrics.wraps("save_event.detect_performance_problems") -def _detect_performance_problems(jobs: Sequence[Job], projects: ProjectsMapping) -> None: +def _detect_performance_problems( + jobs: Sequence[Job], projects: ProjectsMapping, is_standalone_spans: bool = False +) -> None: for job in jobs: job["performance_problems"] = detect_performance_problems( - job["data"], projects[job["project_id"]] + job["data"], projects[job["project_id"]], is_standalone_spans=is_standalone_spans ) diff --git a/src/sentry/tasks/spans.py b/src/sentry/tasks/spans.py index 5c1588d9132ff9..d3c596decc341a 100644 --- a/src/sentry/tasks/spans.py +++ b/src/sentry/tasks/spans.py @@ -97,6 +97,7 @@ def _update_occurrence_group_type(jobs: Sequence[Job], projects: ProjectsMapping performance_problems = job.pop("performance_problems") for performance_problem in performance_problems: performance_problem.type = PerformanceStreamedSpansGroupTypeExperimental + performance_problem.fingerprint = f"{performance_problem.fingerprint}-{PerformanceStreamedSpansGroupTypeExperimental.type_id}" updated_problems.append(performance_problem) job["performance_problems"] = updated_problems @@ -178,7 +179,10 @@ def _process_segment(project_id, segment_id): _pull_out_data(jobs, projects) _calculate_span_grouping(jobs, projects) - _detect_performance_problems(jobs, projects) + 
_detect_performance_problems(jobs, projects, is_standalone_spans=True) + + # Updates group type and fingerprint of all performance problems + # so they don't double write occurrences as we test. _update_occurrence_group_type(jobs, projects) return jobs diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py index cbec06b057cf61..0d383cf7d862a8 100644 --- a/src/sentry/testutils/cases.py +++ b/src/sentry/testutils/cases.py @@ -637,8 +637,12 @@ def create_performance_issue( perf_event_manager = EventManager(event_data) perf_event_manager.normalize() - def detect_performance_problems_interceptor(data: Event, project: Project): - perf_problems = detect_performance_problems(data, project) + def detect_performance_problems_interceptor( + data: Event, project: Project, is_standalone_spans: bool = False + ): + perf_problems = detect_performance_problems( + data, project, is_standalone_spans=is_standalone_spans + ) if fingerprint: for perf_problem in perf_problems: perf_problem.fingerprint = fingerprint diff --git a/src/sentry/utils/performance_issues/performance_detection.py b/src/sentry/utils/performance_issues/performance_detection.py index 7f5a3afdff00da..8eb20d550a2e9a 100644 --- a/src/sentry/utils/performance_issues/performance_detection.py +++ b/src/sentry/utils/performance_issues/performance_detection.py @@ -110,7 +110,9 @@ def fetch_multi( # Facade in front of performance detection to limit impact of detection on our events ingestion -def detect_performance_problems(data: dict[str, Any], project: Project) -> list[PerformanceProblem]: +def detect_performance_problems( + data: dict[str, Any], project: Project, is_standalone_spans: bool = False +) -> list[PerformanceProblem]: try: rate = options.get("performance.issues.all.problem-detection") if rate and rate > random.random(): @@ -121,7 +123,9 @@ def detect_performance_problems(data: dict[str, Any], project: Project) -> list[ ), sentry_sdk.start_span( op="py.detect_performance_issue", 
description="none" ) as sdk_span: - return _detect_performance_problems(data, sdk_span, project) + return _detect_performance_problems( + data, sdk_span, project, is_standalone_spans=is_standalone_spans + ) except Exception: logging.exception("Failed to detect performance problems") return [] @@ -325,7 +329,7 @@ def get_detection_settings(project_id: int | None = None) -> dict[DetectorType, def _detect_performance_problems( - data: dict[str, Any], sdk_span: Any, project: Project + data: dict[str, Any], sdk_span: Any, project: Project, is_standalone_spans: bool = False ) -> list[PerformanceProblem]: event_id = data.get("event_id", None) @@ -340,7 +344,14 @@ def _detect_performance_problems( run_detector_on_data(detector, data) # Metrics reporting only for detection, not created issues. - report_metrics_for_detectors(data, event_id, detectors, sdk_span, project.organization) + report_metrics_for_detectors( + data, + event_id, + detectors, + sdk_span, + project.organization, + is_standalone_spans=is_standalone_spans, + ) organization = project.organization if project is None or organization is None: @@ -396,6 +407,7 @@ def report_metrics_for_detectors( detectors: Sequence[PerformanceDetector], sdk_span: Any, organization: Organization, + is_standalone_spans: bool = False, ): all_detected_problems = [i for d in detectors for i in d.stored_problems] has_detected_problems = bool(all_detected_problems) @@ -410,10 +422,11 @@ def report_metrics_for_detectors( if has_detected_problems: set_tag("_pi_all_issue_count", len(all_detected_problems)) set_tag("_pi_sdk_name", sdk_name or "") + set_tag("is_standalone_spans", is_standalone_spans) metrics.incr( "performance.performance_issue.aggregate", len(all_detected_problems), - tags={"sdk_name": sdk_name}, + tags={"sdk_name": sdk_name, "is_standalone_spans": is_standalone_spans}, ) if event_id: set_tag("_pi_transaction", event_id) @@ -444,6 +457,7 @@ def report_metrics_for_detectors( detected_tags = { "sdk_name": sdk_name, 
"is_early_adopter": organization.flags.early_adopter.is_set, + "is_standalone_spans": is_standalone_spans, } event_integrations = event.get("sdk", {}).get("integrations", []) or [] diff --git a/tests/sentry/tasks/test_spans.py b/tests/sentry/tasks/test_spans.py index f99d7c0c6a3b74..ea0c32b54f0f23 100644 --- a/tests/sentry/tasks/test_spans.py +++ b/tests/sentry/tasks/test_spans.py @@ -85,7 +85,7 @@ def repeating_span(): assert ( job["performance_problems"][0].fingerprint - == "1-GroupType.PERFORMANCE_N_PLUS_ONE_DB_QUERIES-f906d576ffde8f005fd741f7b9c8a35062361e67" + == "1-GroupType.PERFORMANCE_N_PLUS_ONE_DB_QUERIES-f906d576ffde8f005fd741f7b9c8a35062361e67-1019" ) assert job["performance_problems"][0].type == PerformanceStreamedSpansGroupTypeExperimental diff --git a/tests/sentry/utils/performance_issues/test_m_n_plus_one_db_detector.py b/tests/sentry/utils/performance_issues/test_m_n_plus_one_db_detector.py index 667dae0bef73b3..ffd3236d801de6 100644 --- a/tests/sentry/utils/performance_issues/test_m_n_plus_one_db_detector.py +++ b/tests/sentry/utils/performance_issues/test_m_n_plus_one_db_detector.py @@ -127,6 +127,7 @@ def test_m_n_plus_one_detector_enabled(self): [ call("_pi_all_issue_count", 1), call("_pi_sdk_name", "sentry.javascript.node"), + call("is_standalone_spans", False), call("_pi_transaction", "3818ae4f54ba4fa6ac6f68c9e32793c4"), call( "_pi_m_n_plus_one_db_fp", diff --git a/tests/sentry/utils/performance_issues/test_performance_detection.py b/tests/sentry/utils/performance_issues/test_performance_detection.py index e5f0e0b36d6466..f8658bf5625866 100644 --- a/tests/sentry/utils/performance_issues/test_performance_detection.py +++ b/tests/sentry/utils/performance_issues/test_performance_detection.py @@ -421,7 +421,7 @@ def test_detects_multiple_performance_issues_in_n_plus_one_query(self): perf_problems = _detect_performance_problems(n_plus_one_event, sdk_span_mock, self.project) - assert sdk_span_mock.containing_transaction.set_tag.call_count == 7 + 
assert sdk_span_mock.containing_transaction.set_tag.call_count == 8 sdk_span_mock.containing_transaction.set_tag.assert_has_calls( [ call( @@ -432,6 +432,7 @@ def test_detects_multiple_performance_issues_in_n_plus_one_query(self): "_pi_sdk_name", "", ), + call("is_standalone_spans", False), call( "_pi_transaction", "da78af6000a6400aaa87cf6e14ddeb40", From 1445bebad51795f5bb74531823f6005be9881455 Mon Sep 17 00:00:00 2001 From: Arpad Borsos Date: Wed, 6 Mar 2024 16:19:10 +0100 Subject: [PATCH 097/145] Remove rollout options for Rust Enhancers (#65966) We have fully enabled these, so lets remove all the options usage related to it. This is pretty much a followup from #65533. --- src/sentry/grouping/enhancer/__init__.py | 64 ++++++-------------- tests/sentry/grouping/test_categorization.py | 7 +-- tests/sentry/grouping/test_enhancer.py | 21 ++----- 3 files changed, 23 insertions(+), 69 deletions(-) diff --git a/src/sentry/grouping/enhancer/__init__.py b/src/sentry/grouping/enhancer/__init__.py index 35df2d586804b5..dbb17c9bf44d9a 100644 --- a/src/sentry/grouping/enhancer/__init__.py +++ b/src/sentry/grouping/enhancer/__init__.py @@ -3,7 +3,6 @@ import base64 import logging import os -import random import zlib from collections.abc import Sequence from hashlib import md5 @@ -19,8 +18,7 @@ from sentry_ophio.enhancers import Cache as RustCache from sentry_ophio.enhancers import Enhancements as RustEnhancements -from sentry import options, projectoptions -from sentry.features.rollout import in_random_rollout +from sentry import projectoptions from sentry.grouping.component import GroupingComponent from sentry.stacktraces.functions import set_in_app from sentry.utils import metrics @@ -150,37 +148,25 @@ def merge_rust_enhancements( def parse_rust_enhancements( - source: Literal["config_structure", "config_string"], input: str | bytes, force_parsing=False + source: Literal["config_structure", "config_string"], input: str | bytes ) -> RustEnhancements | None: """ Parses 
``RustEnhancements`` from either a msgpack-encoded `config_structure`, or from the text representation called `config_string`. - - Parsing itself is controlled via an option, but can be forced via `force_parsing`. """ rust_enhancements = None - parse_rust_enhancements = force_parsing - if not force_parsing: - try: - parse_rust_enhancements = random.random() < options.get( - "grouping.rust_enhancers.parse_rate" - ) - except Exception: - parse_rust_enhancements = False - - if parse_rust_enhancements: - try: - if source == "config_structure": - assert isinstance(input, bytes) - rust_enhancements = RustEnhancements.from_config_structure(input, RUST_CACHE) - else: - assert isinstance(input, str) - rust_enhancements = RustEnhancements.parse(input, RUST_CACHE) + try: + if source == "config_structure": + assert isinstance(input, bytes) + rust_enhancements = RustEnhancements.from_config_structure(input, RUST_CACHE) + else: + assert isinstance(input, str) + rust_enhancements = RustEnhancements.parse(input, RUST_CACHE) - metrics.incr("rust_enhancements.parsing_performed", tags={"source": source}) - except Exception: - logger.exception("failed parsing Rust Enhancements from `%s`", source) + metrics.incr("rust_enhancements.parsing_performed", tags={"source": source}) + except Exception: + logger.exception("failed parsing Rust Enhancements from `%s`", source) return rust_enhancements @@ -201,13 +187,6 @@ def apply_rust_enhancements( if not rust_enhancements: return None - try: - use_rust_enhancements = in_random_rollout("grouping.rust_enhancers.modify_frames_rate") - except Exception: - use_rust_enhancements = False - if not use_rust_enhancements: - return None - try: e = exception_data or {} e = { @@ -249,13 +228,6 @@ def compare_rust_enhancers( sentry_sdk.capture_message("Rust Enhancements mismatch") -def prefer_rust_enhancers(): - try: - return in_random_rollout("grouping.rust_enhancers.prefer_rust_result") - except Exception: - return False - - class Enhancements: # NOTE: 
You must add a version to ``VERSIONS`` any time attributes are added # to this class, s.t. no enhancements lacking these attributes are loaded @@ -299,13 +271,15 @@ def apply_modifications_to_frame( self.rust_enhancements, match_frames, exception_data ) - if rust_enhanced_frames and prefer_rust_enhancers(): + if rust_enhanced_frames: for frame, (category, in_app) in zip(frames, rust_enhanced_frames): if in_app is not None: set_in_app(frame, in_app) if category is not None: set_path(frame, "data", "category", value=category) return + else: + logger.error("Rust enhancements were not applied successfully") in_memory_cache: dict[str, str] = {} @@ -476,8 +450,8 @@ def loads(cls, data) -> Enhancements: @classmethod @sentry_sdk.tracing.trace - def from_config_string(self, s, bases=None, id=None, force_rust_parsing=False) -> Enhancements: - rust_enhancements = parse_rust_enhancements("config_string", s, force_rust_parsing) + def from_config_string(self, s, bases=None, id=None) -> Enhancements: + rust_enhancements = parse_rust_enhancements("config_string", s) try: tree = enhancements_grammar.parse(s) @@ -815,9 +789,7 @@ def _load_configs() -> dict[str, Enhancements]: fn = fn.replace("@", ":") # NOTE: we want to force parsing the `RustEnhancements` here, as the base rules # are required for inheritance, and because they are well tested. - enhancements = Enhancements.from_config_string( - f.read(), id=fn[:-4], force_rust_parsing=True - ) + enhancements = Enhancements.from_config_string(f.read(), id=fn[:-4]) rv[fn[:-4]] = enhancements return rv diff --git a/tests/sentry/grouping/test_categorization.py b/tests/sentry/grouping/test_categorization.py index d0acab7ddf75b6..1e7e3021cbf666 100644 --- a/tests/sentry/grouping/test_categorization.py +++ b/tests/sentry/grouping/test_categorization.py @@ -43,6 +43,7 @@ If you push any intermediate step into master or even just a PR, you just leaked PII to the public and all of this will have been for nothing. 
""" + from __future__ import annotations import contextlib @@ -58,8 +59,6 @@ from sentry.grouping.enhancer.actions import VarAction from sentry.grouping.strategies.base import StrategyConfiguration from sentry.stacktraces.processing import normalize_stacktraces_for_grouping -from sentry.testutils.helpers.options import override_options -from sentry.testutils.pytest.fixtures import django_db_all from sentry.utils.safe import get_path _fixture_path = os.path.join(os.path.dirname(__file__), "categorization_inputs") @@ -130,10 +129,6 @@ def get_stacktrace_render(data): @pytest.mark.parametrize("input", INPUTS, ids=lambda x: x.filename[:-5].replace("-", "_")) -@django_db_all -@override_options( - {"grouping.rust_enhancers.parse_rate": 1.0, "grouping.rust_enhancers.modify_frames_rate": 1.0} -) def test_categorization(input: CategorizationInput, insta_snapshot, track_enhancers_coverage): # XXX: In-process re-runs using pytest-watch or whatever will behave # wrongly because input.data is reused between tests, we do this for perf. 
diff --git a/tests/sentry/grouping/test_enhancer.py b/tests/sentry/grouping/test_enhancer.py index 3209eff791193d..a6a9e70e66585a 100644 --- a/tests/sentry/grouping/test_enhancer.py +++ b/tests/sentry/grouping/test_enhancer.py @@ -8,7 +8,6 @@ from sentry.grouping.enhancer import Enhancements from sentry.grouping.enhancer.exceptions import InvalidEnhancerConfig from sentry.grouping.enhancer.matchers import create_match_frame -from sentry.testutils.helpers.options import override_options from sentry.testutils.pytest.fixtures import django_db_all @@ -53,14 +52,10 @@ def test_basic_parsing(insta_snapshot, version): insta_snapshot(dump_obj(enhancement)) - rust_parsing = 1.0 if version == 2 else 0.0 - with override_options({"grouping.rust_enhancers.parse_rate": rust_parsing}): - dumped = enhancement.dumps() - assert Enhancements.loads(dumped).dumps() == dumped - assert ( - Enhancements.loads(dumped)._to_config_structure() == enhancement._to_config_structure() - ) - assert isinstance(dumped, str) + dumped = enhancement.dumps() + assert Enhancements.loads(dumped).dumps() == dumped + assert Enhancements.loads(dumped)._to_config_structure() == enhancement._to_config_structure() + assert isinstance(dumped, str) def test_parsing_errors(): @@ -80,10 +75,6 @@ def test_callee_recursion(): Enhancements.from_config_string(" category:foo | [ category:bar ] | [ category:baz ] +app") -@django_db_all -@override_options( - {"grouping.rust_enhancers.parse_rate": 1.0, "grouping.rust_enhancers.modify_frames_rate": 1.0} -) def test_flipflop_inapp(): enhancement = Enhancements.from_config_string( """ @@ -487,10 +478,6 @@ def test_sentinel_and_prefix(action, type): assert getattr(component, f"is_{type}_frame") is expected -@django_db_all -@override_options( - {"grouping.rust_enhancers.parse_rate": 1.0, "grouping.rust_enhancers.modify_frames_rate": 1.0} -) @pytest.mark.parametrize( "frame", [ From de9eeddfd80d5d48b053792beba613d9aaa7f4dd Mon Sep 17 00:00:00 2001 From: Francesco Vigliaturo 
Date: Wed, 6 Mar 2024 16:19:21 +0100 Subject: [PATCH 098/145] feat(profiling): pass internal (profile-scoped) dsn for function metrics ingestion to vroom (#66300) --- src/sentry/profiles/task.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/src/sentry/profiles/task.py b/src/sentry/profiles/task.py index 7d31ff7055dab7..86c53714c608a8 100644 --- a/src/sentry/profiles/task.py +++ b/src/sentry/profiles/task.py @@ -3,6 +3,7 @@ from collections.abc import Mapping, MutableMapping from copy import deepcopy from datetime import datetime, timezone +from functools import lru_cache from time import time from typing import Any @@ -21,6 +22,7 @@ from sentry.models.eventerror import EventError from sentry.models.organization import Organization from sentry.models.project import Project +from sentry.models.projectkey import ProjectKey, UseCase from sentry.profiles.device import classify_device from sentry.profiles.java import deobfuscate_signature, format_signature from sentry.profiles.utils import get_from_profiling_service @@ -136,6 +138,20 @@ def process_profile_task( set_measurement("profile.stacks.processed", len(profile["profile"]["stacks"])) set_measurement("profile.frames.processed", len(profile["profile"]["frames"])) + if options.get( + "profiling.generic_metrics.functions_ingestion.enabled" + ) and project.organization_id in options.get( + "profiling.generic_metrics.functions_ingestion.allowed_org_ids" + ): + try: + with metrics.timer("process_profile.get_metrics_dsn"): + dsn = get_metrics_dsn(project.id) + profile["options"] = { + "dsn": dsn, + } + except Exception as e: + sentry_sdk.capture_exception(e) + if not _push_profile_to_vroom(profile, project): return @@ -898,3 +914,11 @@ def clean_android_js_profile(profile: Profile): del p["event_id"] del p["release"] del p["dist"] + + +@lru_cache(maxsize=100) +def get_metrics_dsn(project_id: int) -> str: + project_key, _ = ProjectKey.objects.get_or_create( + project_id=project_id, 
use_case=UseCase.PROFILING.value + ) + return project_key.get_dsn(public=True) From 2b9958a56fb0bee04a6666f91b438c441ad86c55 Mon Sep 17 00:00:00 2001 From: Evan Hicks Date: Wed, 6 Mar 2024 10:30:11 -0500 Subject: [PATCH 099/145] fix(mql): Handle strings and formula with scalars bug (#66324) This PR fixes two bugs. Firstly, if a user passed an MQL string into the layer (which is completely valid) the layer would throw an exception. Secondly, if a formula had a component that was itself a formula of scalars, e.g. sum(x) + (7 * 24), an exception would be thrown. --- src/sentry/snuba/metrics_layer/query.py | 27 ++++++++++++++++--------- tests/snuba/test_metrics_layer.py | 24 ++++++++++++++++++++++ 2 files changed, 41 insertions(+), 10 deletions(-) diff --git a/src/sentry/snuba/metrics_layer/query.py b/src/sentry/snuba/metrics_layer/query.py index 8dc85d1498f532..e7b541137f968a 100644 --- a/src/sentry/snuba/metrics_layer/query.py +++ b/src/sentry/snuba/metrics_layer/query.py @@ -18,6 +18,7 @@ Timeseries, ) from snuba_sdk.formula import FormulaParameterGroup +from snuba_sdk.mql.mql import parse_mql from sentry.exceptions import InvalidParams from sentry.sentry_metrics.use_case_id_registry import UseCaseID @@ -137,6 +138,10 @@ def _setup_metrics_query(request: Request) -> tuple[Request, datetime, datetime] metrics_query = request.query assert isinstance(metrics_query, MetricsQuery) + # We allow users to pass in a string instead of a Formula/Timeseries object. Handle that case here. 
+ if isinstance(metrics_query.query, str): + metrics_query = metrics_query.set_query(parse_mql(metrics_query.query)) + assert len(metrics_query.scope.org_ids) == 1 # Initially only allow 1 org id organization_id = metrics_query.scope.org_ids[0] tenant_ids = request.tenant_ids or {"organization_id": organization_id} @@ -265,7 +270,14 @@ def _resolve_query_metadata( assert metrics_query.query is not None org_id = metrics_query.scope.org_ids[0] - use_case_id_str = _resolve_use_case_id_str(metrics_query.query) + use_case_ids = _resolve_use_case_ids(metrics_query.query) + + if not use_case_ids: + raise InvalidParams("No use case found in formula parameters") + if len(use_case_ids) > 1: + raise InvalidParams("Formula parameters must all be from the same use case") + use_case_id_str = use_case_ids.pop() + if metrics_query.scope.use_case_id is None: metrics_query = metrics_query.set_scope( metrics_query.scope.set_use_case_id(use_case_id_str) @@ -331,7 +343,7 @@ def _resolve_timeseries_metadata( return series, mappings -def _resolve_use_case_id_str(exp: Formula | Timeseries) -> str: +def _resolve_use_case_ids(exp: Formula | Timeseries) -> set[str]: def fetch_namespace(metric: Metric) -> str: if metric.mri is None: mri = get_mri(metric.public_name) @@ -344,20 +356,15 @@ def fetch_namespace(metric: Metric) -> str: return parsed_mri.namespace if isinstance(exp, Timeseries): - return fetch_namespace(exp.metric) + return {fetch_namespace(exp.metric)} assert isinstance(exp, Formula), exp namespaces = set() for p in exp.parameters: if isinstance(p, (Formula, Timeseries)): - namespaces.add(_resolve_use_case_id_str(p)) - - if not namespaces: - raise InvalidParams("No use case found in formula parameters") - if len(namespaces) > 1: - raise InvalidParams("Formula parameters must all be from the same use case") + namespaces |= _resolve_use_case_ids(p) - return namespaces.pop() + return namespaces def _lookup_indexer_resolve( diff --git a/tests/snuba/test_metrics_layer.py 
b/tests/snuba/test_metrics_layer.py index 0c0c234da5dee6..90418fc9dd3be3 100644 --- a/tests/snuba/test_metrics_layer.py +++ b/tests/snuba/test_metrics_layer.py @@ -848,3 +848,27 @@ def test_resolve_all_mris(self) -> None: ) result = run_query(request) assert len(result["data"]) == 1 + + def test_formulas_with_scalar_formulas(self) -> None: + query = MetricsQuery( + query=f"sum({TransactionMRI.DURATION.value}) + (24 * 3600)", + start=self.hour_ago, + end=self.now, + rollup=Rollup(interval=60, granularity=60), + scope=MetricsScope( + org_ids=[self.org_id], + project_ids=[self.project.id], + use_case_id=UseCaseID.TRANSACTIONS.value, + ), + ) + + request = Request( + dataset="generic_metrics", + app_id="tests", + query=query, + tenant_ids={"referrer": "metrics.testing.test", "organization_id": self.org_id}, + ) + result = run_query(request) + assert len(result["data"]) == 10 + for row in result["data"]: + assert row["aggregate_value"] >= 86400 From cfd894911aa3a947ea89fe1dcab60ede4e45ea51 Mon Sep 17 00:00:00 2001 From: Mark Story Date: Wed, 6 Mar 2024 10:30:30 -0500 Subject: [PATCH 100/145] feat(hybridcloud) Add region tag to errors/transactions (#66370) Add sentry_region to the tags of errors/transactions we collect. This helps with diagnosing which region an error is occurring in. Previously we were planning on using `environment` for this but it would have required a non-trivial amount of monitoring changes. The `sentry_region` name mirrors the tag used in other monitoring tools we use. 
--- src/sentry/utils/sdk.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 5dbba56f16ba72..02622eef964854 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -217,8 +217,11 @@ def before_send_transaction(event, _): def before_send(event, _): - if event.get("tags") and settings.SILO_MODE: - event["tags"]["silo_mode"] = settings.SILO_MODE + if event.get("tags"): + if settings.SILO_MODE: + event["tags"]["silo_mode"] = settings.SILO_MODE + if settings.SENTRY_REGION: + event["tags"]["sentry_region"] = settings.SENTRY_REGION return event From b0a2c9eba83c2cc6bf55cd512b665e882a065acf Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:44:18 -0500 Subject: [PATCH 101/145] feat(perf): Add HTTP module domain overview page scaffold (#66359) Just some very very basic content to get some content and routing up. --- static/app/routes.tsx | 6 + .../app/views/performance/http/domainCell.tsx | 40 ++++ .../views/performance/http/domainsTable.tsx | 5 + .../http/httpDomainSummaryPage.spec.tsx | 130 ++++++++++++ .../http/httpDomainSummaryPage.tsx | 188 ++++++++++++++++++ .../performance/http/httpLandingPage.spec.tsx | 16 +- .../performance/http/httpLandingPage.tsx | 2 +- 7 files changed, 382 insertions(+), 5 deletions(-) create mode 100644 static/app/views/performance/http/domainCell.tsx create mode 100644 static/app/views/performance/http/httpDomainSummaryPage.spec.tsx create mode 100644 static/app/views/performance/http/httpDomainSummaryPage.tsx diff --git a/static/app/routes.tsx b/static/app/routes.tsx index 27b854b4e31d3e..c2a80e387fd343 100644 --- a/static/app/routes.tsx +++ b/static/app/routes.tsx @@ -1687,6 +1687,12 @@ function buildRoutes() { import('sentry/views/performance/http/httpLandingPage'))} /> + import('sentry/views/performance/http/httpDomainSummaryPage') + )} + /> diff --git 
a/static/app/views/performance/http/domainCell.tsx b/static/app/views/performance/http/domainCell.tsx new file mode 100644 index 00000000000000..8878e9b09a0f1c --- /dev/null +++ b/static/app/views/performance/http/domainCell.tsx @@ -0,0 +1,40 @@ +import {Link} from 'react-router'; +import * as qs from 'query-string'; + +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import {normalizeUrl} from 'sentry/utils/withDomainRequired'; +import {OverflowEllipsisTextContainer} from 'sentry/views/starfish/components/textAlign'; + +interface Props { + domain?: string; +} + +export function DomainCell({domain}: Props) { + const location = useLocation(); + const organization = useOrganization(); + + // NOTE: This is for safety only, the product should not fetch or render rows with missing domains or project IDs + if (!domain) { + return NULL_DESCRIPTION; + } + + const queryString = { + ...location.query, + domain, + }; + + return ( + + + {domain} + + + ); +} + +const NULL_DESCRIPTION = <null>; diff --git a/static/app/views/performance/http/domainsTable.tsx b/static/app/views/performance/http/domainsTable.tsx index 49ffb17965b7c0..e9d2f8e737ba80 100644 --- a/static/app/views/performance/http/domainsTable.tsx +++ b/static/app/views/performance/http/domainsTable.tsx @@ -14,6 +14,7 @@ import {RATE_UNIT_TITLE, RateUnit} from 'sentry/utils/discover/fields'; import {VisuallyCompleteWithData} from 'sentry/utils/performanceForSentry'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; +import {DomainCell} from 'sentry/views/performance/http/domainCell'; import {renderHeadCell} from 'sentry/views/starfish/components/tableCells/renderHeadCell'; import type {MetricsResponse} from 'sentry/views/starfish/types'; import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; @@ -135,6 +136,10 @@ function renderBodyCell( location: Location, 
organization: Organization ) { + if (column.key === 'span.domain') { + return ; + } + if (!meta?.fields) { return row[column.key]; } diff --git a/static/app/views/performance/http/httpDomainSummaryPage.spec.tsx b/static/app/views/performance/http/httpDomainSummaryPage.spec.tsx new file mode 100644 index 00000000000000..e657ac38ac2a46 --- /dev/null +++ b/static/app/views/performance/http/httpDomainSummaryPage.spec.tsx @@ -0,0 +1,130 @@ +import {OrganizationFixture} from 'sentry-fixture/organization'; + +import {render, screen, waitForElementToBeRemoved} from 'sentry-test/reactTestingLibrary'; + +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import usePageFilters from 'sentry/utils/usePageFilters'; +import {HTTPDomainSummaryPage} from 'sentry/views/performance/http/httpDomainSummaryPage'; + +jest.mock('sentry/utils/useLocation'); +jest.mock('sentry/utils/usePageFilters'); +jest.mock('sentry/utils/useOrganization'); + +describe('HTTPSummaryPage', function () { + const organization = OrganizationFixture(); + + let domainChartsRequestMock; + + jest.mocked(usePageFilters).mockReturnValue({ + isReady: true, + desyncedFilters: new Set(), + pinnedFilters: new Set(), + shouldPersist: true, + selection: { + datetime: { + period: '10d', + start: null, + end: null, + utc: false, + }, + environments: [], + projects: [], + }, + }); + + jest.mocked(useLocation).mockReturnValue({ + pathname: '', + search: '', + query: {domain: '*.sentry.dev', statsPeriod: '10d'}, + hash: '', + state: undefined, + action: 'PUSH', + key: '', + }); + + jest.mocked(useOrganization).mockReturnValue(organization); + + beforeEach(function () { + MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/events/`, + method: 'GET', + body: { + data: [], + }, + }); + + domainChartsRequestMock = MockApiClient.addMockResponse({ + url: `/organizations/${organization.slug}/events-stats/`, + method: 'GET', + body: { + 'spm()': 
{ + data: [ + [1699907700, [{count: 7810.2}]], + [1699908000, [{count: 1216.8}]], + ], + }, + }, + }); + }); + + afterAll(function () { + jest.resetAllMocks(); + }); + + it('fetches module data', async function () { + render(); + + expect(domainChartsRequestMock).toHaveBeenNthCalledWith( + 1, + `/organizations/${organization.slug}/events-stats/`, + expect.objectContaining({ + method: 'GET', + query: { + cursor: undefined, + dataset: 'spansMetrics', + environment: [], + excludeOther: 0, + field: [], + interval: '30m', + orderby: undefined, + partial: 1, + per_page: 50, + project: [], + query: 'span.module:http span.domain:"\\*.sentry.dev"', + referrer: 'api.starfish.http-module-domain-summary-throughput-chart', + statsPeriod: '10d', + topEvents: undefined, + yAxis: 'spm()', + }, + }) + ); + + expect(domainChartsRequestMock).toHaveBeenNthCalledWith( + 2, + `/organizations/${organization.slug}/events-stats/`, + expect.objectContaining({ + method: 'GET', + query: { + cursor: undefined, + dataset: 'spansMetrics', + environment: [], + excludeOther: 0, + field: [], + interval: '30m', + orderby: undefined, + partial: 1, + per_page: 50, + project: [], + query: 'span.module:http span.domain:"\\*.sentry.dev"', + referrer: 'api.starfish.http-module-domain-summary-duration-chart', + statsPeriod: '10d', + topEvents: undefined, + yAxis: 'avg(span.self_time)', + }, + }) + ); + + await waitForElementToBeRemoved(() => screen.queryAllByTestId('loading-indicator')); + }); +}); diff --git a/static/app/views/performance/http/httpDomainSummaryPage.tsx b/static/app/views/performance/http/httpDomainSummaryPage.tsx new file mode 100644 index 00000000000000..494754919700d2 --- /dev/null +++ b/static/app/views/performance/http/httpDomainSummaryPage.tsx @@ -0,0 +1,188 @@ +import React from 'react'; +import styled from '@emotion/styled'; + +import {Breadcrumbs} from 'sentry/components/breadcrumbs'; +import FloatingFeedbackWidget from 'sentry/components/feedback/widget/floatingFeedbackWidget'; 
+import * as Layout from 'sentry/components/layouts/thirds'; +import {DatePageFilter} from 'sentry/components/organizations/datePageFilter'; +import {EnvironmentPageFilter} from 'sentry/components/organizations/environmentPageFilter'; +import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; +import {t} from 'sentry/locale'; +import {space} from 'sentry/styles/space'; +import {DurationUnit, RateUnit} from 'sentry/utils/discover/fields'; +import {useLocation} from 'sentry/utils/useLocation'; +import useOrganization from 'sentry/utils/useOrganization'; +import {normalizeUrl} from 'sentry/utils/withDomainRequired'; +import {DurationChart} from 'sentry/views/performance/database/durationChart'; +import {ThroughputChart} from 'sentry/views/performance/database/throughputChart'; +import {useSelectedDurationAggregate} from 'sentry/views/performance/database/useSelectedDurationAggregate'; +import {MetricReadout} from 'sentry/views/performance/metricReadout'; +import * as ModuleLayout from 'sentry/views/performance/moduleLayout'; +import {ModulePageProviders} from 'sentry/views/performance/modulePageProviders'; +import {useSynchronizeCharts} from 'sentry/views/starfish/components/chart'; +import {useSpanMetrics} from 'sentry/views/starfish/queries/useSpanMetrics'; +import {useSpanMetricsSeries} from 'sentry/views/starfish/queries/useSpanMetricsSeries'; +import type {SpanMetricsQueryFilters} from 'sentry/views/starfish/types'; +import {ModuleName, SpanFunction, SpanMetricsField} from 'sentry/views/starfish/types'; +import {DataTitles, getThroughputTitle} from 'sentry/views/starfish/views/spans/types'; + +type Query = { + aggregate?: string; + domain?: string; +}; + +export function HTTPDomainSummaryPage() { + const location = useLocation(); + const organization = useOrganization(); + + const [selectedAggregate] = useSelectedDurationAggregate(); + + const {domain} = location.query; + + const filters: SpanMetricsQueryFilters = { + 'span.module': 
ModuleName.HTTP, + 'span.domain': domain, + }; + + const {data: domainMetrics, isLoading: areDomainMetricsLoading} = useSpanMetrics({ + filters, + fields: [ + SpanMetricsField.SPAN_DOMAIN, + `${SpanFunction.SPM}()`, + `avg(${SpanMetricsField.SPAN_SELF_TIME})`, + `sum(${SpanMetricsField.SPAN_SELF_TIME})`, + `${SpanFunction.TIME_SPENT_PERCENTAGE}()`, + ], + referrer: 'api.starfish.http-module-domain-summary-metrics-ribbon', + }); + + const { + isLoading: isThroughputDataLoading, + data: throughputData, + error: throughputError, + } = useSpanMetricsSeries({ + filters, + yAxis: ['spm()'], + referrer: 'api.starfish.http-module-domain-summary-throughput-chart', + }); + + const { + isLoading: isDurationDataLoading, + data: durationData, + error: durationError, + } = useSpanMetricsSeries({ + filters, + yAxis: [`${selectedAggregate}(${SpanMetricsField.SPAN_SELF_TIME})`], + referrer: 'api.starfish.http-module-domain-summary-duration-chart', + }); + + useSynchronizeCharts([!isThroughputDataLoading && !isDurationDataLoading]); + + return ( + + + + + {domain} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ); +} + +const HeaderContainer = styled('div')` + display: flex; + justify-content: space-between; + flex-wrap: wrap; +`; + +const MetricsRibbon = styled('div')` + display: flex; + flex-wrap: wrap; + gap: ${space(4)}; +`; + +function LandingPageWithProviders() { + return ( + + + + ); +} + +export default LandingPageWithProviders; diff --git a/static/app/views/performance/http/httpLandingPage.spec.tsx b/static/app/views/performance/http/httpLandingPage.spec.tsx index 255f46173f931e..0258639ababcd9 100644 --- a/static/app/views/performance/http/httpLandingPage.spec.tsx +++ b/static/app/views/performance/http/httpLandingPage.spec.tsx @@ -50,7 +50,9 @@ describe('HTTPLandingPage', function () { url: `/organizations/${organization.slug}/events/`, method: 'GET', match: [ - MockApiClient.matchQuery({referrer: 'api.starfish.http-module-domains-list'}), + 
MockApiClient.matchQuery({ + referrer: 'api.starfish.http-module-landing-domains-list', + }), ], body: { data: [ @@ -153,7 +155,7 @@ describe('HTTPLandingPage', function () { per_page: 10, project: [], query: 'span.module:http has:span.domain', - referrer: 'api.starfish.http-module-domains-list', + referrer: 'api.starfish.http-module-landing-domains-list', sort: '-time_spent_percentage()', statsPeriod: '10d', }, @@ -168,7 +170,13 @@ describe('HTTPLandingPage', function () { await waitForElementToBeRemoved(() => screen.queryAllByTestId('loading-indicator')); - expect(screen.getByRole('cell', {name: '*.sentry.io'})).toBeInTheDocument(); - expect(screen.getByRole('cell', {name: '*.github.com'})).toBeInTheDocument(); + expect(screen.getByRole('link', {name: '*.sentry.io'})).toHaveAttribute( + 'href', + '/organizations/org-slug/performance/http/domains/?domain=%2A.sentry.io&statsPeriod=10d' + ); + expect(screen.getByRole('link', {name: '*.github.com'})).toHaveAttribute( + 'href', + '/organizations/org-slug/performance/http/domains/?domain=%2A.github.com&statsPeriod=10d' + ); }); }); diff --git a/static/app/views/performance/http/httpLandingPage.tsx b/static/app/views/performance/http/httpLandingPage.tsx index 5807baddcd047b..31797561ce3645 100644 --- a/static/app/views/performance/http/httpLandingPage.tsx +++ b/static/app/views/performance/http/httpLandingPage.tsx @@ -81,7 +81,7 @@ export function HTTPLandingPage() { sorts: [sort], limit: DOMAIN_TABLE_ROW_COUNT, cursor, - referrer: 'api.starfish.http-module-domains-list', + referrer: 'api.starfish.http-module-landing-domains-list', }); useSynchronizeCharts([!isThroughputDataLoading && !isDurationDataLoading]); From 345f8d94d10ce7b91a3acc3c0f5b23c1f1cb5f93 Mon Sep 17 00:00:00 2001 From: Kev <6111995+k-fish@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:59:08 -0500 Subject: [PATCH 102/145] revert(sdk): Move us back to 7.102 js sdk (#66412) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit …integration (#66317)" This reverts commit f4c49786a4da31143cf50fab44c41f7e37f47dbc. Looking for the reason for the drop in pageloads. --- package.json | 12 +- static/app/bootstrap/initializeSdk.tsx | 32 ++--- tests/js/setup.ts | 3 - yarn.lock | 168 ++++++++++++------------- 4 files changed, 107 insertions(+), 108 deletions(-) diff --git a/package.json b/package.json index 767272f0463a93..4c85db3850f96c 100644 --- a/package.json +++ b/package.json @@ -55,13 +55,13 @@ "@sentry-internal/rrweb-player": "2.9.0", "@sentry-internal/rrweb-snapshot": "2.9.0", "@sentry/babel-plugin-component-annotate": "^2.14.0", - "@sentry/core": "7.103.0", - "@sentry/integrations": "7.103.0", - "@sentry/node": "7.103.0", - "@sentry/react": "7.103.0", + "@sentry/core": "^7.102.0", + "@sentry/integrations": "^7.102.0", + "@sentry/node": "^7.102.0", + "@sentry/react": "^7.102.0", "@sentry/release-parser": "^1.3.1", - "@sentry/types": "7.103.0", - "@sentry/utils": "7.103.0", + "@sentry/types": "^7.102.0", + "@sentry/utils": "^7.102.0", "@spotlightjs/spotlight": "^1.2.13", "@tanstack/react-query": "^4.29.7", "@types/color": "^3.0.3", diff --git a/static/app/bootstrap/initializeSdk.tsx b/static/app/bootstrap/initializeSdk.tsx index 8050a2a7c07be9..b49b87c1a3a812 100644 --- a/static/app/bootstrap/initializeSdk.tsx +++ b/static/app/bootstrap/initializeSdk.tsx @@ -2,6 +2,7 @@ import {browserHistory, createRoutes, match} from 'react-router'; import {extraErrorDataIntegration} from '@sentry/integrations'; import * as Sentry from '@sentry/react'; +import {BrowserTracing} from '@sentry/react'; import {_browserPerformanceTimeOriginMode} from '@sentry/utils'; import type {Event} from '@sentry/types'; @@ -50,21 +51,22 @@ function getSentryIntegrations(routes?: Function) { depth: 6, }), Sentry.metrics.metricsAggregatorIntegration(), - typeof routes === 'function' - ? 
Sentry.reactRouterV3BrowserTracingIntegration({ - history: browserHistory as any, - routes: createRoutes(routes()), - match, - _experiments: { - enableInteractions: true, - }, - }) - : Sentry.browserTracingIntegration({ - _experiments: { - enableInteractions: true, - }, - }), - Sentry.browserProfilingIntegration(), + new BrowserTracing({ + ...(typeof routes === 'function' + ? { + routingInstrumentation: Sentry.reactRouterV3Instrumentation( + browserHistory as any, + createRoutes(routes()), + match + ), + } + : {}), + _experiments: { + enableInteractions: true, + onStartRouteTransaction: Sentry.onProfilingStartRouteTransaction, + }, + }), + new Sentry.BrowserProfilingIntegration(), ]; return integrations; diff --git a/tests/js/setup.ts b/tests/js/setup.ts index b3195f7e5a6bb5..33cf69e1e837b8 100644 --- a/tests/js/setup.ts +++ b/tests/js/setup.ts @@ -124,9 +124,6 @@ jest.mock('@sentry/react', function sentryReact() { }, BrowserTracing: jest.fn().mockReturnValue({}), BrowserProfilingIntegration: jest.fn().mockReturnValue({}), - browserTracingIntegration: jest.fn().mockReturnValue({}), - reactRouterV3BrowserTracingIntegration: jest.fn().mockReturnValue({}), - browserProfilingIntegration: jest.fn().mockReturnValue({}), addGlobalEventProcessor: jest.fn(), BrowserClient: jest.fn().mockReturnValue({ captureEvent: jest.fn(), diff --git a/yarn.lock b/yarn.lock index ff268188eed53c..249581acbb68e4 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2823,14 +2823,14 @@ zod "^3.21.4" zod-validation-error "1.3.1" -"@sentry-internal/feedback@7.103.0": - version "7.103.0" - resolved "https://registry.yarnpkg.com/@sentry-internal/feedback/-/feedback-7.103.0.tgz#0b382b8fc9fc583be1164ff80df07fbe87b565b3" - integrity sha512-2nYoCfP7FpiUR+xxO5y5BL2ajHrhM4fL7HSup6QKNn7gI7vLyllYOOuYFNHhSmsXCD0i00U8DBClGLcn+6DQqw== +"@sentry-internal/feedback@7.102.0": + version "7.102.0" + resolved 
"https://registry.yarnpkg.com/@sentry-internal/feedback/-/feedback-7.102.0.tgz#c3c7f5cfe9726b6d4d1917bd5bd01238154f8ee7" + integrity sha512-GxHdzbOF4tg6TtyQzFqb/8c/p07n68qZC5KYwzs7AuW5ey0IPmdC58pOh3Kk52JA0P69/RZy39+r1p1Swr6C+Q== dependencies: - "@sentry/core" "7.103.0" - "@sentry/types" "7.103.0" - "@sentry/utils" "7.103.0" + "@sentry/core" "7.102.0" + "@sentry/types" "7.102.0" + "@sentry/utils" "7.102.0" "@sentry-internal/global-search@^0.5.7": version "0.5.7" @@ -2851,15 +2851,15 @@ resolved "https://registry.yarnpkg.com/@sentry-internal/react-inspector/-/react-inspector-6.0.1-4.tgz#10758f3461cf2cf48df8c80f0514c55ca18872c5" integrity sha512-uL2RyvW8EqDEchnbo8Hu/c4IpBqM3LLxUpZPHs8o40kynerzPset6bC/m5SU124gEhy4PqjdvJ7DhTYR75NetQ== -"@sentry-internal/replay-canvas@7.103.0": - version "7.103.0" - resolved "https://registry.yarnpkg.com/@sentry-internal/replay-canvas/-/replay-canvas-7.103.0.tgz#d601e56c04fd51b226f6c2933e28a8ce6955e3ba" - integrity sha512-EyDRMdlSqtwY8zGFhOWwl+nwwo98hlhJz+bpF5PQ6VmFpbplh6Wqfx2p+cPXQr40TGMMC4+vPFlSWTOMjcO9zQ== +"@sentry-internal/replay-canvas@7.102.0": + version "7.102.0" + resolved "https://registry.yarnpkg.com/@sentry-internal/replay-canvas/-/replay-canvas-7.102.0.tgz#2bed235bbcb22eea71c39650096d21a3ed32f466" + integrity sha512-rgNO4PdFv0AYflBsCNbSIwpQuOOJQTqyu8i8U0PupjveNjkm0CUJhber/ZOcaGmbyjdvwikGwgWY2O0Oj0USCA== dependencies: - "@sentry/core" "7.103.0" - "@sentry/replay" "7.103.0" - "@sentry/types" "7.103.0" - "@sentry/utils" "7.103.0" + "@sentry/core" "7.102.0" + "@sentry/replay" "7.102.0" + "@sentry/types" "7.102.0" + "@sentry/utils" "7.102.0" "@sentry-internal/rrdom@2.9.0": version "2.9.0" @@ -2902,49 +2902,49 @@ fflate "^0.4.4" mitt "^3.0.0" -"@sentry-internal/tracing@7.103.0": - version "7.103.0" - resolved "https://registry.yarnpkg.com/@sentry-internal/tracing/-/tracing-7.103.0.tgz#b18ef65f610099ee2fc74f91f9ccfdb0353580c4" - integrity 
sha512-sZ/Wao8HOvGaBs7WlOdflMpHGAFkOBWL6hBiirHaOy5d+IDm7n7et5U6zhvcfiyYBO4nY36gy1Tg5mw+aNO0Vw== +"@sentry-internal/tracing@7.102.0": + version "7.102.0" + resolved "https://registry.yarnpkg.com/@sentry-internal/tracing/-/tracing-7.102.0.tgz#24cf662e1eb5623f6d5197e78c66d7b257560eb8" + integrity sha512-BlE33HWL1IzkGa0W+pwTiyu01MUIfYf+WnO9UC8qkDW3jxVvg2zhoSjXSxikT+KPCOgoZpQHspaTzwjnI1LCvw== dependencies: - "@sentry/core" "7.103.0" - "@sentry/types" "7.103.0" - "@sentry/utils" "7.103.0" + "@sentry/core" "7.102.0" + "@sentry/types" "7.102.0" + "@sentry/utils" "7.102.0" "@sentry/babel-plugin-component-annotate@^2.14.0": version "2.14.0" resolved "https://registry.yarnpkg.com/@sentry/babel-plugin-component-annotate/-/babel-plugin-component-annotate-2.14.0.tgz#e62f448dd3c922a6d32e9f1c0a5ae85fa6ec22c2" integrity sha512-FWU4+Lx6fgxjAkwmc3S9j1Q/6pqKZyZzfi52B+8WMNw7a5QjGXgxc5ucBazZYgrcsJKCFBp4QG3PPxNAieFimQ== -"@sentry/browser@7.103.0": - version "7.103.0" - resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-7.103.0.tgz#b509394d238e67f6225339c242701710ea347508" - integrity sha512-lP3Oplnwo1lY8ltk8SWzQURbxnSfVhYA099mVs1T95sdwXS16Za6SX7Ld/9T506ZW/WyoU4VCq7eKtG2kPFhMQ== - dependencies: - "@sentry-internal/feedback" "7.103.0" - "@sentry-internal/replay-canvas" "7.103.0" - "@sentry-internal/tracing" "7.103.0" - "@sentry/core" "7.103.0" - "@sentry/replay" "7.103.0" - "@sentry/types" "7.103.0" - "@sentry/utils" "7.103.0" - -"@sentry/core@7.103.0": - version "7.103.0" - resolved "https://registry.yarnpkg.com/@sentry/core/-/core-7.103.0.tgz#8f626362c96f9aa4b4a52042c431d16372491dc1" - integrity sha512-LCI+PIDoF/RLqN41fNXum3ilmS6ukni6L7t38vSdibbe2G0804EbPLtOIpv2PkS8E6CFuRW5zOb+8OwEAAtZWw== - dependencies: - "@sentry/types" "7.103.0" - "@sentry/utils" "7.103.0" - -"@sentry/integrations@7.103.0": - version "7.103.0" - resolved "https://registry.yarnpkg.com/@sentry/integrations/-/integrations-7.103.0.tgz#58a52a1644ec18a49bcf6091e6f847036d64c679" - integrity 
sha512-jS1vQqBBF776xFpht4xS5cJRztbpskFELeZX57pELzy/J7PNjbO0/oypP1qK7budMxxkazJhkcNwJw9eUFT0pg== - dependencies: - "@sentry/core" "7.103.0" - "@sentry/types" "7.103.0" - "@sentry/utils" "7.103.0" +"@sentry/browser@7.102.0": + version "7.102.0" + resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-7.102.0.tgz#335f51d01aabf8c4d2abc871855f9c2d19f8f70d" + integrity sha512-hIggcMnojIbWhbmlRfkykHmy6n7pjug0AHfF19HRUQxAx9KJfMH5YdWvohov0Hb9fS+jdvqgE+/4AWbEeXQrHw== + dependencies: + "@sentry-internal/feedback" "7.102.0" + "@sentry-internal/replay-canvas" "7.102.0" + "@sentry-internal/tracing" "7.102.0" + "@sentry/core" "7.102.0" + "@sentry/replay" "7.102.0" + "@sentry/types" "7.102.0" + "@sentry/utils" "7.102.0" + +"@sentry/core@7.102.0", "@sentry/core@^7.102.0": + version "7.102.0" + resolved "https://registry.yarnpkg.com/@sentry/core/-/core-7.102.0.tgz#da5e04a5fe97ed91464944dac40b813e6f8aa453" + integrity sha512-GO9eLOSBK1waW4AD0wDXAreaNqXFQ1MPQZrkKcN+GJYEFhJK1+u+MSV7vO5Fs/rIfaTZIZ2jtEkxSSAOucE8EQ== + dependencies: + "@sentry/types" "7.102.0" + "@sentry/utils" "7.102.0" + +"@sentry/integrations@^7.102.0": + version "7.102.0" + resolved "https://registry.yarnpkg.com/@sentry/integrations/-/integrations-7.102.0.tgz#27045133517b97dd21b83fb270810b3675a58745" + integrity sha512-WW7DiAcihi+Fya2YrB6lEUzDAIPuO23wDm4tLJ9vQpMw4LaTj/XkulITTXFI7XLJLzs5Eks9pIfZJdmKrqjchA== + dependencies: + "@sentry/core" "7.102.0" + "@sentry/types" "7.102.0" + "@sentry/utils" "7.102.0" localforage "^1.8.1" "@sentry/jest-environment@^4.0.0": @@ -2952,15 +2952,15 @@ resolved "https://registry.yarnpkg.com/@sentry/jest-environment/-/jest-environment-4.0.0.tgz#037844bed70c8f13259ee01ab65ff8d36aef0209" integrity sha512-91jLBS8KbX2Ng0aDSP7kdE9sjiLc4qjp/jczTbmvOvuHxoaQ9hSLaEpsthnnUQ/zNeprZMkOC9xlS+zABw3Zmw== -"@sentry/node@7.103.0": - version "7.103.0" - resolved "https://registry.yarnpkg.com/@sentry/node/-/node-7.103.0.tgz#9cf488086717c2920c43568432d14232b6783a9e" - integrity 
sha512-/bS/WNeO+PEd0r3o3LN0XGJV+l7hLNy1dTcn61VRgWGVs8SqMBb3uAvXAibZ9zGTCkaX/Ky3JumMcOOoxmNCtg== +"@sentry/node@^7.102.0": + version "7.102.0" + resolved "https://registry.yarnpkg.com/@sentry/node/-/node-7.102.0.tgz#f2853bad8650b1f94a57ae3bafad3440740f98ab" + integrity sha512-ZS1s2uO/+K4rHkmWjyqm5Jtl6dT7klbZSMvn4tfIpkfWuqrs7pP0jaATyvmF+96z3lpq6fRAJliV5tRqPy7w5Q== dependencies: - "@sentry-internal/tracing" "7.103.0" - "@sentry/core" "7.103.0" - "@sentry/types" "7.103.0" - "@sentry/utils" "7.103.0" + "@sentry-internal/tracing" "7.102.0" + "@sentry/core" "7.102.0" + "@sentry/types" "7.102.0" + "@sentry/utils" "7.102.0" "@sentry/profiling-node@^1.3.5": version "1.3.5" @@ -2970,15 +2970,15 @@ detect-libc "^2.0.2" node-abi "^3.52.0" -"@sentry/react@7.103.0": - version "7.103.0" - resolved "https://registry.yarnpkg.com/@sentry/react/-/react-7.103.0.tgz#797016011b333963db9dc26dcc4540b69c331901" - integrity sha512-EyWZi9u94RShzAK/vpkXNCdkocMtb98AbxRZOjuFL+pf4jjwxdPRekGGzXz7St0cf56Mv3oHNk6QGDA0PF7mlQ== +"@sentry/react@^7.102.0": + version "7.102.0" + resolved "https://registry.yarnpkg.com/@sentry/react/-/react-7.102.0.tgz#1997518c60a0b27f2c9b540937bc29af0ffb6179" + integrity sha512-Dz2JZwQMU/gpAVRHz6usMGgDF5Y0QcPUAnRoNpewEanZW7nChN8FsIYjOkvEbbsgk8bAlAjWErNlKGfl0B3YoA== dependencies: - "@sentry/browser" "7.103.0" - "@sentry/core" "7.103.0" - "@sentry/types" "7.103.0" - "@sentry/utils" "7.103.0" + "@sentry/browser" "7.102.0" + "@sentry/core" "7.102.0" + "@sentry/types" "7.102.0" + "@sentry/utils" "7.102.0" hoist-non-react-statics "^3.3.2" "@sentry/release-parser@^1.3.1": @@ -2986,27 +2986,27 @@ resolved "https://registry.yarnpkg.com/@sentry/release-parser/-/release-parser-1.3.1.tgz#0ab8be23fd494d80dd0e4ec8ae5f3d13f805b13d" integrity sha512-/dGpCq+j3sJhqQ14RNEEL45Ot/rgq3jAlZDD/8ufeqq+W8p4gUhSrbGWCRL82NEIWY9SYwxYXGXjRcVPSHiA1Q== -"@sentry/replay@7.103.0": - version "7.103.0" - resolved 
"https://registry.yarnpkg.com/@sentry/replay/-/replay-7.103.0.tgz#6b420f1a33784e76533bb72ea9743e8ad34c0b23" - integrity sha512-I37komyb+DruQG8lPPPOFxLLbOijNXeTxiWLsIn+KFZqRtKqxxQWdNnk56V4YSTpFzxnMEFMRCpXhncuTWu4LA== +"@sentry/replay@7.102.0": + version "7.102.0" + resolved "https://registry.yarnpkg.com/@sentry/replay/-/replay-7.102.0.tgz#209b7adb68e89772824218ecab498d3a6fbc2c42" + integrity sha512-sUIBN4ZY0J5/dQS3KOe5VLykm856KZkTrhV8kmBEylzQhw1BBc8i2ehTILy5ZYh9Ra8uXPTAmtwpvYf/dRDfAg== dependencies: - "@sentry-internal/tracing" "7.103.0" - "@sentry/core" "7.103.0" - "@sentry/types" "7.103.0" - "@sentry/utils" "7.103.0" + "@sentry-internal/tracing" "7.102.0" + "@sentry/core" "7.102.0" + "@sentry/types" "7.102.0" + "@sentry/utils" "7.102.0" -"@sentry/types@7.103.0": - version "7.103.0" - resolved "https://registry.yarnpkg.com/@sentry/types/-/types-7.103.0.tgz#f413f922216c97ec86bae39f9d527669d8afedbd" - integrity sha512-NCvKyx8d2AGBQKPARrJemZmZ16DiMo688OEikZg4BbvFNDUzK5Egm2BH0vfLDhbNkU19o3maJowrYo42m8r9Zw== +"@sentry/types@7.102.0", "@sentry/types@^7.102.0": + version "7.102.0" + resolved "https://registry.yarnpkg.com/@sentry/types/-/types-7.102.0.tgz#b31e9faa54036053ab82c09c3c855035a4889c59" + integrity sha512-FPfFBP0x3LkPARw1/6cWySLq1djIo8ao3Qo2KNBeE9CHdq8bsS1a8zzjJLuWG4Ww+wieLP8/lY3WTgrCz4jowg== -"@sentry/utils@7.103.0": - version "7.103.0" - resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-7.103.0.tgz#803b76e2adfdcec0d4ab6369cc8990dde19b55f4" - integrity sha512-phkUJt3F0UOkVq+M4GfdAh2ewI3ASrNiJddx9aO7GnT0aDwwVBHZltnqt95qgAB8W+BipTSt1dAh8yUbbq1Ceg== +"@sentry/utils@7.102.0", "@sentry/utils@^7.102.0": + version "7.102.0" + resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-7.102.0.tgz#66325f2567986cc3fd12fbdb980fb8ada170342b" + integrity sha512-cp5KCRe0slOVMwG4iP2Z4UajQkjryRTiFskZ5H7Q3X9R5voM8+DAhiDcIW88GL9NxqyUrAJOjmKdeLK2vM+bdA== dependencies: - "@sentry/types" "7.103.0" + "@sentry/types" "7.102.0" "@sinclair/typebox@^0.27.8": version "0.27.8" 
From e3330ff37d057ac7fe1cf9053bc7e4ab84889fd5 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Wed, 6 Mar 2024 08:46:31 -0800 Subject: [PATCH 103/145] ref(onboarding): refactor useCurrentProjectState to be generalized (#66118) - DDM, Replays, and Feedback onboarding were all using some variation of this hook: `useCurrentProjectState`. They're all pretty similar, so I refactored them into one general reusable hook - I modified the respective `sidebar.tsx` files to use this new hook - I also added a hook test --- .../feedback/feedbackOnboarding/sidebar.tsx | 15 +- .../utils/useCurrentProjectState.spec.tsx | 138 ++++++++++++++++++ .../utils}/useCurrentProjectState.tsx | 54 +++++-- .../components/replaysOnboarding/sidebar.tsx | 6 +- .../useCurrentProjectState.tsx | 99 ------------- .../components/replaysOnboarding/utils.tsx | 14 +- static/app/data/platformCategories.tsx | 11 +- .../app/views/ddm/ddmOnboarding/sidebar.tsx | 15 +- .../ddmOnboarding/useCurrentProjectState.tsx | 93 ------------ 9 files changed, 212 insertions(+), 233 deletions(-) create mode 100644 static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx rename static/app/components/{feedback/feedbackOnboarding => onboarding/gettingStartedDoc/utils}/useCurrentProjectState.tsx (53%) delete mode 100644 static/app/components/replaysOnboarding/useCurrentProjectState.tsx delete mode 100644 static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx diff --git a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx index f96669948b06ff..3094025285d62d 100644 --- a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx +++ b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx @@ -8,12 +8,12 @@ import HighlightTopRightPattern from 'sentry-images/pattern/highlight-top-right. 
import {Button} from 'sentry/components/button'; import {CompactSelect} from 'sentry/components/compactSelect'; import {FeedbackOnboardingLayout} from 'sentry/components/feedback/feedbackOnboarding/feedbackOnboardingLayout'; -import useCurrentProjectState from 'sentry/components/feedback/feedbackOnboarding/useCurrentProjectState'; import useLoadFeedbackOnboardingDoc from 'sentry/components/feedback/feedbackOnboarding/useLoadFeedbackOnboardingDoc'; import RadioGroup from 'sentry/components/forms/controls/radioGroup'; import IdBadge from 'sentry/components/idBadge'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import {FeedbackOnboardingWebApiBanner} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding'; +import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; import {PlatformOptionDropdown} from 'sentry/components/replaysOnboarding/platformOptionDropdown'; import {replayJsFrameworkOptions} from 'sentry/components/replaysOnboarding/utils'; import SidebarPanel from 'sentry/components/sidebar/sidebarPanel'; @@ -43,12 +43,15 @@ function FeedbackOnboardingSidebar(props: CommonSidebarProps) { const isActive = currentPanel === SidebarPanelKey.FEEDBACK_ONBOARDING; const hasProjectAccess = organization.access.includes('project:read'); - const {projects, currentProject, setCurrentProject} = useCurrentProjectState({ + const {allProjects, currentProject, setCurrentProject} = useCurrentProjectState({ currentPanel, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, }); const projectSelectOptions = useMemo(() => { - const supportedProjectItems: SelectValue[] = projects + const supportedProjectItems: SelectValue[] = allProjects .sort((aProject, bProject) => { // if we're comparing two projects w/ or w/o feedback alphabetical sort if (aProject.hasNewFeedbacks === 
bProject.hasNewFeedbacks) { @@ -73,7 +76,7 @@ function FeedbackOnboardingSidebar(props: CommonSidebarProps) { options: supportedProjectItems, }, ]; - }, [projects]); + }, [allProjects]); if (!isActive || !hasProjectAccess || !currentProject) { return null; @@ -112,7 +115,9 @@ function FeedbackOnboardingSidebar(props: CommonSidebarProps) { ) } value={currentProject?.id} - onChange={opt => setCurrentProject(projects.find(p => p.id === opt.value))} + onChange={opt => + setCurrentProject(allProjects.find(p => p.id === opt.value)) + } triggerProps={{'aria-label': currentProject?.slug}} options={projectSelectOptions} position="bottom-end" diff --git a/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx new file mode 100644 index 00000000000000..7fe73d4aee2647 --- /dev/null +++ b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx @@ -0,0 +1,138 @@ +import {ProjectFixture} from 'sentry-fixture/project'; + +import {reactHooks} from 'sentry-test/reactTestingLibrary'; + +import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; +import {SidebarPanelKey} from 'sentry/components/sidebar/types'; +import { + customMetricOnboardingPlatforms, + customMetricPlatforms, + feedbackOnboardingPlatforms, + replayOnboardingPlatforms, + replayPlatforms, +} from 'sentry/data/platformCategories'; +import PageFiltersStore from 'sentry/stores/pageFiltersStore'; +import ProjectsStore from 'sentry/stores/projectsStore'; +import type {Project} from 'sentry/types'; + +function mockPageFilterStore(projects: Project[]) { + PageFiltersStore.init(); + PageFiltersStore.onInitializeUrlState( + { + projects: projects.map(p => parseInt(p.id, 10)), + environments: [], + datetime: { + period: '7d', + start: null, + end: null, + utc: null, + }, + }, + new Set() + ); +} + 
+describe('useCurrentProjectState', () => { + const rust_1 = ProjectFixture({id: '1', platform: 'rust'}); + const rust_2 = ProjectFixture({id: '2', platform: 'rust'}); + const javascript = ProjectFixture({id: '3', platform: 'javascript'}); + const angular = ProjectFixture({id: '4', platform: 'javascript-angular'}); + + it('should return currentProject=undefined when currentPanel != targetPanel', () => { + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, + }, + }); + expect(result.current.currentProject).toBe(undefined); + }); + + it('should return the currentProject when currentPanel = targetPanel', () => { + ProjectsStore.loadInitialData([javascript]); + mockPageFilterStore([javascript]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.METRICS_ONBOARDING, + targetPanel: SidebarPanelKey.METRICS_ONBOARDING, + onboardingPlatforms: customMetricOnboardingPlatforms, + allPlatforms: customMetricPlatforms, + }, + }); + expect(result.current.currentProject).toBe(javascript); + }); + + it('should return the first project if global selection does not have onboarding', () => { + ProjectsStore.loadInitialData([rust_1, rust_2]); + mockPageFilterStore([rust_1, rust_2]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + targetPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + onboardingPlatforms: replayOnboardingPlatforms, + allPlatforms: replayPlatforms, + }, + }); + expect(result.current.currentProject).toBe(rust_1); + }); + + it('should return the first onboarding project', () => { + ProjectsStore.loadInitialData([rust_1, javascript]); + mockPageFilterStore([rust_1, javascript]); + const {result} 
= reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, + }, + }); + expect(result.current.currentProject).toBe(javascript); + }); + + it('should return the first project if no selection', () => { + ProjectsStore.loadInitialData([rust_1, javascript]); + mockPageFilterStore([]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, + }, + }); + expect(result.current.currentProject).toBe(javascript); + }); + + it('should return undefined if no selection and no projects have onboarding', () => { + ProjectsStore.loadInitialData([rust_1, rust_2]); + mockPageFilterStore([]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, + }, + }); + expect(result.current.currentProject).toBe(undefined); + }); + + it('should override current project if setCurrentProjects is called', () => { + ProjectsStore.loadInitialData([javascript, angular]); + mockPageFilterStore([javascript, angular]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, + }, + }); + expect(result.current.currentProject).toBe(javascript); + reactHooks.act(() => result.current.setCurrentProject(angular)); + 
expect(result.current.currentProject).toBe(angular); + }); +}); diff --git a/static/app/components/feedback/feedbackOnboarding/useCurrentProjectState.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.tsx similarity index 53% rename from static/app/components/feedback/feedbackOnboarding/useCurrentProjectState.tsx rename to static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.tsx index b00fa5c0f6e9e0..e1ad3d0522a36e 100644 --- a/static/app/components/feedback/feedbackOnboarding/useCurrentProjectState.tsx +++ b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.tsx @@ -1,24 +1,40 @@ -import {useEffect, useState} from 'react'; +import {useEffect, useMemo, useState} from 'react'; +import partition from 'lodash/partition'; -import {SidebarPanelKey} from 'sentry/components/sidebar/types'; -import {feedbackOnboardingPlatforms} from 'sentry/data/platformCategories'; +import type {SidebarPanelKey} from 'sentry/components/sidebar/types'; import PageFiltersStore from 'sentry/stores/pageFiltersStore'; import {useLegacyStore} from 'sentry/stores/useLegacyStore'; -import type {Project} from 'sentry/types'; +import type {PlatformKey, Project} from 'sentry/types'; import useProjects from 'sentry/utils/useProjects'; -function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanelKey}) { +type Props = { + allPlatforms: readonly PlatformKey[]; + currentPanel: '' | SidebarPanelKey; + onboardingPlatforms: readonly PlatformKey[]; + targetPanel: SidebarPanelKey; +}; + +function useCurrentProjectState({ + currentPanel, + targetPanel, + onboardingPlatforms, + allPlatforms, +}: Props) { const [currentProject, setCurrentProject] = useState(undefined); const {projects, initiallyLoaded: projectsLoaded} = useProjects(); const {selection, isReady} = useLegacyStore(PageFiltersStore); - const isActive = currentPanel === SidebarPanelKey.FEEDBACK_ONBOARDING; + const isActive = currentPanel === 
targetPanel; // Projects with onboarding instructions const projectsWithOnboarding = projects.filter( - p => p.platform && feedbackOnboardingPlatforms.includes(p.platform) + p => p.platform && onboardingPlatforms.includes(p.platform) ); + const [supportedProjects, unsupportedProjects] = useMemo(() => { + return partition(projects, p => p.platform && allPlatforms.includes(p.platform)); + }, [projects, allPlatforms]); + useEffect(() => { if (!isActive) { setCurrentProject(undefined); @@ -30,7 +46,8 @@ function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanel !projectsLoaded || !projects.length || !isReady || - !projectsWithOnboarding + !projectsWithOnboarding || + !supportedProjects ) { return; } @@ -48,13 +65,23 @@ function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanel return; } + // If we selected something that supports the product pick that + const projectSupportsProduct = supportedProjects.find(p => + selectedProjectIds.includes(p.id) + ); + + if (projectSupportsProduct) { + setCurrentProject(projectSupportsProduct); + return; + } + // Otherwise, just pick the first selected project const firstSelectedProject = projects.find(p => selectedProjectIds.includes(p.id)); setCurrentProject(firstSelectedProject); return; } // No selection, so pick the first project with onboarding - setCurrentProject(projectsWithOnboarding.at(0)); + setCurrentProject(projectsWithOnboarding.at(0) || supportedProjects.at(0)); return; }, [ currentProject, @@ -64,13 +91,18 @@ function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanel isActive, selection.projects, projectsWithOnboarding, + supportedProjects, ]); return { - projectsWithOnboarding, - projects, + projects: supportedProjects, + allProjects: projects, currentProject, setCurrentProject, + hasDocs: + !!currentProject?.platform && onboardingPlatforms.includes(currentProject.platform), + supportedProjects, + unsupportedProjects, }; } diff --git 
a/static/app/components/replaysOnboarding/sidebar.tsx b/static/app/components/replaysOnboarding/sidebar.tsx index 00a7d06f7c905f..44307fe38609b3 100644 --- a/static/app/components/replaysOnboarding/sidebar.tsx +++ b/static/app/components/replaysOnboarding/sidebar.tsx @@ -10,10 +10,10 @@ import {CompactSelect} from 'sentry/components/compactSelect'; import RadioGroup from 'sentry/components/forms/controls/radioGroup'; import IdBadge from 'sentry/components/idBadge'; import LoadingIndicator from 'sentry/components/loadingIndicator'; +import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; import useOnboardingDocs from 'sentry/components/onboardingWizard/useOnboardingDocs'; import {PlatformOptionDropdown} from 'sentry/components/replaysOnboarding/platformOptionDropdown'; import {ReplayOnboardingLayout} from 'sentry/components/replaysOnboarding/replayOnboardingLayout'; -import useCurrentProjectState from 'sentry/components/replaysOnboarding/useCurrentProjectState'; import useLoadOnboardingDoc from 'sentry/components/replaysOnboarding/useLoadOnboardingDoc'; import { generateDocKeys, @@ -30,6 +30,7 @@ import { replayBackendPlatforms, replayFrontendPlatforms, replayJsLoaderInstructionsPlatformList, + replayOnboardingPlatforms, replayPlatforms, } from 'sentry/data/platformCategories'; import platforms, {otherPlatform} from 'sentry/data/platforms'; @@ -59,6 +60,9 @@ function ReplaysOnboardingSidebar(props: CommonSidebarProps) { unsupportedProjects, } = useCurrentProjectState({ currentPanel, + targetPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + onboardingPlatforms: replayOnboardingPlatforms, + allPlatforms: replayPlatforms, }); const projectSelectOptions = useMemo(() => { diff --git a/static/app/components/replaysOnboarding/useCurrentProjectState.tsx b/static/app/components/replaysOnboarding/useCurrentProjectState.tsx deleted file mode 100644 index da95b94d335b43..00000000000000 --- 
a/static/app/components/replaysOnboarding/useCurrentProjectState.tsx +++ /dev/null @@ -1,99 +0,0 @@ -import {useEffect, useMemo, useState} from 'react'; - -import {splitProjectsByReplaySupport} from 'sentry/components/replaysOnboarding/utils'; -import {SidebarPanelKey} from 'sentry/components/sidebar/types'; -import {replayOnboardingPlatforms, replayPlatforms} from 'sentry/data/platformCategories'; -import PageFiltersStore from 'sentry/stores/pageFiltersStore'; -import {useLegacyStore} from 'sentry/stores/useLegacyStore'; -import type {Project} from 'sentry/types'; -import useProjects from 'sentry/utils/useProjects'; - -function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanelKey}) { - const [currentProject, setCurrentProject] = useState(undefined); - const {projects, initiallyLoaded: projectsLoaded} = useProjects(); - const {selection, isReady} = useLegacyStore(PageFiltersStore); - - const isActive = currentPanel === SidebarPanelKey.REPLAYS_ONBOARDING; - - // Projects where we have the onboarding instructions ready: - const projectsWithOnboarding = useMemo( - () => - projects.filter( - p => p.platform && replayOnboardingPlatforms.includes(p.platform) && !p.hasReplays - ), - [projects] - ); - - // Projects that support replays, but we haven't created the onboarding instructions (yet): - const projectWithReplaySupport = useMemo( - () => - projects.filter( - p => p.platform && replayPlatforms.includes(p.platform) && !p.hasReplays - ), - [projects] - ); - - useEffect(() => { - if (!isActive) { - setCurrentProject(undefined); - } - }, [isActive]); - - useEffect(() => { - if (currentProject || !projectsLoaded || !projects.length || !isReady || !isActive) { - return; - } - - if (!projectWithReplaySupport) { - return; - } - - if (selection.projects.length) { - const selectedProjectIds = selection.projects.map(String); - // If we selected something that has onboarding instructions, pick that first - const projectForOnboarding = 
projectsWithOnboarding.find(p => - selectedProjectIds.includes(p.id) - ); - if (projectForOnboarding) { - setCurrentProject(projectForOnboarding); - } - - // If we selected something that supports replays pick that - const projectSupportsReplay = projectWithReplaySupport.find(p => - selectedProjectIds.includes(p.id) - ); - if (projectSupportsReplay) { - setCurrentProject(projectSupportsReplay); - } - const firstSelectedProject = projects.find(p => selectedProjectIds.includes(p.id)); - setCurrentProject(firstSelectedProject); - } else { - // We have no selection, so pick a project which we've found - setCurrentProject(projectsWithOnboarding.at(0) || projectWithReplaySupport.at(0)); - } - }, [ - currentProject, - projectsLoaded, - projects, - isReady, - isActive, - selection.projects, - projectsWithOnboarding, - projectWithReplaySupport, - ]); - - const {supported, unsupported} = useMemo(() => { - return splitProjectsByReplaySupport(projects); - }, [projects]); - - return { - projects: projectWithReplaySupport, - allProjects: projects, - supportedProjects: supported, - unsupportedProjects: unsupported, - currentProject, - setCurrentProject, - }; -} - -export default useCurrentProjectState; diff --git a/static/app/components/replaysOnboarding/utils.tsx b/static/app/components/replaysOnboarding/utils.tsx index 032a0b1575a4d2..53b9ec15ada10f 100644 --- a/static/app/components/replaysOnboarding/utils.tsx +++ b/static/app/components/replaysOnboarding/utils.tsx @@ -1,8 +1,6 @@ -import partition from 'lodash/partition'; - import {replayFrontendPlatforms, replayPlatforms} from 'sentry/data/platformCategories'; import platforms from 'sentry/data/platforms'; -import type {PlatformIntegration, PlatformKey, Project} from 'sentry/types'; +import type {PlatformIntegration, PlatformKey} from 'sentry/types'; export function generateDocKeys(platform: PlatformKey): string[] { const platformKey = platform.startsWith('javascript') @@ -17,16 +15,6 @@ export function 
isPlatformSupported(platform: undefined | PlatformIntegration) { return platform?.id ? replayPlatforms.includes(platform?.id) : false; } -export function splitProjectsByReplaySupport(projects: Project[]) { - const [supported, unsupported] = partition(projects, project => - replayPlatforms.includes(project.platform!) - ); - return { - supported, - unsupported, - }; -} - export const replayJsFrameworkOptions: PlatformIntegration[] = platforms.filter(p => replayFrontendPlatforms.includes(p.id) ); diff --git a/static/app/data/platformCategories.tsx b/static/app/data/platformCategories.tsx index 8737e46e1c8e96..bc76b683d4943f 100644 --- a/static/app/data/platformCategories.tsx +++ b/static/app/data/platformCategories.tsx @@ -517,17 +517,17 @@ const customMetricFrontendPlatforms: readonly PlatformKey[] = [ ]; // These are all the platforms that can set up custom metrics. -export const customMetricPlatforms: Set = new Set([ +export const customMetricPlatforms: readonly PlatformKey[] = [ ...customMetricFrontendPlatforms, ...customMetricBackendPlatforms, -]); +]; /** * The list of platforms for which we have created onboarding instructions. * Should be a subset of the list of `customMetricPlatforms`. 
*/ -export const customMetricOnboardingPlatforms = new Set( - [...customMetricPlatforms].filter( +export const customMetricOnboardingPlatforms: readonly PlatformKey[] = + customMetricPlatforms.filter( p => // Legacy platforms that do not have in-product docs ![ @@ -537,5 +537,4 @@ export const customMetricOnboardingPlatforms = new Set( 'python-pylons', 'python-tryton', ].includes(p) - ) -); + ); diff --git a/static/app/views/ddm/ddmOnboarding/sidebar.tsx b/static/app/views/ddm/ddmOnboarding/sidebar.tsx index 46dcacf2714b38..fbdcc26ec8d99a 100644 --- a/static/app/views/ddm/ddmOnboarding/sidebar.tsx +++ b/static/app/views/ddm/ddmOnboarding/sidebar.tsx @@ -7,10 +7,14 @@ import {LinkButton} from 'sentry/components/button'; import {CompactSelect} from 'sentry/components/compactSelect'; import IdBadge from 'sentry/components/idBadge'; import {SdkDocumentation} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; +import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; import SidebarPanel from 'sentry/components/sidebar/sidebarPanel'; import type {CommonSidebarProps} from 'sentry/components/sidebar/types'; import {SidebarPanelKey} from 'sentry/components/sidebar/types'; -import {customMetricPlatforms} from 'sentry/data/platformCategories'; +import { + customMetricOnboardingPlatforms, + customMetricPlatforms, +} from 'sentry/data/platformCategories'; import platforms from 'sentry/data/platforms'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; @@ -18,8 +22,6 @@ import type {Project, SelectValue} from 'sentry/types'; import {METRICS_DOCS_URL} from 'sentry/utils/metrics/constants'; import useOrganization from 'sentry/utils/useOrganization'; -import {useCurrentProjectState} from './useCurrentProjectState'; - function MetricsOnboardingSidebar(props: CommonSidebarProps) { const {currentPanel, collapsed, hidePanel, orientation} = props; const organization = 
useOrganization(); @@ -36,7 +38,10 @@ function MetricsOnboardingSidebar(props: CommonSidebarProps) { unsupportedProjects, hasDocs, } = useCurrentProjectState({ - isActive, + currentPanel, + targetPanel: SidebarPanelKey.METRICS_ONBOARDING, + onboardingPlatforms: customMetricOnboardingPlatforms, + allPlatforms: customMetricPlatforms, }); const projectSelectOptions = useMemo(() => { @@ -150,7 +155,7 @@ function OnboardingContent({ : undefined; const supportsCustomMetrics = - currentProject.platform && customMetricPlatforms.has(currentProject.platform); + currentProject.platform && customMetricPlatforms.includes(currentProject.platform); if (!supportsCustomMetrics) { return ( diff --git a/static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx b/static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx deleted file mode 100644 index 3202ba7e8ea738..00000000000000 --- a/static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx +++ /dev/null @@ -1,93 +0,0 @@ -import {useEffect, useMemo, useState} from 'react'; -import partition from 'lodash/partition'; - -import { - customMetricOnboardingPlatforms, - customMetricPlatforms, -} from 'sentry/data/platformCategories'; -import PageFiltersStore from 'sentry/stores/pageFiltersStore'; -import {useLegacyStore} from 'sentry/stores/useLegacyStore'; -import type {Project} from 'sentry/types'; -import useProjects from 'sentry/utils/useProjects'; - -export function useCurrentProjectState({isActive}: {isActive: boolean}) { - const [currentProject, setCurrentProject] = useState(undefined); - const {projects, initiallyLoaded: projectsLoaded} = useProjects(); - const {selection, isReady} = useLegacyStore(PageFiltersStore); - - const [supportedProjects, unsupportedProjects] = useMemo(() => { - return partition(projects, p => p.platform && customMetricPlatforms.has(p.platform)); - }, [projects]); - - // Projects where we have the onboarding instructions ready: - const projectsWithOnboarding = useMemo( - () => - 
supportedProjects.filter( - p => p.platform && customMetricOnboardingPlatforms.has(p.platform) - ), - [supportedProjects] - ); - - useEffect(() => { - if (!isActive) { - setCurrentProject(undefined); - } - }, [isActive]); - - useEffect(() => { - if (currentProject || !projectsLoaded || !projects.length || !isReady || !isActive) { - return; - } - - if (!supportedProjects) { - return; - } - - if (selection.projects.length) { - const selectedProjectIds = selection.projects.map(String); - // If we selected something that has onboarding instructions, pick that first - const projectWithOnboarding = projectsWithOnboarding.find(p => - selectedProjectIds.includes(p.id) - ); - if (projectWithOnboarding) { - setCurrentProject(projectWithOnboarding); - return; - } - - // If we selected something that supports custom metrics pick that - const projectSupportsMetrics = supportedProjects.find(p => - selectedProjectIds.includes(p.id) - ); - if (projectSupportsMetrics) { - setCurrentProject(projectSupportsMetrics); - return; - } - // Else pick the first selected project - const firstSelectedProject = projects.find(p => selectedProjectIds.includes(p.id)); - setCurrentProject(firstSelectedProject); - } else { - setCurrentProject(projectsWithOnboarding.at(0) || supportedProjects.at(0)); - } - }, [ - currentProject, - projectsLoaded, - projects, - isReady, - isActive, - selection.projects, - projectsWithOnboarding, - supportedProjects, - ]); - - return { - projects: supportedProjects, - hasDocs: - !!currentProject?.platform && - customMetricOnboardingPlatforms.has(currentProject.platform), - allProjects: projects, - supportedProjects, - unsupportedProjects, - currentProject, - setCurrentProject, - }; -} From 6e2a7b37b1cc0244fc786ba692b96c3d5a7b5b79 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Wed, 6 Mar 2024 16:57:32 +0000 Subject: [PATCH 104/145] Revert "ref(onboarding): refactor useCurrentProjectState to be generalized (#66118)" This reverts commit 
03677ee022b291e55c6ec5c7f46af63379ae9375. Co-authored-by: michellewzhang <56095982+michellewzhang@users.noreply.github.com> --- .../feedback/feedbackOnboarding/sidebar.tsx | 15 +- .../useCurrentProjectState.tsx | 54 ++----- .../utils/useCurrentProjectState.spec.tsx | 138 ------------------ .../components/replaysOnboarding/sidebar.tsx | 6 +- .../useCurrentProjectState.tsx | 99 +++++++++++++ .../components/replaysOnboarding/utils.tsx | 14 +- static/app/data/platformCategories.tsx | 11 +- .../app/views/ddm/ddmOnboarding/sidebar.tsx | 15 +- .../ddmOnboarding/useCurrentProjectState.tsx | 93 ++++++++++++ 9 files changed, 233 insertions(+), 212 deletions(-) rename static/app/components/{onboarding/gettingStartedDoc/utils => feedback/feedbackOnboarding}/useCurrentProjectState.tsx (53%) delete mode 100644 static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx create mode 100644 static/app/components/replaysOnboarding/useCurrentProjectState.tsx create mode 100644 static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx diff --git a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx index 3094025285d62d..f96669948b06ff 100644 --- a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx +++ b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx @@ -8,12 +8,12 @@ import HighlightTopRightPattern from 'sentry-images/pattern/highlight-top-right. 
import {Button} from 'sentry/components/button'; import {CompactSelect} from 'sentry/components/compactSelect'; import {FeedbackOnboardingLayout} from 'sentry/components/feedback/feedbackOnboarding/feedbackOnboardingLayout'; +import useCurrentProjectState from 'sentry/components/feedback/feedbackOnboarding/useCurrentProjectState'; import useLoadFeedbackOnboardingDoc from 'sentry/components/feedback/feedbackOnboarding/useLoadFeedbackOnboardingDoc'; import RadioGroup from 'sentry/components/forms/controls/radioGroup'; import IdBadge from 'sentry/components/idBadge'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import {FeedbackOnboardingWebApiBanner} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding'; -import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; import {PlatformOptionDropdown} from 'sentry/components/replaysOnboarding/platformOptionDropdown'; import {replayJsFrameworkOptions} from 'sentry/components/replaysOnboarding/utils'; import SidebarPanel from 'sentry/components/sidebar/sidebarPanel'; @@ -43,15 +43,12 @@ function FeedbackOnboardingSidebar(props: CommonSidebarProps) { const isActive = currentPanel === SidebarPanelKey.FEEDBACK_ONBOARDING; const hasProjectAccess = organization.access.includes('project:read'); - const {allProjects, currentProject, setCurrentProject} = useCurrentProjectState({ + const {projects, currentProject, setCurrentProject} = useCurrentProjectState({ currentPanel, - targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, - onboardingPlatforms: feedbackOnboardingPlatforms, - allPlatforms: feedbackOnboardingPlatforms, }); const projectSelectOptions = useMemo(() => { - const supportedProjectItems: SelectValue[] = allProjects + const supportedProjectItems: SelectValue[] = projects .sort((aProject, bProject) => { // if we're comparing two projects w/ or w/o feedback alphabetical sort if (aProject.hasNewFeedbacks === 
bProject.hasNewFeedbacks) { @@ -76,7 +73,7 @@ function FeedbackOnboardingSidebar(props: CommonSidebarProps) { options: supportedProjectItems, }, ]; - }, [allProjects]); + }, [projects]); if (!isActive || !hasProjectAccess || !currentProject) { return null; @@ -115,9 +112,7 @@ function FeedbackOnboardingSidebar(props: CommonSidebarProps) { ) } value={currentProject?.id} - onChange={opt => - setCurrentProject(allProjects.find(p => p.id === opt.value)) - } + onChange={opt => setCurrentProject(projects.find(p => p.id === opt.value))} triggerProps={{'aria-label': currentProject?.slug}} options={projectSelectOptions} position="bottom-end" diff --git a/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.tsx b/static/app/components/feedback/feedbackOnboarding/useCurrentProjectState.tsx similarity index 53% rename from static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.tsx rename to static/app/components/feedback/feedbackOnboarding/useCurrentProjectState.tsx index e1ad3d0522a36e..b00fa5c0f6e9e0 100644 --- a/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.tsx +++ b/static/app/components/feedback/feedbackOnboarding/useCurrentProjectState.tsx @@ -1,40 +1,24 @@ -import {useEffect, useMemo, useState} from 'react'; -import partition from 'lodash/partition'; +import {useEffect, useState} from 'react'; -import type {SidebarPanelKey} from 'sentry/components/sidebar/types'; +import {SidebarPanelKey} from 'sentry/components/sidebar/types'; +import {feedbackOnboardingPlatforms} from 'sentry/data/platformCategories'; import PageFiltersStore from 'sentry/stores/pageFiltersStore'; import {useLegacyStore} from 'sentry/stores/useLegacyStore'; -import type {PlatformKey, Project} from 'sentry/types'; +import type {Project} from 'sentry/types'; import useProjects from 'sentry/utils/useProjects'; -type Props = { - allPlatforms: readonly PlatformKey[]; - currentPanel: '' | SidebarPanelKey; - 
onboardingPlatforms: readonly PlatformKey[]; - targetPanel: SidebarPanelKey; -}; - -function useCurrentProjectState({ - currentPanel, - targetPanel, - onboardingPlatforms, - allPlatforms, -}: Props) { +function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanelKey}) { const [currentProject, setCurrentProject] = useState(undefined); const {projects, initiallyLoaded: projectsLoaded} = useProjects(); const {selection, isReady} = useLegacyStore(PageFiltersStore); - const isActive = currentPanel === targetPanel; + const isActive = currentPanel === SidebarPanelKey.FEEDBACK_ONBOARDING; // Projects with onboarding instructions const projectsWithOnboarding = projects.filter( - p => p.platform && onboardingPlatforms.includes(p.platform) + p => p.platform && feedbackOnboardingPlatforms.includes(p.platform) ); - const [supportedProjects, unsupportedProjects] = useMemo(() => { - return partition(projects, p => p.platform && allPlatforms.includes(p.platform)); - }, [projects, allPlatforms]); - useEffect(() => { if (!isActive) { setCurrentProject(undefined); @@ -46,8 +30,7 @@ function useCurrentProjectState({ !projectsLoaded || !projects.length || !isReady || - !projectsWithOnboarding || - !supportedProjects + !projectsWithOnboarding ) { return; } @@ -65,23 +48,13 @@ function useCurrentProjectState({ return; } - // If we selected something that supports the product pick that - const projectSupportsProduct = supportedProjects.find(p => - selectedProjectIds.includes(p.id) - ); - - if (projectSupportsProduct) { - setCurrentProject(projectSupportsProduct); - return; - } - // Otherwise, just pick the first selected project const firstSelectedProject = projects.find(p => selectedProjectIds.includes(p.id)); setCurrentProject(firstSelectedProject); return; } // No selection, so pick the first project with onboarding - setCurrentProject(projectsWithOnboarding.at(0) || supportedProjects.at(0)); + setCurrentProject(projectsWithOnboarding.at(0)); return; }, [ 
currentProject, @@ -91,18 +64,13 @@ function useCurrentProjectState({ isActive, selection.projects, projectsWithOnboarding, - supportedProjects, ]); return { - projects: supportedProjects, - allProjects: projects, + projectsWithOnboarding, + projects, currentProject, setCurrentProject, - hasDocs: - !!currentProject?.platform && onboardingPlatforms.includes(currentProject.platform), - supportedProjects, - unsupportedProjects, }; } diff --git a/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx deleted file mode 100644 index 7fe73d4aee2647..00000000000000 --- a/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx +++ /dev/null @@ -1,138 +0,0 @@ -import {ProjectFixture} from 'sentry-fixture/project'; - -import {reactHooks} from 'sentry-test/reactTestingLibrary'; - -import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; -import {SidebarPanelKey} from 'sentry/components/sidebar/types'; -import { - customMetricOnboardingPlatforms, - customMetricPlatforms, - feedbackOnboardingPlatforms, - replayOnboardingPlatforms, - replayPlatforms, -} from 'sentry/data/platformCategories'; -import PageFiltersStore from 'sentry/stores/pageFiltersStore'; -import ProjectsStore from 'sentry/stores/projectsStore'; -import type {Project} from 'sentry/types'; - -function mockPageFilterStore(projects: Project[]) { - PageFiltersStore.init(); - PageFiltersStore.onInitializeUrlState( - { - projects: projects.map(p => parseInt(p.id, 10)), - environments: [], - datetime: { - period: '7d', - start: null, - end: null, - utc: null, - }, - }, - new Set() - ); -} - -describe('useCurrentProjectState', () => { - const rust_1 = ProjectFixture({id: '1', platform: 'rust'}); - const rust_2 = ProjectFixture({id: '2', platform: 'rust'}); - const javascript = ProjectFixture({id: '3', platform: 
'javascript'}); - const angular = ProjectFixture({id: '4', platform: 'javascript-angular'}); - - it('should return currentProject=undefined when currentPanel != targetPanel', () => { - const {result} = reactHooks.renderHook(useCurrentProjectState, { - initialProps: { - currentPanel: SidebarPanelKey.REPLAYS_ONBOARDING, - targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, - onboardingPlatforms: feedbackOnboardingPlatforms, - allPlatforms: feedbackOnboardingPlatforms, - }, - }); - expect(result.current.currentProject).toBe(undefined); - }); - - it('should return the currentProject when currentPanel = targetPanel', () => { - ProjectsStore.loadInitialData([javascript]); - mockPageFilterStore([javascript]); - const {result} = reactHooks.renderHook(useCurrentProjectState, { - initialProps: { - currentPanel: SidebarPanelKey.METRICS_ONBOARDING, - targetPanel: SidebarPanelKey.METRICS_ONBOARDING, - onboardingPlatforms: customMetricOnboardingPlatforms, - allPlatforms: customMetricPlatforms, - }, - }); - expect(result.current.currentProject).toBe(javascript); - }); - - it('should return the first project if global selection does not have onboarding', () => { - ProjectsStore.loadInitialData([rust_1, rust_2]); - mockPageFilterStore([rust_1, rust_2]); - const {result} = reactHooks.renderHook(useCurrentProjectState, { - initialProps: { - currentPanel: SidebarPanelKey.REPLAYS_ONBOARDING, - targetPanel: SidebarPanelKey.REPLAYS_ONBOARDING, - onboardingPlatforms: replayOnboardingPlatforms, - allPlatforms: replayPlatforms, - }, - }); - expect(result.current.currentProject).toBe(rust_1); - }); - - it('should return the first onboarding project', () => { - ProjectsStore.loadInitialData([rust_1, javascript]); - mockPageFilterStore([rust_1, javascript]); - const {result} = reactHooks.renderHook(useCurrentProjectState, { - initialProps: { - currentPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, - targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, - onboardingPlatforms: feedbackOnboardingPlatforms, 
- allPlatforms: feedbackOnboardingPlatforms, - }, - }); - expect(result.current.currentProject).toBe(javascript); - }); - - it('should return the first project if no selection', () => { - ProjectsStore.loadInitialData([rust_1, javascript]); - mockPageFilterStore([]); - const {result} = reactHooks.renderHook(useCurrentProjectState, { - initialProps: { - currentPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, - targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, - onboardingPlatforms: feedbackOnboardingPlatforms, - allPlatforms: feedbackOnboardingPlatforms, - }, - }); - expect(result.current.currentProject).toBe(javascript); - }); - - it('should return undefined if no selection and no projects have onboarding', () => { - ProjectsStore.loadInitialData([rust_1, rust_2]); - mockPageFilterStore([]); - const {result} = reactHooks.renderHook(useCurrentProjectState, { - initialProps: { - currentPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, - targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, - onboardingPlatforms: feedbackOnboardingPlatforms, - allPlatforms: feedbackOnboardingPlatforms, - }, - }); - expect(result.current.currentProject).toBe(undefined); - }); - - it('should override current project if setCurrentProjects is called', () => { - ProjectsStore.loadInitialData([javascript, angular]); - mockPageFilterStore([javascript, angular]); - const {result} = reactHooks.renderHook(useCurrentProjectState, { - initialProps: { - currentPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, - targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, - onboardingPlatforms: feedbackOnboardingPlatforms, - allPlatforms: feedbackOnboardingPlatforms, - }, - }); - expect(result.current.currentProject).toBe(javascript); - reactHooks.act(() => result.current.setCurrentProject(angular)); - expect(result.current.currentProject).toBe(angular); - }); -}); diff --git a/static/app/components/replaysOnboarding/sidebar.tsx b/static/app/components/replaysOnboarding/sidebar.tsx index 44307fe38609b3..00a7d06f7c905f 100644 --- 
a/static/app/components/replaysOnboarding/sidebar.tsx +++ b/static/app/components/replaysOnboarding/sidebar.tsx @@ -10,10 +10,10 @@ import {CompactSelect} from 'sentry/components/compactSelect'; import RadioGroup from 'sentry/components/forms/controls/radioGroup'; import IdBadge from 'sentry/components/idBadge'; import LoadingIndicator from 'sentry/components/loadingIndicator'; -import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; import useOnboardingDocs from 'sentry/components/onboardingWizard/useOnboardingDocs'; import {PlatformOptionDropdown} from 'sentry/components/replaysOnboarding/platformOptionDropdown'; import {ReplayOnboardingLayout} from 'sentry/components/replaysOnboarding/replayOnboardingLayout'; +import useCurrentProjectState from 'sentry/components/replaysOnboarding/useCurrentProjectState'; import useLoadOnboardingDoc from 'sentry/components/replaysOnboarding/useLoadOnboardingDoc'; import { generateDocKeys, @@ -30,7 +30,6 @@ import { replayBackendPlatforms, replayFrontendPlatforms, replayJsLoaderInstructionsPlatformList, - replayOnboardingPlatforms, replayPlatforms, } from 'sentry/data/platformCategories'; import platforms, {otherPlatform} from 'sentry/data/platforms'; @@ -60,9 +59,6 @@ function ReplaysOnboardingSidebar(props: CommonSidebarProps) { unsupportedProjects, } = useCurrentProjectState({ currentPanel, - targetPanel: SidebarPanelKey.REPLAYS_ONBOARDING, - onboardingPlatforms: replayOnboardingPlatforms, - allPlatforms: replayPlatforms, }); const projectSelectOptions = useMemo(() => { diff --git a/static/app/components/replaysOnboarding/useCurrentProjectState.tsx b/static/app/components/replaysOnboarding/useCurrentProjectState.tsx new file mode 100644 index 00000000000000..da95b94d335b43 --- /dev/null +++ b/static/app/components/replaysOnboarding/useCurrentProjectState.tsx @@ -0,0 +1,99 @@ +import {useEffect, useMemo, useState} from 'react'; + +import {splitProjectsByReplaySupport} 
from 'sentry/components/replaysOnboarding/utils'; +import {SidebarPanelKey} from 'sentry/components/sidebar/types'; +import {replayOnboardingPlatforms, replayPlatforms} from 'sentry/data/platformCategories'; +import PageFiltersStore from 'sentry/stores/pageFiltersStore'; +import {useLegacyStore} from 'sentry/stores/useLegacyStore'; +import type {Project} from 'sentry/types'; +import useProjects from 'sentry/utils/useProjects'; + +function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanelKey}) { + const [currentProject, setCurrentProject] = useState(undefined); + const {projects, initiallyLoaded: projectsLoaded} = useProjects(); + const {selection, isReady} = useLegacyStore(PageFiltersStore); + + const isActive = currentPanel === SidebarPanelKey.REPLAYS_ONBOARDING; + + // Projects where we have the onboarding instructions ready: + const projectsWithOnboarding = useMemo( + () => + projects.filter( + p => p.platform && replayOnboardingPlatforms.includes(p.platform) && !p.hasReplays + ), + [projects] + ); + + // Projects that support replays, but we haven't created the onboarding instructions (yet): + const projectWithReplaySupport = useMemo( + () => + projects.filter( + p => p.platform && replayPlatforms.includes(p.platform) && !p.hasReplays + ), + [projects] + ); + + useEffect(() => { + if (!isActive) { + setCurrentProject(undefined); + } + }, [isActive]); + + useEffect(() => { + if (currentProject || !projectsLoaded || !projects.length || !isReady || !isActive) { + return; + } + + if (!projectWithReplaySupport) { + return; + } + + if (selection.projects.length) { + const selectedProjectIds = selection.projects.map(String); + // If we selected something that has onboarding instructions, pick that first + const projectForOnboarding = projectsWithOnboarding.find(p => + selectedProjectIds.includes(p.id) + ); + if (projectForOnboarding) { + setCurrentProject(projectForOnboarding); + } + + // If we selected something that supports replays pick that + 
const projectSupportsReplay = projectWithReplaySupport.find(p => + selectedProjectIds.includes(p.id) + ); + if (projectSupportsReplay) { + setCurrentProject(projectSupportsReplay); + } + const firstSelectedProject = projects.find(p => selectedProjectIds.includes(p.id)); + setCurrentProject(firstSelectedProject); + } else { + // We have no selection, so pick a project which we've found + setCurrentProject(projectsWithOnboarding.at(0) || projectWithReplaySupport.at(0)); + } + }, [ + currentProject, + projectsLoaded, + projects, + isReady, + isActive, + selection.projects, + projectsWithOnboarding, + projectWithReplaySupport, + ]); + + const {supported, unsupported} = useMemo(() => { + return splitProjectsByReplaySupport(projects); + }, [projects]); + + return { + projects: projectWithReplaySupport, + allProjects: projects, + supportedProjects: supported, + unsupportedProjects: unsupported, + currentProject, + setCurrentProject, + }; +} + +export default useCurrentProjectState; diff --git a/static/app/components/replaysOnboarding/utils.tsx b/static/app/components/replaysOnboarding/utils.tsx index 53b9ec15ada10f..032a0b1575a4d2 100644 --- a/static/app/components/replaysOnboarding/utils.tsx +++ b/static/app/components/replaysOnboarding/utils.tsx @@ -1,6 +1,8 @@ +import partition from 'lodash/partition'; + import {replayFrontendPlatforms, replayPlatforms} from 'sentry/data/platformCategories'; import platforms from 'sentry/data/platforms'; -import type {PlatformIntegration, PlatformKey} from 'sentry/types'; +import type {PlatformIntegration, PlatformKey, Project} from 'sentry/types'; export function generateDocKeys(platform: PlatformKey): string[] { const platformKey = platform.startsWith('javascript') @@ -15,6 +17,16 @@ export function isPlatformSupported(platform: undefined | PlatformIntegration) { return platform?.id ? 
replayPlatforms.includes(platform?.id) : false; } +export function splitProjectsByReplaySupport(projects: Project[]) { + const [supported, unsupported] = partition(projects, project => + replayPlatforms.includes(project.platform!) + ); + return { + supported, + unsupported, + }; +} + export const replayJsFrameworkOptions: PlatformIntegration[] = platforms.filter(p => replayFrontendPlatforms.includes(p.id) ); diff --git a/static/app/data/platformCategories.tsx b/static/app/data/platformCategories.tsx index bc76b683d4943f..8737e46e1c8e96 100644 --- a/static/app/data/platformCategories.tsx +++ b/static/app/data/platformCategories.tsx @@ -517,17 +517,17 @@ const customMetricFrontendPlatforms: readonly PlatformKey[] = [ ]; // These are all the platforms that can set up custom metrics. -export const customMetricPlatforms: readonly PlatformKey[] = [ +export const customMetricPlatforms: Set = new Set([ ...customMetricFrontendPlatforms, ...customMetricBackendPlatforms, -]; +]); /** * The list of platforms for which we have created onboarding instructions. * Should be a subset of the list of `customMetricPlatforms`. 
*/ -export const customMetricOnboardingPlatforms: readonly PlatformKey[] = - customMetricPlatforms.filter( +export const customMetricOnboardingPlatforms = new Set( + [...customMetricPlatforms].filter( p => // Legacy platforms that do not have in-product docs ![ @@ -537,4 +537,5 @@ export const customMetricOnboardingPlatforms: readonly PlatformKey[] = 'python-pylons', 'python-tryton', ].includes(p) - ); + ) +); diff --git a/static/app/views/ddm/ddmOnboarding/sidebar.tsx b/static/app/views/ddm/ddmOnboarding/sidebar.tsx index fbdcc26ec8d99a..46dcacf2714b38 100644 --- a/static/app/views/ddm/ddmOnboarding/sidebar.tsx +++ b/static/app/views/ddm/ddmOnboarding/sidebar.tsx @@ -7,14 +7,10 @@ import {LinkButton} from 'sentry/components/button'; import {CompactSelect} from 'sentry/components/compactSelect'; import IdBadge from 'sentry/components/idBadge'; import {SdkDocumentation} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; -import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; import SidebarPanel from 'sentry/components/sidebar/sidebarPanel'; import type {CommonSidebarProps} from 'sentry/components/sidebar/types'; import {SidebarPanelKey} from 'sentry/components/sidebar/types'; -import { - customMetricOnboardingPlatforms, - customMetricPlatforms, -} from 'sentry/data/platformCategories'; +import {customMetricPlatforms} from 'sentry/data/platformCategories'; import platforms from 'sentry/data/platforms'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; @@ -22,6 +18,8 @@ import type {Project, SelectValue} from 'sentry/types'; import {METRICS_DOCS_URL} from 'sentry/utils/metrics/constants'; import useOrganization from 'sentry/utils/useOrganization'; +import {useCurrentProjectState} from './useCurrentProjectState'; + function MetricsOnboardingSidebar(props: CommonSidebarProps) { const {currentPanel, collapsed, hidePanel, orientation} = props; const organization 
= useOrganization(); @@ -38,10 +36,7 @@ function MetricsOnboardingSidebar(props: CommonSidebarProps) { unsupportedProjects, hasDocs, } = useCurrentProjectState({ - currentPanel, - targetPanel: SidebarPanelKey.METRICS_ONBOARDING, - onboardingPlatforms: customMetricOnboardingPlatforms, - allPlatforms: customMetricPlatforms, + isActive, }); const projectSelectOptions = useMemo(() => { @@ -155,7 +150,7 @@ function OnboardingContent({ : undefined; const supportsCustomMetrics = - currentProject.platform && customMetricPlatforms.includes(currentProject.platform); + currentProject.platform && customMetricPlatforms.has(currentProject.platform); if (!supportsCustomMetrics) { return ( diff --git a/static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx b/static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx new file mode 100644 index 00000000000000..3202ba7e8ea738 --- /dev/null +++ b/static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx @@ -0,0 +1,93 @@ +import {useEffect, useMemo, useState} from 'react'; +import partition from 'lodash/partition'; + +import { + customMetricOnboardingPlatforms, + customMetricPlatforms, +} from 'sentry/data/platformCategories'; +import PageFiltersStore from 'sentry/stores/pageFiltersStore'; +import {useLegacyStore} from 'sentry/stores/useLegacyStore'; +import type {Project} from 'sentry/types'; +import useProjects from 'sentry/utils/useProjects'; + +export function useCurrentProjectState({isActive}: {isActive: boolean}) { + const [currentProject, setCurrentProject] = useState(undefined); + const {projects, initiallyLoaded: projectsLoaded} = useProjects(); + const {selection, isReady} = useLegacyStore(PageFiltersStore); + + const [supportedProjects, unsupportedProjects] = useMemo(() => { + return partition(projects, p => p.platform && customMetricPlatforms.has(p.platform)); + }, [projects]); + + // Projects where we have the onboarding instructions ready: + const projectsWithOnboarding = useMemo( + () => + 
supportedProjects.filter( + p => p.platform && customMetricOnboardingPlatforms.has(p.platform) + ), + [supportedProjects] + ); + + useEffect(() => { + if (!isActive) { + setCurrentProject(undefined); + } + }, [isActive]); + + useEffect(() => { + if (currentProject || !projectsLoaded || !projects.length || !isReady || !isActive) { + return; + } + + if (!supportedProjects) { + return; + } + + if (selection.projects.length) { + const selectedProjectIds = selection.projects.map(String); + // If we selected something that has onboarding instructions, pick that first + const projectWithOnboarding = projectsWithOnboarding.find(p => + selectedProjectIds.includes(p.id) + ); + if (projectWithOnboarding) { + setCurrentProject(projectWithOnboarding); + return; + } + + // If we selected something that supports custom metrics pick that + const projectSupportsMetrics = supportedProjects.find(p => + selectedProjectIds.includes(p.id) + ); + if (projectSupportsMetrics) { + setCurrentProject(projectSupportsMetrics); + return; + } + // Else pick the first selected project + const firstSelectedProject = projects.find(p => selectedProjectIds.includes(p.id)); + setCurrentProject(firstSelectedProject); + } else { + setCurrentProject(projectsWithOnboarding.at(0) || supportedProjects.at(0)); + } + }, [ + currentProject, + projectsLoaded, + projects, + isReady, + isActive, + selection.projects, + projectsWithOnboarding, + supportedProjects, + ]); + + return { + projects: supportedProjects, + hasDocs: + !!currentProject?.platform && + customMetricOnboardingPlatforms.has(currentProject.platform), + allProjects: projects, + supportedProjects, + unsupportedProjects, + currentProject, + setCurrentProject, + }; +} From 739564a21fb3fc95a4159f6c3f627b942412b4f8 Mon Sep 17 00:00:00 2001 From: Michael Sun <55160142+MichaelSun48@users.noreply.github.com> Date: Wed, 6 Mar 2024 11:58:31 -0500 Subject: [PATCH 105/145] feat(code-mappings): LA automatic code mapping support for PHP projects (#66151) (This 
feature is in LA) Whitelisted php projects for automatic code mapping support and added unit tests --------- Co-authored-by: Bartek Ogryczak Co-authored-by: Armen Zambrano G. <44410+armenzg@users.noreply.github.com> --- src/sentry/conf/server.py | 2 + src/sentry/features/__init__.py | 1 + src/sentry/tasks/derive_code_mappings.py | 14 ++- src/sentry/tasks/post_process.py | 2 +- .../sentry/tasks/test_derive_code_mappings.py | 100 +++++++++++++++--- 5 files changed, 100 insertions(+), 19 deletions(-) diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py index 75c8be67659d82..66a480e8f522d8 100644 --- a/src/sentry/conf/server.py +++ b/src/sentry/conf/server.py @@ -1529,6 +1529,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]: "organizations:default-inbound-filters": False, # Enables automatically deriving of code mappings "organizations:derive-code-mappings": True, + # Enables automatically deriving of PHP code mappings + "organizations:derive-code-mappings-php": False, # Enable device.class as a selectable column "organizations:device-classification": False, # Enables synthesis of device.class in ingest diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py index 0e1abfced2258e..2650bfb773a0a6 100644 --- a/src/sentry/features/__init__.py +++ b/src/sentry/features/__init__.py @@ -96,6 +96,7 @@ default_manager.add("organizations:ddm-metrics-api-unit-normalization", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:default-high-priority-alerts", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:derive-code-mappings", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) +default_manager.add("organizations:derive-code-mappings-php", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) default_manager.add("organizations:device-class-synthesis", OrganizationFeature, FeatureHandlerStrategy.INTERNAL) 
default_manager.add("organizations:device-classification", OrganizationFeature, FeatureHandlerStrategy.REMOTE) default_manager.add("organizations:discover-events-rate-limit", OrganizationFeature, FeatureHandlerStrategy.REMOTE) diff --git a/src/sentry/tasks/derive_code_mappings.py b/src/sentry/tasks/derive_code_mappings.py index 746ba13bbff193..0b7b9f93175925 100644 --- a/src/sentry/tasks/derive_code_mappings.py +++ b/src/sentry/tasks/derive_code_mappings.py @@ -22,7 +22,7 @@ from sentry.utils.locking import UnableToAcquireLock from sentry.utils.safe import get_path -SUPPORTED_LANGUAGES = ["javascript", "python", "node", "ruby"] +SUPPORTED_LANGUAGES = ["javascript", "python", "node", "ruby", "php"] logger = logging.getLogger(__name__) @@ -92,13 +92,19 @@ def derive_code_mappings( "organization.slug": org.slug, } - if ( - not features.has("organizations:derive-code-mappings", org) - or not data["platform"] in SUPPORTED_LANGUAGES + if not ( + features.has("organizations:derive-code-mappings", org) + and data.get("platform") in SUPPORTED_LANGUAGES ): logger.info("Event should not be processed.", extra=extra) return + # php automatic code mappings currently in LA + if data["platform"].startswith("php") and not features.has( + "organizations:derive-code-mappings-php", org + ): + return + stacktrace_paths: list[str] = identify_stacktrace_paths(data) if not stacktrace_paths: return diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py index c53204fc7a4044..47e7b3118fd786 100644 --- a/src/sentry/tasks/post_process.py +++ b/src/sentry/tasks/post_process.py @@ -1126,7 +1126,7 @@ def process_code_mappings(job: PostProcessJob) -> None: with metrics.timer("post_process.process_code_mappings.duration"): # Supported platforms - if event.data["platform"] not in SUPPORTED_LANGUAGES: + if event.data.get("platform") not in SUPPORTED_LANGUAGES: return # To limit the overall number of tasks, only process one issue per project per hour. 
In diff --git a/tests/sentry/tasks/test_derive_code_mappings.py b/tests/sentry/tasks/test_derive_code_mappings.py index f69a774c61e106..585cf2b9624550 100644 --- a/tests/sentry/tasks/test_derive_code_mappings.py +++ b/tests/sentry/tasks/test_derive_code_mappings.py @@ -329,6 +329,69 @@ def test_derive_code_mappings_starts_with_app_dot_dot_slash(self): assert code_mapping.repository.name == repo_name +class TestPhpDeriveCodeMappings(BaseDeriveCodeMappings): + def setUp(self): + super().setUp() + self.platform = "php" + self.event_data = self.generate_data( + [ + {"in_app": True, "filename": "/sentry/capybara.php"}, + {"in_app": True, "filename": "/sentry/potato/kangaroo.php"}, + { + "in_app": False, + "filename": "/sentry/potato/vendor/sentry/sentry/src/functions.php", + }, + ], + self.platform, + ) + + @responses.activate + @with_feature({"organizations:derive-code-mappings-php": False}) + def test_missing_feature_flag(self): + repo_name = "php/place" + with patch( + "sentry.integrations.github.client.GitHubClientMixin.get_trees_for_org" + ) as mock_get_trees_for_org: + mock_get_trees_for_org.return_value = { + repo_name: RepoTree(Repo(repo_name, "master"), ["sentry/potato/kangaroo.php"]) + } + derive_code_mappings(self.project.id, self.event_data) + # Check to make sure no code mappings were generated + assert not RepositoryProjectPathConfig.objects.exists() + + @responses.activate + @with_feature({"organizations:derive-code-mappings-php": True}) + def test_derive_code_mappings_basic_php(self): + repo_name = "php/place" + with patch( + "sentry.integrations.github.client.GitHubClientMixin.get_trees_for_org" + ) as mock_get_trees_for_org: + mock_get_trees_for_org.return_value = { + repo_name: RepoTree(Repo(repo_name, "master"), ["sentry/potato/kangaroo.php"]) + } + derive_code_mappings(self.project.id, self.event_data) + code_mapping = RepositoryProjectPathConfig.objects.all()[0] + assert code_mapping.stack_root == "" + assert code_mapping.source_root == "" + assert 
code_mapping.repository.name == repo_name + + @responses.activate + @with_feature({"organizations:derive-code-mappings-php": True}) + def test_derive_code_mappings_different_roots_php(self): + repo_name = "php/place" + with patch( + "sentry.integrations.github.client.GitHubClientMixin.get_trees_for_org" + ) as mock_get_trees_for_org: + mock_get_trees_for_org.return_value = { + repo_name: RepoTree(Repo(repo_name, "master"), ["src/sentry/potato/kangaroo.php"]) + } + derive_code_mappings(self.project.id, self.event_data) + code_mapping = RepositoryProjectPathConfig.objects.all()[0] + assert code_mapping.stack_root == "sentry/" + assert code_mapping.source_root == "src/sentry/" + assert code_mapping.repository.name == repo_name + + @region_silo_test class TestPythonDeriveCodeMappings(BaseDeriveCodeMappings): def setUp(self): @@ -369,12 +432,15 @@ def test_feature_off(self): assert not RepositoryProjectPathConfig.objects.filter(project_id=self.project.id).exists() - with patch( - "sentry.tasks.derive_code_mappings.identify_stacktrace_paths", - return_value={ - self.project: ["sentry/models/release.py", "sentry/tasks.py"], - }, - ) as mock_identify_stacktraces, self.tasks(): + with ( + patch( + "sentry.tasks.derive_code_mappings.identify_stacktrace_paths", + return_value={ + self.project: ["sentry/models/release.py", "sentry/tasks.py"], + }, + ) as mock_identify_stacktraces, + self.tasks(), + ): derive_code_mappings(self.project.id, event.data) assert mock_identify_stacktraces.call_count == 0 @@ -398,10 +464,13 @@ def test_derive_code_mappings_single_project( assert not RepositoryProjectPathConfig.objects.filter(project_id=self.project.id).exists() - with patch( - "sentry.tasks.derive_code_mappings.identify_stacktrace_paths", - return_value=["sentry/models/release.py", "sentry/tasks.py"], - ) as mock_identify_stacktraces, self.tasks(): + with ( + patch( + "sentry.tasks.derive_code_mappings.identify_stacktrace_paths", + return_value=["sentry/models/release.py", 
"sentry/tasks.py"], + ) as mock_identify_stacktraces, + self.tasks(), + ): derive_code_mappings(self.project.id, event.data) assert mock_identify_stacktraces.call_count == 1 @@ -453,10 +522,13 @@ def test_derive_code_mappings_duplicates( assert RepositoryProjectPathConfig.objects.filter(project_id=self.project.id).exists() - with patch( - "sentry.tasks.derive_code_mappings.identify_stacktrace_paths", - return_value=["sentry/models/release.py", "sentry/tasks.py"], - ) as mock_identify_stacktraces, self.tasks(): + with ( + patch( + "sentry.tasks.derive_code_mappings.identify_stacktrace_paths", + return_value=["sentry/models/release.py", "sentry/tasks.py"], + ) as mock_identify_stacktraces, + self.tasks(), + ): derive_code_mappings(self.project.id, event.data) assert mock_identify_stacktraces.call_count == 1 From 5007096c2617f104f8a97f0c2b6aa7b1e47189c3 Mon Sep 17 00:00:00 2001 From: Seiji Chew <67301797+schew2381@users.noreply.github.com> Date: Wed, 6 Mar 2024 09:07:50 -0800 Subject: [PATCH 106/145] chore(superuser): Add logs to check u2f (#66393) --- src/sentry/api/endpoints/auth_index.py | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/src/sentry/api/endpoints/auth_index.py b/src/sentry/api/endpoints/auth_index.py index 5a291455c2b187..b89f97c61e8f91 100644 --- a/src/sentry/api/endpoints/auth_index.py +++ b/src/sentry/api/endpoints/auth_index.py @@ -155,6 +155,15 @@ def _validate_superuser( SSO and if they do not, we redirect them back to the SSO login. 
""" + logger.info( + "auth-index.validate_superuser", + extra={ + "validator": validator, + "user": request.user.id, + "raise_exception": not DISABLE_SSO_CHECK_FOR_LOCAL_DEV, + "verify_authenticator": verify_authenticator, + }, + ) # Disable exception for missing password or u2f code if we're running locally validator.is_valid(raise_exception=not DISABLE_SSO_CHECK_FOR_LOCAL_DEV) @@ -248,15 +257,13 @@ def put(self, request: Request) -> Response: id=Superuser.org_id, include_teams=False, include_projects=False ) - verify_authenticator = ( - False - if superuser_org is None - else features.has( + if superuser_org is not None: + has_u2f_flag = features.has( "organizations:u2f-superuser-form", superuser_org.organization, actor=request.user, ) - ) + verify_authenticator = has_u2f_flag if verify_authenticator: if not Authenticator.objects.filter( @@ -265,6 +272,15 @@ def put(self, request: Request) -> Response: return Response( {"detail": {"code": "no_u2f"}}, status=status.HTTP_403_FORBIDDEN ) + logger.info( + "auth-index.put", + extra={ + "organization": superuser_org, + "u2f_flag": has_u2f_flag, + "user": request.user.id, + "verify_authenticator": verify_authenticator, + }, + ) try: authenticated = self._validate_superuser(validator, request, verify_authenticator) except ValidationError: From 703029c70bb2661b55a15d21b7579d71b7984eb3 Mon Sep 17 00:00:00 2001 From: Evan Purkhiser Date: Wed, 6 Mar 2024 12:20:30 -0500 Subject: [PATCH 107/145] ref(crons): Avoid extra hashing on fingerprint UUIDs (#66373) --- src/sentry/monitors/logic/mark_failed.py | 7 ++++--- tests/sentry/monitors/logic/test_mark_ok.py | 3 +-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/sentry/monitors/logic/mark_failed.py b/src/sentry/monitors/logic/mark_failed.py index 57829aa25f2f29..d9d0bf98704d11 100644 --- a/src/sentry/monitors/logic/mark_failed.py +++ b/src/sentry/monitors/logic/mark_failed.py @@ -7,7 +7,6 @@ from django.db.models import Q from sentry import features -from 
sentry.grouping.utils import hash_from_values from sentry.issues.grouptype import ( MonitorCheckInFailure, MonitorCheckInMissed, @@ -143,8 +142,10 @@ def mark_failed_threshold(failed_checkin: MonitorCheckIn, failure_issue_threshol if not monitor_muted: starting_checkin = previous_checkins[0] - # for new incidents, generate a new hash from a uuid to use - fingerprint = hash_from_values([uuid.uuid4()]) + # for new incidents, generate a uuid as the fingerprint. This is + # not deterministic of any property of the incident and is simply + # used to associate the incident to it's event occurrences + fingerprint = uuid.uuid4().hex MonitorIncident.objects.create( monitor=monitor_env.monitor, diff --git a/tests/sentry/monitors/logic/test_mark_ok.py b/tests/sentry/monitors/logic/test_mark_ok.py index c9269b17f8a2c2..84bb2117af8808 100644 --- a/tests/sentry/monitors/logic/test_mark_ok.py +++ b/tests/sentry/monitors/logic/test_mark_ok.py @@ -4,7 +4,6 @@ from django.utils import timezone -from sentry.grouping.utils import hash_from_values from sentry.issues.producer import PayloadType from sentry.models.group import GroupStatus from sentry.monitors.logic.mark_ok import mark_ok @@ -104,7 +103,7 @@ def test_mark_ok_recovery_threshold(self, mock_produce_occurrence_to_kafka): monitor_environment=monitor_environment, starting_checkin=first_checkin, starting_timestamp=first_checkin.date_added, - grouphash=hash_from_values([uuid.uuid4()]), + grouphash=uuid.uuid4().hex, ) # Create OK check-ins From 5c9084cebb0f6c1e0b450b3ca2bd1d7edeb0aa29 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Wed, 6 Mar 2024 09:44:57 -0800 Subject: [PATCH 108/145] ref(onboarding): refactor useCurrentProjectState to be generalized (#66425) - had to revert the original PR (https://github.com/getsentry/sentry/pull/66118) due to a failing test - fixed in this PR! 
this is the only change from the previous PR - DDM, Replays, and Feedback onboarding were all using some variation of this hook: `useCurrentProjectState`. They're all pretty similar, so I refactored them into one general reusable hook - I modified the respective `sidebar.tsx` files to use this new hook - I also added a hook test --- .../feedback/feedbackOnboarding/sidebar.tsx | 15 +- .../utils/useCurrentProjectState.spec.tsx | 138 ++++++++++++++++++ .../utils}/useCurrentProjectState.tsx | 54 +++++-- .../components/replaysOnboarding/sidebar.tsx | 6 +- .../useCurrentProjectState.tsx | 99 ------------- .../components/replaysOnboarding/utils.tsx | 14 +- static/app/data/platformCategories.tsx | 11 +- .../app/views/ddm/ddmOnboarding/sidebar.tsx | 15 +- .../ddmOnboarding/useCurrentProjectState.tsx | 93 ------------ 9 files changed, 212 insertions(+), 233 deletions(-) create mode 100644 static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx rename static/app/components/{feedback/feedbackOnboarding => onboarding/gettingStartedDoc/utils}/useCurrentProjectState.tsx (53%) delete mode 100644 static/app/components/replaysOnboarding/useCurrentProjectState.tsx delete mode 100644 static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx diff --git a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx index f96669948b06ff..3094025285d62d 100644 --- a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx +++ b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx @@ -8,12 +8,12 @@ import HighlightTopRightPattern from 'sentry-images/pattern/highlight-top-right. 
import {Button} from 'sentry/components/button'; import {CompactSelect} from 'sentry/components/compactSelect'; import {FeedbackOnboardingLayout} from 'sentry/components/feedback/feedbackOnboarding/feedbackOnboardingLayout'; -import useCurrentProjectState from 'sentry/components/feedback/feedbackOnboarding/useCurrentProjectState'; import useLoadFeedbackOnboardingDoc from 'sentry/components/feedback/feedbackOnboarding/useLoadFeedbackOnboardingDoc'; import RadioGroup from 'sentry/components/forms/controls/radioGroup'; import IdBadge from 'sentry/components/idBadge'; import LoadingIndicator from 'sentry/components/loadingIndicator'; import {FeedbackOnboardingWebApiBanner} from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding'; +import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; import {PlatformOptionDropdown} from 'sentry/components/replaysOnboarding/platformOptionDropdown'; import {replayJsFrameworkOptions} from 'sentry/components/replaysOnboarding/utils'; import SidebarPanel from 'sentry/components/sidebar/sidebarPanel'; @@ -43,12 +43,15 @@ function FeedbackOnboardingSidebar(props: CommonSidebarProps) { const isActive = currentPanel === SidebarPanelKey.FEEDBACK_ONBOARDING; const hasProjectAccess = organization.access.includes('project:read'); - const {projects, currentProject, setCurrentProject} = useCurrentProjectState({ + const {allProjects, currentProject, setCurrentProject} = useCurrentProjectState({ currentPanel, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, }); const projectSelectOptions = useMemo(() => { - const supportedProjectItems: SelectValue[] = projects + const supportedProjectItems: SelectValue[] = allProjects .sort((aProject, bProject) => { // if we're comparing two projects w/ or w/o feedback alphabetical sort if (aProject.hasNewFeedbacks === 
bProject.hasNewFeedbacks) { @@ -73,7 +76,7 @@ function FeedbackOnboardingSidebar(props: CommonSidebarProps) { options: supportedProjectItems, }, ]; - }, [projects]); + }, [allProjects]); if (!isActive || !hasProjectAccess || !currentProject) { return null; @@ -112,7 +115,9 @@ function FeedbackOnboardingSidebar(props: CommonSidebarProps) { ) } value={currentProject?.id} - onChange={opt => setCurrentProject(projects.find(p => p.id === opt.value))} + onChange={opt => + setCurrentProject(allProjects.find(p => p.id === opt.value)) + } triggerProps={{'aria-label': currentProject?.slug}} options={projectSelectOptions} position="bottom-end" diff --git a/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx new file mode 100644 index 00000000000000..f6417896ae768f --- /dev/null +++ b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.spec.tsx @@ -0,0 +1,138 @@ +import {ProjectFixture} from 'sentry-fixture/project'; + +import {reactHooks} from 'sentry-test/reactTestingLibrary'; + +import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; +import {SidebarPanelKey} from 'sentry/components/sidebar/types'; +import { + customMetricOnboardingPlatforms, + customMetricPlatforms, + feedbackOnboardingPlatforms, + replayOnboardingPlatforms, + replayPlatforms, +} from 'sentry/data/platformCategories'; +import PageFiltersStore from 'sentry/stores/pageFiltersStore'; +import ProjectsStore from 'sentry/stores/projectsStore'; +import type {Project} from 'sentry/types'; + +function mockPageFilterStore(projects: Project[]) { + PageFiltersStore.init(); + PageFiltersStore.onInitializeUrlState( + { + projects: projects.map(p => parseInt(p.id, 10)), + environments: [], + datetime: { + period: '7d', + start: null, + end: null, + utc: null, + }, + }, + new Set() + ); +} + 
+describe('useCurrentProjectState', () => { + const rust_1 = ProjectFixture({id: '1', platform: 'rust'}); + const rust_2 = ProjectFixture({id: '2', platform: 'rust'}); + const javascript = ProjectFixture({id: '3', platform: 'javascript'}); + const angular = ProjectFixture({id: '4', platform: 'javascript-angular'}); + + it('should return currentProject=undefined when currentPanel != targetPanel', () => { + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, + }, + }); + expect(result.current.currentProject).toBe(undefined); + }); + + it('should return the currentProject when currentPanel = targetPanel', () => { + ProjectsStore.loadInitialData([javascript]); + mockPageFilterStore([javascript]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.METRICS_ONBOARDING, + targetPanel: SidebarPanelKey.METRICS_ONBOARDING, + onboardingPlatforms: customMetricOnboardingPlatforms, + allPlatforms: customMetricPlatforms, + }, + }); + expect(result.current.currentProject).toBe(javascript); + }); + + it('should return the first project if global selection does not have onboarding', () => { + ProjectsStore.loadInitialData([rust_1, rust_2]); + mockPageFilterStore([rust_1, rust_2]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + targetPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + onboardingPlatforms: replayOnboardingPlatforms, + allPlatforms: replayPlatforms, + }, + }); + expect(result.current.currentProject).toBe(rust_1); + }); + + it('should return the first onboarding project', () => { + ProjectsStore.loadInitialData([rust_1, javascript]); + mockPageFilterStore([rust_1, javascript]); + const {result} 
= reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, + }, + }); + expect(result.current.currentProject).toBe(rust_1); + }); + + it('should return the first project if no selection', () => { + ProjectsStore.loadInitialData([rust_1, javascript]); + mockPageFilterStore([]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + targetPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + onboardingPlatforms: replayOnboardingPlatforms, + allPlatforms: replayPlatforms, + }, + }); + expect(result.current.currentProject).toBe(javascript); + }); + + it('should return undefined if no selection and no projects have onboarding', () => { + ProjectsStore.loadInitialData([rust_1, rust_2]); + mockPageFilterStore([]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + targetPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + onboardingPlatforms: replayOnboardingPlatforms, + allPlatforms: replayPlatforms, + }, + }); + expect(result.current.currentProject).toBe(undefined); + }); + + it('should override current project if setCurrentProjects is called', () => { + ProjectsStore.loadInitialData([javascript, angular]); + mockPageFilterStore([javascript, angular]); + const {result} = reactHooks.renderHook(useCurrentProjectState, { + initialProps: { + currentPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + targetPanel: SidebarPanelKey.FEEDBACK_ONBOARDING, + onboardingPlatforms: feedbackOnboardingPlatforms, + allPlatforms: feedbackOnboardingPlatforms, + }, + }); + expect(result.current.currentProject).toBe(javascript); + reactHooks.act(() => result.current.setCurrentProject(angular)); + 
expect(result.current.currentProject).toBe(angular); + }); +}); diff --git a/static/app/components/feedback/feedbackOnboarding/useCurrentProjectState.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.tsx similarity index 53% rename from static/app/components/feedback/feedbackOnboarding/useCurrentProjectState.tsx rename to static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.tsx index b00fa5c0f6e9e0..e1ad3d0522a36e 100644 --- a/static/app/components/feedback/feedbackOnboarding/useCurrentProjectState.tsx +++ b/static/app/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState.tsx @@ -1,24 +1,40 @@ -import {useEffect, useState} from 'react'; +import {useEffect, useMemo, useState} from 'react'; +import partition from 'lodash/partition'; -import {SidebarPanelKey} from 'sentry/components/sidebar/types'; -import {feedbackOnboardingPlatforms} from 'sentry/data/platformCategories'; +import type {SidebarPanelKey} from 'sentry/components/sidebar/types'; import PageFiltersStore from 'sentry/stores/pageFiltersStore'; import {useLegacyStore} from 'sentry/stores/useLegacyStore'; -import type {Project} from 'sentry/types'; +import type {PlatformKey, Project} from 'sentry/types'; import useProjects from 'sentry/utils/useProjects'; -function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanelKey}) { +type Props = { + allPlatforms: readonly PlatformKey[]; + currentPanel: '' | SidebarPanelKey; + onboardingPlatforms: readonly PlatformKey[]; + targetPanel: SidebarPanelKey; +}; + +function useCurrentProjectState({ + currentPanel, + targetPanel, + onboardingPlatforms, + allPlatforms, +}: Props) { const [currentProject, setCurrentProject] = useState(undefined); const {projects, initiallyLoaded: projectsLoaded} = useProjects(); const {selection, isReady} = useLegacyStore(PageFiltersStore); - const isActive = currentPanel === SidebarPanelKey.FEEDBACK_ONBOARDING; + const isActive = currentPanel === 
targetPanel; // Projects with onboarding instructions const projectsWithOnboarding = projects.filter( - p => p.platform && feedbackOnboardingPlatforms.includes(p.platform) + p => p.platform && onboardingPlatforms.includes(p.platform) ); + const [supportedProjects, unsupportedProjects] = useMemo(() => { + return partition(projects, p => p.platform && allPlatforms.includes(p.platform)); + }, [projects, allPlatforms]); + useEffect(() => { if (!isActive) { setCurrentProject(undefined); @@ -30,7 +46,8 @@ function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanel !projectsLoaded || !projects.length || !isReady || - !projectsWithOnboarding + !projectsWithOnboarding || + !supportedProjects ) { return; } @@ -48,13 +65,23 @@ function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanel return; } + // If we selected something that supports the product pick that + const projectSupportsProduct = supportedProjects.find(p => + selectedProjectIds.includes(p.id) + ); + + if (projectSupportsProduct) { + setCurrentProject(projectSupportsProduct); + return; + } + // Otherwise, just pick the first selected project const firstSelectedProject = projects.find(p => selectedProjectIds.includes(p.id)); setCurrentProject(firstSelectedProject); return; } // No selection, so pick the first project with onboarding - setCurrentProject(projectsWithOnboarding.at(0)); + setCurrentProject(projectsWithOnboarding.at(0) || supportedProjects.at(0)); return; }, [ currentProject, @@ -64,13 +91,18 @@ function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanel isActive, selection.projects, projectsWithOnboarding, + supportedProjects, ]); return { - projectsWithOnboarding, - projects, + projects: supportedProjects, + allProjects: projects, currentProject, setCurrentProject, + hasDocs: + !!currentProject?.platform && onboardingPlatforms.includes(currentProject.platform), + supportedProjects, + unsupportedProjects, }; } diff --git 
a/static/app/components/replaysOnboarding/sidebar.tsx b/static/app/components/replaysOnboarding/sidebar.tsx index 00a7d06f7c905f..44307fe38609b3 100644 --- a/static/app/components/replaysOnboarding/sidebar.tsx +++ b/static/app/components/replaysOnboarding/sidebar.tsx @@ -10,10 +10,10 @@ import {CompactSelect} from 'sentry/components/compactSelect'; import RadioGroup from 'sentry/components/forms/controls/radioGroup'; import IdBadge from 'sentry/components/idBadge'; import LoadingIndicator from 'sentry/components/loadingIndicator'; +import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; import useOnboardingDocs from 'sentry/components/onboardingWizard/useOnboardingDocs'; import {PlatformOptionDropdown} from 'sentry/components/replaysOnboarding/platformOptionDropdown'; import {ReplayOnboardingLayout} from 'sentry/components/replaysOnboarding/replayOnboardingLayout'; -import useCurrentProjectState from 'sentry/components/replaysOnboarding/useCurrentProjectState'; import useLoadOnboardingDoc from 'sentry/components/replaysOnboarding/useLoadOnboardingDoc'; import { generateDocKeys, @@ -30,6 +30,7 @@ import { replayBackendPlatforms, replayFrontendPlatforms, replayJsLoaderInstructionsPlatformList, + replayOnboardingPlatforms, replayPlatforms, } from 'sentry/data/platformCategories'; import platforms, {otherPlatform} from 'sentry/data/platforms'; @@ -59,6 +60,9 @@ function ReplaysOnboardingSidebar(props: CommonSidebarProps) { unsupportedProjects, } = useCurrentProjectState({ currentPanel, + targetPanel: SidebarPanelKey.REPLAYS_ONBOARDING, + onboardingPlatforms: replayOnboardingPlatforms, + allPlatforms: replayPlatforms, }); const projectSelectOptions = useMemo(() => { diff --git a/static/app/components/replaysOnboarding/useCurrentProjectState.tsx b/static/app/components/replaysOnboarding/useCurrentProjectState.tsx deleted file mode 100644 index da95b94d335b43..00000000000000 --- 
a/static/app/components/replaysOnboarding/useCurrentProjectState.tsx +++ /dev/null @@ -1,99 +0,0 @@ -import {useEffect, useMemo, useState} from 'react'; - -import {splitProjectsByReplaySupport} from 'sentry/components/replaysOnboarding/utils'; -import {SidebarPanelKey} from 'sentry/components/sidebar/types'; -import {replayOnboardingPlatforms, replayPlatforms} from 'sentry/data/platformCategories'; -import PageFiltersStore from 'sentry/stores/pageFiltersStore'; -import {useLegacyStore} from 'sentry/stores/useLegacyStore'; -import type {Project} from 'sentry/types'; -import useProjects from 'sentry/utils/useProjects'; - -function useCurrentProjectState({currentPanel}: {currentPanel: '' | SidebarPanelKey}) { - const [currentProject, setCurrentProject] = useState(undefined); - const {projects, initiallyLoaded: projectsLoaded} = useProjects(); - const {selection, isReady} = useLegacyStore(PageFiltersStore); - - const isActive = currentPanel === SidebarPanelKey.REPLAYS_ONBOARDING; - - // Projects where we have the onboarding instructions ready: - const projectsWithOnboarding = useMemo( - () => - projects.filter( - p => p.platform && replayOnboardingPlatforms.includes(p.platform) && !p.hasReplays - ), - [projects] - ); - - // Projects that support replays, but we haven't created the onboarding instructions (yet): - const projectWithReplaySupport = useMemo( - () => - projects.filter( - p => p.platform && replayPlatforms.includes(p.platform) && !p.hasReplays - ), - [projects] - ); - - useEffect(() => { - if (!isActive) { - setCurrentProject(undefined); - } - }, [isActive]); - - useEffect(() => { - if (currentProject || !projectsLoaded || !projects.length || !isReady || !isActive) { - return; - } - - if (!projectWithReplaySupport) { - return; - } - - if (selection.projects.length) { - const selectedProjectIds = selection.projects.map(String); - // If we selected something that has onboarding instructions, pick that first - const projectForOnboarding = 
projectsWithOnboarding.find(p => - selectedProjectIds.includes(p.id) - ); - if (projectForOnboarding) { - setCurrentProject(projectForOnboarding); - } - - // If we selected something that supports replays pick that - const projectSupportsReplay = projectWithReplaySupport.find(p => - selectedProjectIds.includes(p.id) - ); - if (projectSupportsReplay) { - setCurrentProject(projectSupportsReplay); - } - const firstSelectedProject = projects.find(p => selectedProjectIds.includes(p.id)); - setCurrentProject(firstSelectedProject); - } else { - // We have no selection, so pick a project which we've found - setCurrentProject(projectsWithOnboarding.at(0) || projectWithReplaySupport.at(0)); - } - }, [ - currentProject, - projectsLoaded, - projects, - isReady, - isActive, - selection.projects, - projectsWithOnboarding, - projectWithReplaySupport, - ]); - - const {supported, unsupported} = useMemo(() => { - return splitProjectsByReplaySupport(projects); - }, [projects]); - - return { - projects: projectWithReplaySupport, - allProjects: projects, - supportedProjects: supported, - unsupportedProjects: unsupported, - currentProject, - setCurrentProject, - }; -} - -export default useCurrentProjectState; diff --git a/static/app/components/replaysOnboarding/utils.tsx b/static/app/components/replaysOnboarding/utils.tsx index 032a0b1575a4d2..53b9ec15ada10f 100644 --- a/static/app/components/replaysOnboarding/utils.tsx +++ b/static/app/components/replaysOnboarding/utils.tsx @@ -1,8 +1,6 @@ -import partition from 'lodash/partition'; - import {replayFrontendPlatforms, replayPlatforms} from 'sentry/data/platformCategories'; import platforms from 'sentry/data/platforms'; -import type {PlatformIntegration, PlatformKey, Project} from 'sentry/types'; +import type {PlatformIntegration, PlatformKey} from 'sentry/types'; export function generateDocKeys(platform: PlatformKey): string[] { const platformKey = platform.startsWith('javascript') @@ -17,16 +15,6 @@ export function 
isPlatformSupported(platform: undefined | PlatformIntegration) { return platform?.id ? replayPlatforms.includes(platform?.id) : false; } -export function splitProjectsByReplaySupport(projects: Project[]) { - const [supported, unsupported] = partition(projects, project => - replayPlatforms.includes(project.platform!) - ); - return { - supported, - unsupported, - }; -} - export const replayJsFrameworkOptions: PlatformIntegration[] = platforms.filter(p => replayFrontendPlatforms.includes(p.id) ); diff --git a/static/app/data/platformCategories.tsx b/static/app/data/platformCategories.tsx index 8737e46e1c8e96..bc76b683d4943f 100644 --- a/static/app/data/platformCategories.tsx +++ b/static/app/data/platformCategories.tsx @@ -517,17 +517,17 @@ const customMetricFrontendPlatforms: readonly PlatformKey[] = [ ]; // These are all the platforms that can set up custom metrics. -export const customMetricPlatforms: Set = new Set([ +export const customMetricPlatforms: readonly PlatformKey[] = [ ...customMetricFrontendPlatforms, ...customMetricBackendPlatforms, -]); +]; /** * The list of platforms for which we have created onboarding instructions. * Should be a subset of the list of `customMetricPlatforms`. 
*/ -export const customMetricOnboardingPlatforms = new Set( - [...customMetricPlatforms].filter( +export const customMetricOnboardingPlatforms: readonly PlatformKey[] = + customMetricPlatforms.filter( p => // Legacy platforms that do not have in-product docs ![ @@ -537,5 +537,4 @@ export const customMetricOnboardingPlatforms = new Set( 'python-pylons', 'python-tryton', ].includes(p) - ) -); + ); diff --git a/static/app/views/ddm/ddmOnboarding/sidebar.tsx b/static/app/views/ddm/ddmOnboarding/sidebar.tsx index 46dcacf2714b38..fbdcc26ec8d99a 100644 --- a/static/app/views/ddm/ddmOnboarding/sidebar.tsx +++ b/static/app/views/ddm/ddmOnboarding/sidebar.tsx @@ -7,10 +7,14 @@ import {LinkButton} from 'sentry/components/button'; import {CompactSelect} from 'sentry/components/compactSelect'; import IdBadge from 'sentry/components/idBadge'; import {SdkDocumentation} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation'; +import useCurrentProjectState from 'sentry/components/onboarding/gettingStartedDoc/utils/useCurrentProjectState'; import SidebarPanel from 'sentry/components/sidebar/sidebarPanel'; import type {CommonSidebarProps} from 'sentry/components/sidebar/types'; import {SidebarPanelKey} from 'sentry/components/sidebar/types'; -import {customMetricPlatforms} from 'sentry/data/platformCategories'; +import { + customMetricOnboardingPlatforms, + customMetricPlatforms, +} from 'sentry/data/platformCategories'; import platforms from 'sentry/data/platforms'; import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; @@ -18,8 +22,6 @@ import type {Project, SelectValue} from 'sentry/types'; import {METRICS_DOCS_URL} from 'sentry/utils/metrics/constants'; import useOrganization from 'sentry/utils/useOrganization'; -import {useCurrentProjectState} from './useCurrentProjectState'; - function MetricsOnboardingSidebar(props: CommonSidebarProps) { const {currentPanel, collapsed, hidePanel, orientation} = props; const organization = 
useOrganization(); @@ -36,7 +38,10 @@ function MetricsOnboardingSidebar(props: CommonSidebarProps) { unsupportedProjects, hasDocs, } = useCurrentProjectState({ - isActive, + currentPanel, + targetPanel: SidebarPanelKey.METRICS_ONBOARDING, + onboardingPlatforms: customMetricOnboardingPlatforms, + allPlatforms: customMetricPlatforms, }); const projectSelectOptions = useMemo(() => { @@ -150,7 +155,7 @@ function OnboardingContent({ : undefined; const supportsCustomMetrics = - currentProject.platform && customMetricPlatforms.has(currentProject.platform); + currentProject.platform && customMetricPlatforms.includes(currentProject.platform); if (!supportsCustomMetrics) { return ( diff --git a/static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx b/static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx deleted file mode 100644 index 3202ba7e8ea738..00000000000000 --- a/static/app/views/ddm/ddmOnboarding/useCurrentProjectState.tsx +++ /dev/null @@ -1,93 +0,0 @@ -import {useEffect, useMemo, useState} from 'react'; -import partition from 'lodash/partition'; - -import { - customMetricOnboardingPlatforms, - customMetricPlatforms, -} from 'sentry/data/platformCategories'; -import PageFiltersStore from 'sentry/stores/pageFiltersStore'; -import {useLegacyStore} from 'sentry/stores/useLegacyStore'; -import type {Project} from 'sentry/types'; -import useProjects from 'sentry/utils/useProjects'; - -export function useCurrentProjectState({isActive}: {isActive: boolean}) { - const [currentProject, setCurrentProject] = useState(undefined); - const {projects, initiallyLoaded: projectsLoaded} = useProjects(); - const {selection, isReady} = useLegacyStore(PageFiltersStore); - - const [supportedProjects, unsupportedProjects] = useMemo(() => { - return partition(projects, p => p.platform && customMetricPlatforms.has(p.platform)); - }, [projects]); - - // Projects where we have the onboarding instructions ready: - const projectsWithOnboarding = useMemo( - () => - 
supportedProjects.filter( - p => p.platform && customMetricOnboardingPlatforms.has(p.platform) - ), - [supportedProjects] - ); - - useEffect(() => { - if (!isActive) { - setCurrentProject(undefined); - } - }, [isActive]); - - useEffect(() => { - if (currentProject || !projectsLoaded || !projects.length || !isReady || !isActive) { - return; - } - - if (!supportedProjects) { - return; - } - - if (selection.projects.length) { - const selectedProjectIds = selection.projects.map(String); - // If we selected something that has onboarding instructions, pick that first - const projectWithOnboarding = projectsWithOnboarding.find(p => - selectedProjectIds.includes(p.id) - ); - if (projectWithOnboarding) { - setCurrentProject(projectWithOnboarding); - return; - } - - // If we selected something that supports custom metrics pick that - const projectSupportsMetrics = supportedProjects.find(p => - selectedProjectIds.includes(p.id) - ); - if (projectSupportsMetrics) { - setCurrentProject(projectSupportsMetrics); - return; - } - // Else pick the first selected project - const firstSelectedProject = projects.find(p => selectedProjectIds.includes(p.id)); - setCurrentProject(firstSelectedProject); - } else { - setCurrentProject(projectsWithOnboarding.at(0) || supportedProjects.at(0)); - } - }, [ - currentProject, - projectsLoaded, - projects, - isReady, - isActive, - selection.projects, - projectsWithOnboarding, - supportedProjects, - ]); - - return { - projects: supportedProjects, - hasDocs: - !!currentProject?.platform && - customMetricOnboardingPlatforms.has(currentProject.platform), - allProjects: projects, - supportedProjects, - unsupportedProjects, - currentProject, - setCurrentProject, - }; -} From 8485db2c08c744eea98b3cc28f4caf15eee78f01 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Wed, 6 Mar 2024 11:48:07 -0600 Subject: [PATCH 109/145] fix(metrics): Adjust bounding boxes based on operation (#66339) The bounding boxes right now always bound the metric on the min/max. 
But depending on the operation, we should look at different values to bound determine if it lies in the bounding box. For example, if graphing the min, we should ensure the min is with in the bounding box. --- .../api/endpoints/organization_metrics.py | 2 + .../events/datasets/metrics_summaries.py | 8 +++ .../sentry_metrics/querying/samples_list.py | 22 +++++-- .../endpoints/test_organization_metrics.py | 57 +++++++++++-------- 4 files changed, 59 insertions(+), 30 deletions(-) diff --git a/src/sentry/api/endpoints/organization_metrics.py b/src/sentry/api/endpoints/organization_metrics.py index 8fe041f0053d78..39038ffe4c82ea 100644 --- a/src/sentry/api/endpoints/organization_metrics.py +++ b/src/sentry/api/endpoints/organization_metrics.py @@ -474,6 +474,7 @@ class MetricsSamplesSerializer(serializers.Serializer): field = serializers.ListField(required=True, allow_empty=False, child=serializers.CharField()) max = serializers.FloatField(required=False) min = serializers.FloatField(required=False) + operation = serializers.CharField(required=False) query = serializers.CharField(required=False) referrer = serializers.CharField(required=False) sort = serializers.CharField(required=False) @@ -532,6 +533,7 @@ def get(self, request: Request, organization: Organization) -> Response: params, snuba_params, serialized["field"], + serialized.get("operation"), serialized.get("query", ""), serialized.get("min"), serialized.get("max"), diff --git a/src/sentry/search/events/datasets/metrics_summaries.py b/src/sentry/search/events/datasets/metrics_summaries.py index f6ce6160d0bc06..3f9f9ac7dcb1ac 100644 --- a/src/sentry/search/events/datasets/metrics_summaries.py +++ b/src/sentry/search/events/datasets/metrics_summaries.py @@ -31,6 +31,7 @@ def field_alias_converter(self) -> Mapping[str, Callable[[str], SelectType]]: return { constants.PROJECT_ALIAS: self._resolve_project_slug_alias, constants.PROJECT_NAME_ALIAS: self._resolve_project_slug_alias, + "avg_metric": 
self._resolve_avg_alias, } @property @@ -91,3 +92,10 @@ def _metric_filter_converter(self, search_filter: SearchFilter) -> WhereType | N def _resolve_project_slug_alias(self, alias: str) -> SelectType: return field_aliases.resolve_project_slug_alias(self.builder, alias) + + def _resolve_avg_alias(self, alias: str) -> SelectType: + return Function( + "divide", + [self.builder.column("sum_metric"), self.builder.column("count_metric")], + alias, + ) diff --git a/src/sentry/sentry_metrics/querying/samples_list.py b/src/sentry/sentry_metrics/querying/samples_list.py index d18821c0c4c8ff..1ce0f7f0c1ec00 100644 --- a/src/sentry/sentry_metrics/querying/samples_list.py +++ b/src/sentry/sentry_metrics/querying/samples_list.py @@ -39,6 +39,7 @@ def __init__( params: ParamsType, snuba_params: SnubaParams, fields: list[str], + operation: str | None, query: str | None, min: float | None, max: float | None, @@ -50,6 +51,7 @@ def __init__( self.params = params self.snuba_params = snuba_params self.fields = fields + self.operation = operation self.query = query self.min = min self.max = max @@ -596,6 +598,12 @@ class CustomSamplesListExecutor(AbstractSamplesListExecutor): "timestamp": "timestamp", } + MIN_MAX_CONDITION_COLUMN = { + "min": "min_metric", + "max": "max_metric", + "count": "count_metric", + } + @classmethod def convert_sort(cls, sort) -> tuple[Literal["", "-"], str] | None: direction: Literal["", "-"] = "" @@ -660,7 +668,7 @@ def get_sorted_span_keys( ) additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions() + min_max_conditions = self.get_min_max_conditions(builder) builder.add_conditions([*additional_conditions, *min_max_conditions]) query_results = builder.run_query(self.referrer.value) @@ -720,7 +728,7 @@ def get_unsorted_span_keys( ) additional_conditions = self.get_additional_conditions(builder) - min_max_conditions = self.get_min_max_conditions() + min_max_conditions = 
self.get_min_max_conditions(builder) builder.add_conditions([*additional_conditions, *min_max_conditions]) query_results = builder.run_query(self.referrer.value) @@ -762,13 +770,17 @@ def get_additional_conditions(self, builder: QueryBuilder) -> list[Condition]: ) ] - def get_min_max_conditions(self) -> list[Condition]: + def get_min_max_conditions(self, builder: QueryBuilder) -> list[Condition]: conditions = [] + column = builder.resolve_column( + self.MIN_MAX_CONDITION_COLUMN.get(self.operation or "", "avg_metric") + ) + if self.min is not None: - conditions.append(Condition(Column("min"), Op.GTE, self.min)) + conditions.append(Condition(column, Op.GTE, self.min)) if self.max is not None: - conditions.append(Condition(Column("max"), Op.LTE, self.max)) + conditions.append(Condition(column, Op.LTE, self.max)) return conditions diff --git a/tests/sentry/api/endpoints/test_organization_metrics.py b/tests/sentry/api/endpoints/test_organization_metrics.py index a036d56716d1b9..62725b9e8d7a5b 100644 --- a/tests/sentry/api/endpoints/test_organization_metrics.py +++ b/tests/sentry/api/endpoints/test_organization_metrics.py @@ -528,8 +528,8 @@ def test_custom_samples(self): { "min": val, "max": val, - "sum": val, - "count": 1, + "sum": val * (i + 1) * 2, + "count": (i + 1) * 2, "tags": {}, } ] @@ -556,27 +556,34 @@ def test_custom_samples(self): }, ) - query = { - "mri": mri, - "field": ["id"], - "project": [self.project.id], - "statsPeriod": "14d", - "min": 150.0, - "max": 250.0, - } - response = self.do_request(query) - assert response.status_code == 200, response.data - expected = {int(good_span_id, 16)} - actual = {int(row["id"], 16) for row in response.data["data"]} - assert actual == expected - - for row in response.data["data"]: - assert row["summary"] == { - "min": 200.0, - "max": 200.0, - "sum": 200.0, - "count": 1, + for operation, min_bound, max_bound in [ + ("avg", 150.0, 250.0), + ("min", 150.0, 250.0), + ("max", 150.0, 250.0), + ("count", 3, 5), + ]: + query 
= { + "mri": mri, + "field": ["id"], + "project": [self.project.id], + "statsPeriod": "14d", + "min": min_bound, + "max": max_bound, + "operation": operation, } + response = self.do_request(query) + assert response.status_code == 200, (operation, response.data) + expected = {int(good_span_id, 16)} + actual = {int(row["id"], 16) for row in response.data["data"]} + assert actual == expected, operation + + for row in response.data["data"]: + assert row["summary"] == { + "min": 200.0, + "max": 200.0, + "sum": 800.0, + "count": 4, + }, operation query = { "mri": mri, @@ -591,10 +598,10 @@ def test_custom_samples(self): actual = {int(row["id"], 16) for row in response.data["data"]} assert actual == expected - for val, row in zip(reversed(values), response.data["data"]): + for i, (val, row) in enumerate(zip(reversed(values), response.data["data"])): assert row["summary"] == { "min": val, "max": val, - "sum": val, - "count": 1, + "sum": val * (len(values) - i) * 2, + "count": (len(values) - i) * 2, } From 90b2ace7824c4cbc2470eacbf7693efa1b8420d1 Mon Sep 17 00:00:00 2001 From: Seiji Chew <67301797+schew2381@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:01:58 -0800 Subject: [PATCH 110/145] chore(superuser): Add loggers to _needs_validation (#66364) There's a bug where we're no longer requiring superuser access category and reason validation, which is caused by https://github.com/getsentry/sentry/pull/66043/files. I want to validate what those booleans are in order to find out if this is the cause of the bug. 
--- src/sentry/api/endpoints/auth_index.py | 5 ----- src/sentry/auth/superuser.py | 10 +++++++++- tests/sentry/auth/test_superuser.py | 4 ++-- 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/src/sentry/api/endpoints/auth_index.py b/src/sentry/api/endpoints/auth_index.py index b89f97c61e8f91..dfd764a6e30eff 100644 --- a/src/sentry/api/endpoints/auth_index.py +++ b/src/sentry/api/endpoints/auth_index.py @@ -1,6 +1,5 @@ import logging -from django.conf import settings from django.contrib.auth import logout from django.contrib.auth.models import AnonymousUser from django.utils.http import url_has_allowed_host_and_scheme @@ -33,10 +32,6 @@ PREFILLED_SU_MODAL_KEY = "prefilled_su_modal" -DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL = getattr( - settings, "DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL", False -) - @control_silo_endpoint class BaseAuthIndexEndpoint(Endpoint): diff --git a/src/sentry/auth/superuser.py b/src/sentry/auth/superuser.py index f2f15518f24048..5747c7ea55c556 100644 --- a/src/sentry/auth/superuser.py +++ b/src/sentry/auth/superuser.py @@ -179,7 +179,15 @@ def __init__(self, request, allowed_ips=UNSET, org_id=UNSET, current_datetime=No @staticmethod def _needs_validation(): - if is_self_hosted() or DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL: + self_hosted = is_self_hosted() + logger.info( + "superuser.needs-validation", + extra={ + "DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL": DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL, + "self_hosted": self_hosted, + }, + ) + if self_hosted or DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL: return False return settings.VALIDATE_SUPERUSER_ACCESS_CATEGORY_AND_REASON diff --git a/tests/sentry/auth/test_superuser.py b/tests/sentry/auth/test_superuser.py index 4aa977d2f34142..94dee1ed58c121 100644 --- a/tests/sentry/auth/test_superuser.py +++ b/tests/sentry/auth/test_superuser.py @@ -191,7 +191,7 @@ def test_su_access_logs(self, logger): superuser = Superuser(request, org_id=None) superuser.set_logged_in(request.user) assert superuser.is_active is True - 
assert logger.info.call_count == 2 + assert logger.info.call_count == 3 logger.info.assert_any_call( "superuser.superuser_access", extra={ @@ -411,7 +411,7 @@ def test_superuser_session_doesnt_need_validation_superuser_prompts(self, logger superuser = Superuser(request, org_id=None) superuser.set_logged_in(request.user) assert superuser.is_active is True - assert logger.info.call_count == 1 + assert logger.info.call_count == 2 logger.info.assert_any_call( "superuser.logged-in", extra={"ip_address": "127.0.0.1", "user_id": user.id}, From ff5e9d2a13cc47425324b9ddc4632e299c0c70ab Mon Sep 17 00:00:00 2001 From: edwardgou-sentry <83961295+edwardgou-sentry@users.noreply.github.com> Date: Wed, 6 Mar 2024 13:09:06 -0500 Subject: [PATCH 111/145] feat(discover): allows filtering metric queries on span.op (#66423) Adds `span.op` to `DEFAULT_METRIC_TAGS` so that it is available in discover metrics queries. --- src/sentry/search/events/constants.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/sentry/search/events/constants.py b/src/sentry/search/events/constants.py index 761de77c59d51c..ebf18f77d23fa7 100644 --- a/src/sentry/search/events/constants.py +++ b/src/sentry/search/events/constants.py @@ -316,6 +316,7 @@ class ThresholdDict(TypedDict): "transaction.method", "transaction.op", "transaction.status", + "span.op", } SPAN_METRICS_MAP = { "user": "s:spans/user@none", From 8bf591b9a16258bd7a689f0737c0d7dadf09e427 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:20:04 -0800 Subject: [PATCH 112/145] ref(replay): see full replay in rage click issue goes to breadcrumbs tab (#66374) From a rage click issue, "see full replay" takes you to the breadcrumbs tab with the rage & dead click filter selected. Relates to https://github.com/getsentry/team-replay/issues/394. 
Temp solution until we can load in rage click issues to the errors tab https://github.com/getsentry/sentry/assets/56095982/cda9511d-8458-4fc4-a506-0b36ed217d51 --- static/app/components/events/eventReplay/index.tsx | 1 + .../events/eventReplay/replayClipPreview.tsx | 11 ++++++++++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/static/app/components/events/eventReplay/index.tsx b/static/app/components/events/eventReplay/index.tsx index 087aa25223b74c..51a1949b68d113 100644 --- a/static/app/components/events/eventReplay/index.tsx +++ b/static/app/components/events/eventReplay/index.tsx @@ -110,6 +110,7 @@ function EventReplayContent({ {...commonProps} component={replayClipPreview} clipOffsets={CLIP_OFFSETS} + issueCategory={group?.issueCategory} /> ) : ( diff --git a/static/app/components/events/eventReplay/replayClipPreview.tsx b/static/app/components/events/eventReplay/replayClipPreview.tsx index eba3ea329b980f..de444df33da8c1 100644 --- a/static/app/components/events/eventReplay/replayClipPreview.tsx +++ b/static/app/components/events/eventReplay/replayClipPreview.tsx @@ -28,6 +28,7 @@ import TimeAndScrubberGrid from 'sentry/components/replays/timeAndScrubberGrid'; import {IconDelete} from 'sentry/icons'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; +import {IssueCategory} from 'sentry/types'; import EventView from 'sentry/utils/discover/eventView'; import getRouteStringFromRoutes from 'sentry/utils/getRouteStringFromRoutes'; import {TabKey} from 'sentry/utils/replays/hooks/useActiveReplayTab'; @@ -57,6 +58,7 @@ type Props = { replaySlug: string; focusTab?: TabKey; fullReplayButtonProps?: Partial>; + issueCategory?: IssueCategory; }; function getReplayAnalyticsStatus({ @@ -85,10 +87,12 @@ function ReplayPreviewPlayer({ replayId, fullReplayButtonProps, replayRecord, + issueCategory, }: { replayId: string; replayRecord: ReplayRecord; fullReplayButtonProps?: Partial>; + issueCategory?: IssueCategory; }) { const routes = 
useRoutes(); const location = useLocation(); @@ -104,12 +108,15 @@ function ReplayPreviewPlayer({ const isFullscreen = useIsFullscreen(); const startOffsetMs = replay?.getStartOffsetMs() ?? 0; + const isRageClickIssue = issueCategory === IssueCategory.REPLAY; + const fullReplayUrl = { pathname: normalizeUrl(`/organizations/${organization.slug}/replays/${replayId}/`), query: { referrer: getRouteStringFromRoutes(routes), - t_main: TabKey.ERRORS, + t_main: isRageClickIssue ? TabKey.BREADCRUMBS : TabKey.ERRORS, t: (currentTime + startOffsetMs) / 1000, + f_b_type: isRageClickIssue ? 'rageOrDead' : undefined, }, }; @@ -169,6 +176,7 @@ function ReplayClipPreview({ orgSlug, replaySlug, fullReplayButtonProps, + issueCategory, }: Props) { const clipWindow = useMemo( () => ({ @@ -239,6 +247,7 @@ function ReplayClipPreview({ replayId={replayId} fullReplayButtonProps={fullReplayButtonProps} replayRecord={replayRecord} + issueCategory={issueCategory} /> )} From a5e235ccca82b951458faf5b3615390f2c7e6e51 Mon Sep 17 00:00:00 2001 From: George Gritsouk <989898+gggritso@users.noreply.github.com> Date: Wed, 6 Mar 2024 13:22:45 -0500 Subject: [PATCH 113/145] ref(perf): Remove usage of generic-ish `SpanMetricsRibbon` (#66415) Another refactor peeled away from #66229. The `SpanMetricsRibbon` component is used in two places: 1. The top right of the Query Summary page 2. The completely unused generic span summary page The ribbon is a little silly because it supports just these two use cases! This is much better done using the new `MetricReadout` by using it directly in the parent. No visual changes. 
--- .../database/databaseSpanSummaryPage.tsx | 41 ++++++++++++-- .../app/views/performance/metricReadout.tsx | 12 ++++- .../components/tableCells/timeSpentCell.tsx | 25 +++++---- .../spanMetricsRibbon.spec.tsx | 22 -------- .../spanSummaryPage/spanMetricsRibbon.tsx | 54 ------------------- .../views/spanSummaryPage/spanSummaryView.tsx | 42 +++++++++++++-- 6 files changed, 102 insertions(+), 94 deletions(-) delete mode 100644 static/app/views/starfish/views/spanSummaryPage/spanMetricsRibbon.spec.tsx delete mode 100644 static/app/views/starfish/views/spanSummaryPage/spanMetricsRibbon.tsx diff --git a/static/app/views/performance/database/databaseSpanSummaryPage.tsx b/static/app/views/performance/database/databaseSpanSummaryPage.tsx index f3cfcfa8a770f6..9081c09db57f28 100644 --- a/static/app/views/performance/database/databaseSpanSummaryPage.tsx +++ b/static/app/views/performance/database/databaseSpanSummaryPage.tsx @@ -10,25 +10,27 @@ import {EnvironmentPageFilter} from 'sentry/components/organizations/environment import PageFilterBar from 'sentry/components/organizations/pageFilterBar'; import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import type {Sort} from 'sentry/utils/discover/fields'; +import {DurationUnit, RateUnit, type Sort} from 'sentry/utils/discover/fields'; import {useLocation} from 'sentry/utils/useLocation'; import useOrganization from 'sentry/utils/useOrganization'; import {normalizeUrl} from 'sentry/utils/withDomainRequired'; import {DurationChart} from 'sentry/views/performance/database/durationChart'; import {ThroughputChart} from 'sentry/views/performance/database/throughputChart'; import {useSelectedDurationAggregate} from 'sentry/views/performance/database/useSelectedDurationAggregate'; +import {MetricReadout} from 'sentry/views/performance/metricReadout'; import * as ModuleLayout from 'sentry/views/performance/moduleLayout'; import {ModulePageProviders} from 'sentry/views/performance/modulePageProviders'; import 
{useSynchronizeCharts} from 'sentry/views/starfish/components/chart'; import {DatabaseSpanDescription} from 'sentry/views/starfish/components/spanDescription'; +import {getTimeSpentExplanation} from 'sentry/views/starfish/components/tableCells/timeSpentCell'; import {useSpanMetrics} from 'sentry/views/starfish/queries/useSpanMetrics'; import {useSpanMetricsSeries} from 'sentry/views/starfish/queries/useSpanMetricsSeries'; import type {SpanMetricsQueryFilters} from 'sentry/views/starfish/types'; import {SpanFunction, SpanMetricsField} from 'sentry/views/starfish/types'; import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters'; +import {DataTitles, getThroughputTitle} from 'sentry/views/starfish/views/spans/types'; import {useModuleSort} from 'sentry/views/starfish/views/spans/useModuleSort'; import {SampleList} from 'sentry/views/starfish/views/spanSummaryPage/sampleList'; -import {SpanMetricsRibbon} from 'sentry/views/starfish/views/spanSummaryPage/spanMetricsRibbon'; import {SpanTransactionsTable} from 'sentry/views/starfish/views/spanSummaryPage/spanTransactionsTable'; type Query = { @@ -64,7 +66,7 @@ function SpanSummaryPage({params}: Props) { const sort = useModuleSort(QueryParameterNames.ENDPOINTS_SORT, DEFAULT_SORT); - const {data} = useSpanMetrics({ + const {data, isLoading: areSpanMetricsLoading} = useSpanMetrics({ filters, fields: [ SpanMetricsField.SPAN_OP, @@ -157,7 +159,32 @@ function SpanSummaryPage({params}: Props) { - + + + + + + + @@ -239,4 +266,10 @@ const DescriptionContainer = styled(ModuleLayout.Full)` line-height: 1.2; `; +const MetricsRibbon = styled('div')` + display: flex; + flex-wrap: wrap; + gap: ${space(4)}; +`; + export default SpanSummaryPage; diff --git a/static/app/views/performance/metricReadout.tsx b/static/app/views/performance/metricReadout.tsx index b3b411a518887f..d0d0ea0b2466d5 100644 --- a/static/app/views/performance/metricReadout.tsx +++ b/static/app/views/performance/metricReadout.tsx @@ -33,7 +33,11 
@@ export function MetricReadout(props: Props) { function ReadoutContent({unit, value, tooltip, align = 'right', isLoading}: Props) { if (isLoading) { - return ; + return ( + + + + ); } if (!defined(value)) { @@ -98,6 +102,12 @@ const NumberContainer = styled('div')<{align: 'left' | 'right'}>` font-variant-numeric: tabular-nums; `; +const LoadingContainer = styled('div')<{align: 'left' | 'right'}>` + display: flex; + justify-content: ${p => (p.align === 'right' ? 'flex-end' : 'flex-start')}; + align-items: center; +`; + function isARateUnit(unit: string): unit is RateUnit { return (Object.values(RateUnit) as string[]).includes(unit); } diff --git a/static/app/views/starfish/components/tableCells/timeSpentCell.tsx b/static/app/views/starfish/components/tableCells/timeSpentCell.tsx index 2a3fe8ff539cfb..23cc830bd39a78 100644 --- a/static/app/views/starfish/components/tableCells/timeSpentCell.tsx +++ b/static/app/views/starfish/components/tableCells/timeSpentCell.tsx @@ -22,9 +22,22 @@ interface Props { } export function TimeSpentCell({percentage, total, op, containerProps}: Props) { - const formattedPercentage = formatPercentage(clamp(percentage ?? 0, 0, 1)); const formattedTotal = getDuration((total ?? 0) / 1000, 2, true); - const tooltip = tct( + const tooltip = percentage ? getTimeSpentExplanation(percentage, op) : undefined; + + return ( + + + {defined(total) ? formattedTotal : '--'} + + + ); +} + +export function getTimeSpentExplanation(percentage: number, op?: string) { + const formattedPercentage = formatPercentage(clamp(percentage ?? 0, 0, 1)); + + return tct( 'The application spent [percentage] of its total time on this [span]. Read more about Time Spent in our [documentation:documentation].', { percentage: formattedPercentage, @@ -34,12 +47,4 @@ export function TimeSpentCell({percentage, total, op, containerProps}: Props) { ), } ); - - return ( - - - {defined(total) ? 
formattedTotal : '--'} - - - ); } diff --git a/static/app/views/starfish/views/spanSummaryPage/spanMetricsRibbon.spec.tsx b/static/app/views/starfish/views/spanSummaryPage/spanMetricsRibbon.spec.tsx deleted file mode 100644 index 84fb8f57a30338..00000000000000 --- a/static/app/views/starfish/views/spanSummaryPage/spanMetricsRibbon.spec.tsx +++ /dev/null @@ -1,22 +0,0 @@ -import {render, screen} from 'sentry-test/reactTestingLibrary'; - -import {SpanFunction, SpanMetricsField} from 'sentry/views/starfish/types'; -import {SpanMetricsRibbon} from 'sentry/views/starfish/views/spanSummaryPage/spanMetricsRibbon'; - -describe('SpanMetricsRibbon', function () { - const sampleMetrics = { - [SpanMetricsField.SPAN_OP]: 'db', - [`${SpanFunction.SPM}()`]: 17.8, - [`avg(${SpanMetricsField.SPAN_SELF_TIME})`]: 127.1, - [`sum(${SpanMetricsField.SPAN_SELF_TIME})`]: 1172319, - [`${SpanFunction.TIME_SPENT_PERCENTAGE}()`]: 0.002, - }; - - it('renders basic metrics', function () { - render(); - - expect(screen.getByText('17.8/min')).toBeInTheDocument(); - expect(screen.getByText('127.10ms')).toBeInTheDocument(); - expect(screen.getByText('19.54min')).toBeInTheDocument(); - }); -}); diff --git a/static/app/views/starfish/views/spanSummaryPage/spanMetricsRibbon.tsx b/static/app/views/starfish/views/spanSummaryPage/spanMetricsRibbon.tsx deleted file mode 100644 index 24a757242e924e..00000000000000 --- a/static/app/views/starfish/views/spanSummaryPage/spanMetricsRibbon.tsx +++ /dev/null @@ -1,54 +0,0 @@ -import {t} from 'sentry/locale'; -import {RateUnit} from 'sentry/utils/discover/fields'; -import {CountCell} from 'sentry/views/starfish/components/tableCells/countCell'; -import {DurationCell} from 'sentry/views/starfish/components/tableCells/durationCell'; -import {ThroughputCell} from 'sentry/views/starfish/components/tableCells/throughputCell'; -import {TimeSpentCell} from 'sentry/views/starfish/components/tableCells/timeSpentCell'; -import {SpanFunction, SpanMetricsField} from 
'sentry/views/starfish/types'; -import {DataTitles, getThroughputTitle} from 'sentry/views/starfish/views/spans/types'; -import {Block, BlockContainer} from 'sentry/views/starfish/views/spanSummaryPage/block'; - -interface Props { - spanMetrics: { - [SpanMetricsField.SPAN_OP]?: string; - [SpanMetricsField.SPAN_DESCRIPTION]?: string; - [SpanMetricsField.SPAN_ACTION]?: string; - [SpanMetricsField.SPAN_DOMAIN]?: string[]; - [SpanMetricsField.SPAN_GROUP]?: string; - }; -} - -export function SpanMetricsRibbon({spanMetrics}: Props) { - const op = spanMetrics?.[SpanMetricsField.SPAN_OP] ?? ''; - - return ( - - - - - - - - - - {op.startsWith('http') && ( - - - - )} - - - - - - ); -} diff --git a/static/app/views/starfish/views/spanSummaryPage/spanSummaryView.tsx b/static/app/views/starfish/views/spanSummaryPage/spanSummaryView.tsx index 0a8781e1f9188e..9af1829ba288d2 100644 --- a/static/app/views/starfish/views/spanSummaryPage/spanSummaryView.tsx +++ b/static/app/views/starfish/views/spanSummaryPage/spanSummaryView.tsx @@ -1,15 +1,18 @@ import {Fragment} from 'react'; import styled from '@emotion/styled'; +import {t} from 'sentry/locale'; import {space} from 'sentry/styles/space'; -import {RateUnit} from 'sentry/utils/discover/fields'; +import {DurationUnit, RateUnit} from 'sentry/utils/discover/fields'; import {formatRate} from 'sentry/utils/formatters'; import {useLocation} from 'sentry/utils/useLocation'; +import {MetricReadout} from 'sentry/views/performance/metricReadout'; import {AVG_COLOR, ERRORS_COLOR, THROUGHPUT_COLOR} from 'sentry/views/starfish/colours'; import Chart, {useSynchronizeCharts} from 'sentry/views/starfish/components/chart'; import ChartPanel from 'sentry/views/starfish/components/chartPanel'; import StarfishDatePicker from 'sentry/views/starfish/components/datePicker'; import {SpanDescription} from 'sentry/views/starfish/components/spanDescription'; +import {getTimeSpentExplanation} from 'sentry/views/starfish/components/tableCells/timeSpentCell'; 
import {useSpanMetrics} from 'sentry/views/starfish/queries/useSpanMetrics'; import {useSpanMetricsSeries} from 'sentry/views/starfish/queries/useSpanMetricsSeries'; import type {SpanMetricsQueryFilters} from 'sentry/views/starfish/types'; @@ -17,9 +20,9 @@ import {SpanFunction, SpanMetricsField} from 'sentry/views/starfish/types'; import { DataTitles, getThroughputChartTitle, + getThroughputTitle, } from 'sentry/views/starfish/views/spans/types'; import {Block, BlockContainer} from 'sentry/views/starfish/views/spanSummaryPage/block'; -import {SpanMetricsRibbon} from 'sentry/views/starfish/views/spanSummaryPage/spanMetricsRibbon'; const CHART_HEIGHT = 160; @@ -84,6 +87,8 @@ export function SpanSummaryView({groupId}: Props) { [SpanMetricsField.SPAN_GROUP]: string; }; + const op = span?.[SpanMetricsField.SPAN_OP] ?? ''; + const {isLoading: areSpanMetricsSeriesLoading, data: spanMetricsSeriesData} = useSpanMetricsSeries({ filters: {'span.group': groupId, ...seriesQueryFilter}, @@ -107,7 +112,38 @@ export function SpanSummaryView({groupId}: Props) { - + + + + + + {op.startsWith('http') && ( + + )} + + + {span?.[SpanMetricsField.SPAN_DESCRIPTION] && ( From 300a6045bd2e4ea2a5993d8c757acbd9d5ead4bb Mon Sep 17 00:00:00 2001 From: Lyn Nagara Date: Wed, 6 Mar 2024 10:28:11 -0800 Subject: [PATCH 114/145] ref: Remove old style topic definition from ingest consumer code (#66352) settings.KAFKA_INGEST_EVENTS, settings.KAFKA_INGEST_ATTACHMENTS and settings.KAFKA_INGEST_TRANSACTIONS are deprecated and will be removed. 
--- src/sentry/ingest/types.py | 16 ---------------- .../test_ingest_consumer_kafka.py | 10 +++++++--- 2 files changed, 7 insertions(+), 19 deletions(-) diff --git a/src/sentry/ingest/types.py b/src/sentry/ingest/types.py index f0dd0c6ad36bf5..0f07507dd8ef43 100644 --- a/src/sentry/ingest/types.py +++ b/src/sentry/ingest/types.py @@ -6,19 +6,3 @@ class ConsumerType: Events = "events" # consumes simple events ( from the Events topic) Attachments = "attachments" # consumes events with attachments ( from the Attachments topic) Transactions = "transactions" # consumes transaction events ( from the Transactions topic) - - @staticmethod - def all(): - return (ConsumerType.Events, ConsumerType.Attachments, ConsumerType.Transactions) - - @staticmethod - def get_topic_name(consumer_type): - from django.conf import settings - - if consumer_type == ConsumerType.Events: - return settings.KAFKA_INGEST_EVENTS - elif consumer_type == ConsumerType.Attachments: - return settings.KAFKA_INGEST_ATTACHMENTS - elif consumer_type == ConsumerType.Transactions: - return settings.KAFKA_INGEST_TRANSACTIONS - raise ValueError("Invalid consumer type", consumer_type) diff --git a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py index 2654e9aa85276e..38fecef6aa9978 100644 --- a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py +++ b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py @@ -9,14 +9,15 @@ from django.conf import settings from sentry import eventstore +from sentry.conf.types.kafka_definition import Topic from sentry.consumers import get_stream_processor from sentry.event_manager import EventManager from sentry.eventstore.processing import event_processing_store -from sentry.ingest.types import ConsumerType from sentry.testutils.pytest.fixtures import django_db_all from sentry.testutils.skips import requires_kafka, requires_snuba from sentry.utils import json from 
sentry.utils.batching_kafka_consumer import create_topics +from sentry.utils.kafka_config import get_topic_definition pytestmark = [requires_snuba, requires_kafka] @@ -101,7 +102,9 @@ def test_ingest_consumer_reads_from_topic_and_calls_celery_task( get_test_message, random_group_id, ): - topic_event_name = ConsumerType.get_topic_name(ConsumerType.Events) + + topic = Topic.INGEST_EVENTS + topic_event_name = get_topic_definition(topic)["real_topic_name"] admin = kafka_admin(settings) admin.delete_topic(topic_event_name) @@ -157,7 +160,8 @@ def test_ingest_consumer_gets_event_unstuck( get_test_message, random_group_id, ): - topic_event_name = ConsumerType.get_topic_name(ConsumerType.Events) + topic = Topic.INGEST_EVENTS + topic_event_name = get_topic_definition(topic)["real_topic_name"] admin = kafka_admin(settings) admin.delete_topic(topic_event_name) From c7e9a435e15989ca3b2087a4cd5a948dd1b62277 Mon Sep 17 00:00:00 2001 From: Michelle Zhang <56095982+michellewzhang@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:49:36 -0800 Subject: [PATCH 115/145] feat(feedback/issues): add sidebar to issue details UF empty state (#66351) - add a sidebar popup when "set up now" is clicked - add crash report modal onboarding config for javascript with widget callout SCR-20240305-ocgr --- .../feedback/feedbackOnboarding/sidebar.tsx | 18 +++++--- .../feedback/feedbackSetupPanel.tsx | 8 +++- .../feedback/useFeedbackOnboarding.tsx | 19 +++++++-- .../feedback/widgetCallout.tsx | 16 ++++++++ .../onboarding/gettingStartedDoc/types.ts | 2 + .../utils/feedbackOnboarding.tsx | 41 +++++++++++++++++++ .../javascript/javascript.tsx | 30 ++++++++++++++ .../analytics/feedbackAnalyticsEvents.tsx | 2 +- .../views/userFeedback/userFeedbackEmpty.tsx | 30 ++++++++++---- 9 files changed, 148 insertions(+), 18 deletions(-) create mode 100644 static/app/components/onboarding/gettingStartedDoc/feedback/widgetCallout.tsx diff --git a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx 
b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx index 3094025285d62d..8882ab74f54b1f 100644 --- a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx +++ b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx @@ -9,6 +9,7 @@ import {Button} from 'sentry/components/button'; import {CompactSelect} from 'sentry/components/compactSelect'; import {FeedbackOnboardingLayout} from 'sentry/components/feedback/feedbackOnboarding/feedbackOnboardingLayout'; import useLoadFeedbackOnboardingDoc from 'sentry/components/feedback/feedbackOnboarding/useLoadFeedbackOnboardingDoc'; +import {CRASH_REPORT_HASH} from 'sentry/components/feedback/useFeedbackOnboarding'; import RadioGroup from 'sentry/components/forms/controls/radioGroup'; import IdBadge from 'sentry/components/idBadge'; import LoadingIndicator from 'sentry/components/loadingIndicator'; @@ -34,6 +35,7 @@ import {t, tct} from 'sentry/locale'; import {space} from 'sentry/styles/space'; import type {PlatformKey, Project, SelectValue} from 'sentry/types'; import useOrganization from 'sentry/utils/useOrganization'; +import {useRouteContext} from 'sentry/utils/useRouteContext'; import useUrlParams from 'sentry/utils/useUrlParams'; function FeedbackOnboardingSidebar(props: CommonSidebarProps) { @@ -152,6 +154,8 @@ function OnboardingContent({currentProject}: {currentProject: Project}) { }>(jsFrameworkSelectOptions[0]); const defaultTab = 'npm'; + const {location} = useRouteContext(); + const crashReportOnboarding = location.hash === CRASH_REPORT_HASH; const {getParamValue: setupMode, setParamValue: setSetupMode} = useUrlParams( 'mode', @@ -173,9 +177,9 @@ function OnboardingContent({currentProject}: {currentProject: Project}) { .filter(p => p !== 'javascript') .includes(currentPlatform.id); - const showRadioButtons = replayJsLoaderInstructionsPlatformList.includes( - currentPlatform.id - ); + const showRadioButtons = + replayJsLoaderInstructionsPlatformList.includes(currentPlatform.id) && + 
!crashReportOnboarding; function getJsFramework() { return ( @@ -202,7 +206,7 @@ function OnboardingContent({currentProject}: {currentProject: Project}) { projectSlug: currentProject.slug, }); - if (webApiPlatform) { + if (webApiPlatform && !crashReportOnboarding) { return ; } @@ -250,7 +254,8 @@ function OnboardingContent({currentProject}: {currentProject: Project}) { /> ) : ( newDocs?.platformOptions && - widgetPlatform && ( + widgetPlatform && + !crashReportOnboarding && ( {tct("I'm using [platformSelect]", { platformSelect: ( @@ -300,6 +305,9 @@ function OnboardingContent({currentProject}: {currentProject: Project}) { } function getConfig() { + if (crashReportOnboarding) { + return 'crashReportOnboarding'; + } if (crashApiPlatform) { return 'feedbackOnboardingCrashApi'; } diff --git a/static/app/components/feedback/feedbackSetupPanel.tsx b/static/app/components/feedback/feedbackSetupPanel.tsx index 9c9f1530c5b1d8..9c7bda62c43014 100644 --- a/static/app/components/feedback/feedbackSetupPanel.tsx +++ b/static/app/components/feedback/feedbackSetupPanel.tsx @@ -37,7 +37,13 @@ export default function FeedbackSetupPanel() { )}

{hasNewOnboarding ? ( - ) : ( diff --git a/static/app/components/feedback/useFeedbackOnboarding.tsx b/static/app/components/feedback/useFeedbackOnboarding.tsx index 024c0fec28c936..cfe1e0f5ddb573 100644 --- a/static/app/components/feedback/useFeedbackOnboarding.tsx +++ b/static/app/components/feedback/useFeedbackOnboarding.tsx @@ -7,6 +7,9 @@ import useSelectedProjectsHaveField from 'sentry/utils/project/useSelectedProjec import useOrganization from 'sentry/utils/useOrganization'; import {useRouteContext} from 'sentry/utils/useRouteContext'; +export const CRASH_REPORT_HASH = '#crashreport-sidequest'; +export const FEEDBACK_HASH = '#feedback-sidequest'; + export default function useHaveSelectedProjectsSetupFeedback() { const {hasField: hasSetupOneFeedback, fetching} = useSelectedProjectsHaveField('hasFeedbacks'); @@ -24,8 +27,9 @@ export function useFeedbackOnboardingSidebarPanel() { const organization = useOrganization(); useEffect(() => { - if (location.hash === '#feedback-sidequest') { + if (location.hash === FEEDBACK_HASH || location.hash === CRASH_REPORT_HASH) { SidebarPanelStore.activatePanel(SidebarPanelKey.FEEDBACK_ONBOARDING); + // this tracks clicks from both feedback index and issue details feedback tab trackAnalytics('feedback.list-view-setup-sidebar', { organization, }); @@ -34,9 +38,18 @@ export function useFeedbackOnboardingSidebarPanel() { const activateSidebar = useCallback((event: {preventDefault: () => void}) => { event.preventDefault(); - window.location.hash = 'feedback-sidequest'; + window.location.hash = FEEDBACK_HASH; SidebarPanelStore.activatePanel(SidebarPanelKey.FEEDBACK_ONBOARDING); }, []); - return {activateSidebar}; + const activateSidebarIssueDetails = useCallback( + (event: {preventDefault: () => void}) => { + event.preventDefault(); + window.location.hash = CRASH_REPORT_HASH; + SidebarPanelStore.activatePanel(SidebarPanelKey.FEEDBACK_ONBOARDING); + }, + [] + ); + + return {activateSidebar, activateSidebarIssueDetails}; } diff --git 
a/static/app/components/onboarding/gettingStartedDoc/feedback/widgetCallout.tsx b/static/app/components/onboarding/gettingStartedDoc/feedback/widgetCallout.tsx new file mode 100644 index 00000000000000..0d5bad506b7c14 --- /dev/null +++ b/static/app/components/onboarding/gettingStartedDoc/feedback/widgetCallout.tsx @@ -0,0 +1,16 @@ +import Alert from 'sentry/components/alert'; +import ExternalLink from 'sentry/components/links/externalLink'; +import {tct} from 'sentry/locale'; + +export default function widgetCallout({link}: {link: string}) { + return ( + + {tct( + `Want to receive user feedback at any time, not just when an error happens? [link:Read the docs] to learn how to set up our customizable widget.`, + { + link: , + } + )} + + ); +} diff --git a/static/app/components/onboarding/gettingStartedDoc/types.ts b/static/app/components/onboarding/gettingStartedDoc/types.ts index a1be3b5c67f279..5c0eb1ed0363bf 100644 --- a/static/app/components/onboarding/gettingStartedDoc/types.ts +++ b/static/app/components/onboarding/gettingStartedDoc/types.ts @@ -80,6 +80,7 @@ export interface OnboardingConfig< export interface Docs { onboarding: OnboardingConfig; + crashReportOnboarding?: OnboardingConfig; customMetricsOnboarding?: OnboardingConfig; feedbackOnboardingCrashApi?: OnboardingConfig; feedbackOnboardingNpm?: OnboardingConfig; @@ -92,6 +93,7 @@ export type ConfigType = | 'onboarding' | 'feedbackOnboardingNpm' | 'feedbackOnboardingCrashApi' + | 'crashReportOnboarding' | 'replayOnboardingNpm' | 'replayOnboardingJsLoader' | 'customMetricsOnboarding'; diff --git a/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx b/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx index a16ffc3d9b77bc..c5ec3080a2ba2a 100644 --- a/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx +++ b/static/app/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding.tsx @@ -82,3 +82,44 @@ export const 
getFeedbackConfigOptions = ({ } return options.join('\n'); }; + +export const getCrashReportModalIntroduction = () => + t( + 'Collect feedback on your errors by installing our crash-report modal. This allows users to submit feedback after they experience an error via an automatic modal that pops up after an error occurs. The default modal will prompt the user for their name, email address, and description of what occurred.' + ); + +export const getCrashReportModalInstallDescriptionJavaScript = () => + tct( + 'You can collect feedback at the time the event is sent, using [code:beforeSend].', + {code: } + ); + +export const getCrashReportModalConfigDescription = ({link}: {link: string}) => + tct( + 'There are many options you can pass to the [code:Sentry.showReportDialog] call to customize your form. Learn more about configuring the modal by reading the [link:configuration docs].', + {code: , link: } + ); + +export const getCrashReportModalSnippetJavaScript = params => [ + { + code: [ + { + label: 'HTML', + value: 'html', + language: 'html', + code: ``, + }, + ], + }, +]; diff --git a/static/app/gettingStartedDocs/javascript/javascript.tsx b/static/app/gettingStartedDocs/javascript/javascript.tsx index ab7819f2986ff7..5513972f934aa1 100644 --- a/static/app/gettingStartedDocs/javascript/javascript.tsx +++ b/static/app/gettingStartedDocs/javascript/javascript.tsx @@ -1,4 +1,5 @@ import crashReportCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/crashReportCallout'; +import widgetCallout from 'sentry/components/onboarding/gettingStartedDoc/feedback/widgetCallout'; import TracePropagationMessage from 'sentry/components/onboarding/gettingStartedDoc/replay/tracePropagationMessage'; import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step'; import type { @@ -8,6 +9,10 @@ import type { } from 'sentry/components/onboarding/gettingStartedDoc/types'; import {getUploadSourceMapsStep} from 
'sentry/components/onboarding/gettingStartedDoc/utils'; import { + getCrashReportModalConfigDescription, + getCrashReportModalInstallDescriptionJavaScript, + getCrashReportModalIntroduction, + getCrashReportModalSnippetJavaScript, getFeedbackConfigOptions, getFeedbackConfigureDescription, } from 'sentry/components/onboarding/gettingStartedDoc/utils/feedbackOnboarding'; @@ -241,12 +246,37 @@ const feedbackOnboarding: OnboardingConfig = { nextSteps: () => [], }; +const crashReportOnboarding: OnboardingConfig = { + introduction: () => getCrashReportModalIntroduction(), + install: (params: Params) => [ + { + type: StepType.INSTALL, + description: getCrashReportModalInstallDescriptionJavaScript(), + configurations: getCrashReportModalSnippetJavaScript(params), + }, + ], + configure: () => [ + { + type: StepType.CONFIGURE, + description: getCrashReportModalConfigDescription({ + link: 'https://docs.sentry.io/platforms/javascript/user-feedback/configuration/#crash-report-modal', + }), + additionalInfo: widgetCallout({ + link: 'https://docs.sentry.io/platforms/javascript/user-feedback/#user-feedback-widget', + }), + }, + ], + verify: () => [], + nextSteps: () => [], +}; + const docs: Docs = { onboarding, feedbackOnboardingNpm: feedbackOnboarding, replayOnboardingNpm: replayOnboarding, replayOnboardingJsLoader, customMetricsOnboarding: getJSMetricsOnboarding({getInstallConfig}), + crashReportOnboarding, }; export default docs; diff --git a/static/app/utils/analytics/feedbackAnalyticsEvents.tsx b/static/app/utils/analytics/feedbackAnalyticsEvents.tsx index 4807a290ab5bbc..4c35166e47b052 100644 --- a/static/app/utils/analytics/feedbackAnalyticsEvents.tsx +++ b/static/app/utils/analytics/feedbackAnalyticsEvents.tsx @@ -17,7 +17,7 @@ export const feedbackEventMap: Record = { 'feedback.list-item-selected': 'Selected Item in Feedback List', 'feedback.details-integration-issue-clicked': 'Clicked Integration Issue Button in Feedback Details', - 'feedback.whats-new-banner-dismissed': 
'Dismissed Feedback Whatss New Banner', + 'feedback.whats-new-banner-dismissed': 'Dismissed Feedback Whats New Banner', 'feedback.whats-new-banner-viewed': 'Viewed Feedback Whats New Banner', 'feedback.mark-spam-clicked': 'Marked Feedback as Spam', 'feedback.list-view-setup-sidebar': 'Viewed Feedback Onboarding Sidebar', diff --git a/static/app/views/userFeedback/userFeedbackEmpty.tsx b/static/app/views/userFeedback/userFeedbackEmpty.tsx index 4b49526085a8a0..d38ed1806dc0bf 100644 --- a/static/app/views/userFeedback/userFeedbackEmpty.tsx +++ b/static/app/views/userFeedback/userFeedbackEmpty.tsx @@ -7,6 +7,7 @@ import emptyStateImg from 'sentry-images/spot/feedback-empty-state.svg'; import {Button} from 'sentry/components/button'; import ButtonBar from 'sentry/components/buttonBar'; import EmptyStateWarning from 'sentry/components/emptyStateWarning'; +import {useFeedbackOnboardingSidebarPanel} from 'sentry/components/feedback/useFeedbackOnboarding'; import OnboardingPanel from 'sentry/components/onboardingPanel'; import {t} from 'sentry/locale'; import {trackAnalytics} from 'sentry/utils/analytics'; @@ -27,6 +28,8 @@ export function UserFeedbackEmpty({projectIds}: Props) { : projects; const hasAnyFeedback = selectedProjects.some(({hasUserReports}) => hasUserReports); + const hasNewOnboarding = organization.features.includes('user-feedback-onboarding'); + const {activateSidebarIssueDetails} = useFeedbackOnboardingSidebarPanel(); useEffect(() => { window.sentryEmbedCallback = function (embed) { @@ -84,14 +87,25 @@ export function UserFeedbackEmpty({projectIds}: Props) { )}

- + {hasNewOnboarding ? ( + + ) : ( + + )}