ref: Remove references to deprecated settings #66355

Merged 1 commit on Mar 6, 2024

26 changes: 0 additions & 26 deletions src/sentry/testutils/pytest/kafka.py
@@ -63,32 +63,6 @@ def inner(settings):
return inner


@pytest.fixture
def kafka_topics_setter():
"""
Returns a function that, given a Django settings object, sets up the
Kafka topic names used in tests.

:return: a function that, given a settings object, changes all Kafka topic names
to "test-<normal_topic_name>"
"""

def set_test_kafka_settings(settings):
settings.KAFKA_INGEST_EVENTS = "ingest-events"
settings.KAFKA_TOPICS[settings.KAFKA_INGEST_EVENTS] = {"cluster": "default"}

settings.INGEST_TRANSACTIONS = "ingest-transactions"
settings.KAFKA_TOPICS[settings.INGEST_TRANSACTIONS] = {"cluster": "default"}

settings.KAFKA_INGEST_ATTACHMENTS = "ingest-attachments"
settings.KAFKA_TOPICS[settings.KAFKA_INGEST_ATTACHMENTS] = {"cluster": "default"}

settings.KAFKA_OUTCOMES = "outcomes"
settings.KAFKA_TOPICS[settings.KAFKA_OUTCOMES] = {"cluster": "default"}

return set_test_kafka_settings


@pytest.fixture(scope="session")
def scope_consumers():
"""
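For context, the deleted `kafka_topics_setter` fixture would have been consumed roughly as in the hypothetical sketch below; the test name and assertion are illustrative only and do not come from the repository.

```python
# Hypothetical consumer of the removed fixture. pytest would inject both the
# fixture and pytest-django's `settings` object; the fixture then pointed each
# Kafka topic at the default cluster.
def test_topics_use_default_cluster(kafka_topics_setter, settings):
    kafka_topics_setter(settings)
    assert settings.KAFKA_TOPICS[settings.KAFKA_OUTCOMES] == {"cluster": "default"}
```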
14 changes: 4 additions & 10 deletions src/sentry/utils/outcomes.py
@@ -4,8 +4,6 @@
from datetime import datetime
from enum import IntEnum

from django.conf import settings

from sentry.conf.types.kafka_definition import Topic
from sentry.constants import DataCategory
from sentry.utils import json, kafka_config, metrics
@@ -98,14 +96,10 @@ def track_outcome(

timestamp = timestamp or to_datetime(time.time())

# Send billing outcomes to a dedicated topic if there is a separate
# configuration for it. Otherwise, fall back to the regular outcomes topic.
# This does NOT switch the producer, if both topics are on the same cluster.
#
# In Sentry, there is no significant difference between the classes of
# outcome. In Sentry SaaS, they have elevated stability requirements as they
# are used for spike protection and quota enforcement.
topic_name = settings.KAFKA_OUTCOMES_BILLING if use_billing else settings.KAFKA_OUTCOMES
# Send billing outcomes to a dedicated topic.
topic_name = (
billing_config["real_topic_name"] if use_billing else outcomes_config["real_topic_name"]
)

# Send a snuba metrics payload.
publisher.publish(
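The replacement reads topic configuration through `kafka_config.get_topic_definition` instead of the removed `settings.KAFKA_OUTCOMES` / `settings.KAFKA_OUTCOMES_BILLING` constants. A minimal sketch of the new resolution path, assuming `outcomes_config` and `billing_config` are obtained from `get_topic_definition` and that a `Topic.OUTCOMES_BILLING` member exists alongside `Topic.OUTCOMES`:

```python
# Sketch of the topic resolution now used in track_outcome(); the variable
# names mirror the added lines above, and Topic.OUTCOMES_BILLING is assumed.
from sentry.conf.types.kafka_definition import Topic
from sentry.utils import kafka_config

outcomes_config = kafka_config.get_topic_definition(Topic.OUTCOMES)
billing_config = kafka_config.get_topic_definition(Topic.OUTCOMES_BILLING)

use_billing = False  # True for accepted / rate-limited outcomes
topic_name = (
    billing_config["real_topic_name"] if use_billing else outcomes_config["real_topic_name"]
)
# With the default configuration this resolves to "outcomes-billing" or
# "outcomes", matching the literal topic names asserted in the tests below.
```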
101 changes: 42 additions & 59 deletions tests/sentry/utils/test_outcomes.py
@@ -2,7 +2,6 @@
from unittest import mock

import pytest
from django.conf import settings

from sentry.conf.types.kafka_definition import Topic
from sentry.utils import json, kafka_config, outcomes
@@ -61,45 +60,38 @@ def test_parse_outcome(name, outcome):
def test_track_outcome_default(setup):
"""
Asserts an outcomes serialization roundtrip with defaults.

Additionally checks that non-billing outcomes are routed to the DEFAULT
outcomes cluster and topic, even if there is a separate cluster for billing
outcomes.
"""
track_outcome(
org_id=1,
project_id=2,
key_id=3,
outcome=Outcome.INVALID,
reason="project_id",
)

# Provide a billing cluster config that should be ignored
with mock.patch.dict(
settings.KAFKA_TOPICS, {settings.KAFKA_OUTCOMES_BILLING: {"cluster": "different"}}
):
track_outcome(
org_id=1,
project_id=2,
key_id=3,
outcome=Outcome.INVALID,
reason="project_id",
)

cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args
assert cluster_args == (kafka_config.get_topic_definition(Topic.OUTCOMES)["cluster"],)

assert outcomes.outcomes_publisher
(topic_name, payload), _ = setup.mock_publisher.return_value.publish.call_args
assert topic_name == settings.KAFKA_OUTCOMES
cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args
assert cluster_args == (kafka_config.get_topic_definition(Topic.OUTCOMES)["cluster"],)

data = json.loads(payload)
del data["timestamp"]
assert data == {
"org_id": 1,
"project_id": 2,
"key_id": 3,
"outcome": Outcome.INVALID.value,
"reason": "project_id",
"event_id": None,
"category": None,
"quantity": 1,
}
assert outcomes.outcomes_publisher
(topic_name, payload), _ = setup.mock_publisher.return_value.publish.call_args

# not billing because it's not accepted/rate limited
assert topic_name == "outcomes"

data = json.loads(payload)
del data["timestamp"]
assert data == {
"org_id": 1,
"project_id": 2,
"key_id": 3,
"outcome": Outcome.INVALID.value,
"reason": "project_id",
"event_id": None,
"category": None,
"quantity": 1,
}

assert outcomes.billing_publisher is None
assert outcomes.billing_publisher is None


def test_track_outcome_billing(setup):
@@ -120,7 +112,7 @@ def test_track_outcome_billing(setup):

assert outcomes.outcomes_publisher
(topic_name, _), _ = setup.mock_publisher.return_value.publish.call_args
assert topic_name == settings.KAFKA_OUTCOMES_BILLING
assert topic_name == "outcomes-billing"

assert outcomes.billing_publisher is None

@@ -130,30 +122,21 @@ def test_track_outcome_billing_topic(setup):
Checks that outcomes are routed to the DEDICATED billing topic within the
same cluster in default configuration.
"""
track_outcome(
org_id=1,
project_id=1,
key_id=1,
outcome=Outcome.ACCEPTED,
)

with mock.patch.dict(
settings.KAFKA_TOPICS,
{
settings.KAFKA_OUTCOMES_BILLING: {
"cluster": kafka_config.get_topic_definition(Topic.OUTCOMES)["cluster"],
}
},
):
track_outcome(
org_id=1,
project_id=1,
key_id=1,
outcome=Outcome.ACCEPTED,
)

cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args
assert cluster_args == (kafka_config.get_topic_definition(Topic.OUTCOMES)["cluster"],)
cluster_args, _ = setup.mock_get_kafka_producer_cluster_options.call_args
assert cluster_args == (kafka_config.get_topic_definition(Topic.OUTCOMES)["cluster"],)

assert outcomes.outcomes_publisher
(topic_name, _), _ = setup.mock_publisher.return_value.publish.call_args
assert topic_name == settings.KAFKA_OUTCOMES_BILLING
assert outcomes.outcomes_publisher
(topic_name, _), _ = setup.mock_publisher.return_value.publish.call_args
assert topic_name == "outcomes-billing"

assert outcomes.billing_publisher is None
assert outcomes.billing_publisher is None


def test_track_outcome_billing_cluster(settings, setup):
@@ -174,6 +157,6 @@ def test_track_outcome_billing_cluster(settings, setup):

assert outcomes.billing_publisher
(topic_name, _), _ = setup.mock_publisher.return_value.publish.call_args
assert topic_name == settings.KAFKA_OUTCOMES_BILLING
assert topic_name == "outcomes-billing"

assert outcomes.outcomes_publisher is None
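Taken together, the updated tests pin the routing behaviour to literal topic names rather than settings constants. A short illustrative call, assuming the default topic configuration and that `Outcome` and `track_outcome` are importable from `sentry.utils.outcomes` (the module changed above); this is not part of the diff.

```python
# Illustrative only: with the default configuration, an accepted outcome is
# published to the dedicated "outcomes-billing" topic, while e.g. an invalid
# outcome goes to the regular "outcomes" topic.
from sentry.utils.outcomes import Outcome, track_outcome

track_outcome(org_id=1, project_id=1, key_id=1, outcome=Outcome.ACCEPTED)  # -> "outcomes-billing"
track_outcome(
    org_id=1,
    project_id=2,
    key_id=3,
    outcome=Outcome.INVALID,
    reason="project_id",
)  # -> "outcomes"
```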