2 changes: 1 addition & 1 deletion requirements-base.txt
@@ -57,7 +57,7 @@ rfc3986-validator>=0.1.1
sentry-arroyo>=2.6.0
sentry-relay>=0.8.18
sentry-sdk>=1.15.0
snuba-sdk>=1.0.3
snuba-sdk>=1.0.5
simplejson>=3.17.6
statsd>=3.3
structlog>=22
2 changes: 1 addition & 1 deletion requirements-dev-frozen.txt
@@ -160,7 +160,7 @@ sentry-sdk==1.15.0
simplejson==3.17.6
six==1.16.0
sniffio==1.2.0
snuba-sdk==1.0.3
snuba-sdk==1.0.5
sortedcontainers==2.4.0
soupsieve==2.3.2.post1
sqlparse==0.3.0
2 changes: 1 addition & 1 deletion requirements-frozen.txt
@@ -113,7 +113,7 @@ sentry-sdk==1.15.0
simplejson==3.17.6
six==1.16.0
sniffio==1.2.0
snuba-sdk==1.0.3
snuba-sdk==1.0.5
sortedcontainers==2.4.0
soupsieve==2.3.2.post1
sqlparse==0.3.0
28 changes: 20 additions & 8 deletions src/sentry/api/endpoints/group_hashes_split.py
@@ -5,6 +5,7 @@
from django.db import transaction
from rest_framework.request import Request
from rest_framework.response import Response
from snuba_sdk import Request as SnubaRequest
from snuba_sdk.conditions import Condition, Op
from snuba_sdk.orderby import Direction, OrderBy
from snuba_sdk.query import Column, Entity, Function, Query
@@ -158,10 +159,14 @@ def _get_full_hierarchical_hashes(group: Group, hash: str) -> Optional[Sequence[
]
)
)
request = Request(dataset="events", app_id="grouping", query=query)
data = snuba.raw_snql_query(request, referrer="group_split.get_full_hierarchical_hashes")[
"data"
]
referrer = "group_split.get_full_hierarchical_hashes"
request = SnubaRequest(
dataset="events",
app_id="grouping",
query=query,
tenant_ids={"referrer": referrer, "organization_id": group.project.organization_id},
)
data = snuba.raw_snql_query(request, referrer)["data"]
if not data:
return None

@@ -385,10 +390,17 @@ def _render_trees(group: Group, user):
)

rv = []
request = Request(dataset="events", app_id="grouping", query=query)
for row in snuba.raw_snql_query(request, referrer="api.group_split.render_grouping_tree")[
"data"
]:
referrer = "api.group_split.render_grouping_tree"
request = SnubaRequest(
dataset="events",
app_id="grouping",
query=query,
tenant_ids={
"referrer": referrer,
"organization_id": group.project.organization_id,
},
)
for row in snuba.raw_snql_query(request, referrer)["data"]:
if len(row["hash_slice"]) == 0:
hash = row["primary_hash"]
parent_hash = child_hash = None
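
Taken together, the hunks in group_hashes_split.py apply one pattern: build the SnQL query, wrap it in a snuba_sdk Request that carries a tenant_ids mapping (referrer plus organization id), and pass the same referrer string to raw_snql_query. A minimal sketch of that pattern, not part of the PR, using a hypothetical helper name and referrer and a simplified query (a real events query also needs timestamp bounds); the sentry.utils.snuba import path is assumed from how the module calls snuba.raw_snql_query:

from snuba_sdk import Request as SnubaRequest
from snuba_sdk.conditions import Condition, Op
from snuba_sdk.query import Column, Entity, Query

from sentry.utils import snuba  # assumed import, matching snuba.raw_snql_query above


def fetch_primary_hashes(group):  # hypothetical helper, for illustration only
    # Build a small SnQL query against the events entity.
    query = (
        Query(Entity("events"))
        .set_select([Column("primary_hash")])
        .set_where([Condition(Column("project_id"), Op.EQ, group.project_id)])
        .set_limit(10)
    )
    # Reuse one referrer string for both the tenant_ids mapping and the query call.
    referrer = "api.example.fetch_primary_hashes"  # hypothetical referrer
    request = SnubaRequest(
        dataset="events",
        app_id="grouping",
        query=query,
        tenant_ids={
            "referrer": referrer,
            "organization_id": group.project.organization_id,
        },
    )
    return snuba.raw_snql_query(request, referrer)["data"]
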
22 changes: 17 additions & 5 deletions src/sentry/api/endpoints/grouping_level_new_issues.py
@@ -106,7 +106,15 @@ def _get_hash_for_parent_level(group: Group, id: int, levels_overview: LevelsOve
.set_where(_get_group_filters(group))
.set_limit(1)
)
request = SnubaRequest(dataset="events", app_id="grouping", query=query)
request = SnubaRequest(
dataset="events",
app_id="grouping",
query=query,
tenant_ids={
"referrer": "api.group_hashes_levels.get_hash_for_parent_level",
"organization_id": group.project.organization_id,
},
)
return_hash: str = get_path(snuba.raw_snql_query(request), "data", 0, "hash") # type: ignore
cache.set(cache_key, return_hash)

@@ -187,10 +195,14 @@ def _query_snuba(group: Group, id: int, offset=None, limit=None):
if limit is not None:
query = query.set_limit(limit)

request = SnubaRequest(dataset="events", app_id="grouping", query=query)
return snuba.raw_snql_query(request, referrer="api.group_hashes_levels.get_level_new_issues")[
"data"
]
referrer = "api.group_hashes_levels.get_level_new_issues"
request = SnubaRequest(
dataset="events",
app_id="grouping",
query=query,
tenant_ids={"referrer": referrer, "organization_id": group.project.organization_id},
)
return snuba.raw_snql_query(request, referrer)["data"]


def _process_snuba_results(query_res, group: Group, id: int, user):
12 changes: 9 additions & 3 deletions src/sentry/api/endpoints/grouping_levels.py
@@ -112,7 +112,7 @@ class LevelsOverview:
num_levels: int


def get_levels_overview(group):
def get_levels_overview(group: Group):
query = (
Query(Entity("events"))
.set_select(
@@ -127,8 +127,14 @@ def get_levels_overview(group):
.set_where(_get_group_filters(group))
.set_groupby([Column("primary_hash")])
)
request = SnubaRequest(dataset="events", app_id="grouping", query=query)
res = snuba.raw_snql_query(request, referrer="api.group_hashes_levels.get_levels_overview")
referrer = "api.group_hashes_levels.get_levels_overview"
request = SnubaRequest(
dataset="events",
app_id="grouping",
query=query,
tenant_ids={"referrer": referrer, "organization_id": group.project.organization_id},
)
res = snuba.raw_snql_query(request, referrer)

if not res["data"]:
raise NoEvents()
4 changes: 3 additions & 1 deletion src/sentry/api/endpoints/organization_events_span_ops.py
@@ -38,8 +38,10 @@ def data_fn(offset: int, limit: int) -> Any:
offset=offset,
orderby="-count",
)
referrer = "api.organization-events-span-ops"
snql_query = builder.get_snql_query()
results = raw_snql_query(snql_query, "api.organization-events-span-ops")
snql_query.tenant_ids = {"referrer": referrer, "organization_id": organization.id}
results = raw_snql_query(snql_query, referrer)
return [SpanOp(op=row["spans_op"], count=row["count"]) for row in results["data"]]

with self.handle_query_errors():
22 changes: 18 additions & 4 deletions src/sentry/api/endpoints/project_dynamic_sampling.py
@@ -110,10 +110,16 @@ def __project_stats_query(self, query, projects_in_org, org_id, query_time_range
alias="root_count",
)
]
referrer = Referrer.DYNAMIC_SAMPLING_DISTRIBUTION_FETCH_PROJECT_STATS.value
snuba_query = snuba_query.set_select(snuba_query.select + extra_select)
data = raw_snql_query(
SnubaRequest(dataset=Dataset.Discover.value, app_id="default", query=snuba_query),
referrer=Referrer.DYNAMIC_SAMPLING_DISTRIBUTION_FETCH_PROJECT_STATS.value,
SnubaRequest(
dataset=Dataset.Discover.value,
app_id="default",
query=snuba_query,
tenant_ids={"referrer": referrer, "organization_id": org_id},
),
referrer,
)
return builder.process_results(data)["data"]

@@ -259,13 +265,21 @@ def __fetch_randomly_sampled_transactions(self, project, query, sample_size, que
)
]
)

referrer = Referrer.DYNAMIC_SAMPLING_DISTRIBUTION_FETCH_TRANSACTIONS.value

snuba_query = snuba_query.set_groupby(
snuba_query.groupby + [Column("modulo_num"), Column("contexts.key")]
)

data = raw_snql_query(
SnubaRequest(dataset=Dataset.Discover.value, app_id="default", query=snuba_query),
referrer=Referrer.DYNAMIC_SAMPLING_DISTRIBUTION_FETCH_TRANSACTIONS.value,
SnubaRequest(
dataset=Dataset.Discover.value,
app_id="default",
query=snuba_query,
tenant_ids={"referrer": referrer, "organization_id": project.organization_id},
),
referrer,
)["data"]
return data

2 changes: 1 addition & 1 deletion src/sentry/api/serializers/models/group.py
@@ -896,7 +896,7 @@ def __init__(
from sentry.search.snuba.executors import get_search_filter

self.environment_ids = environment_ids

self.organization_id = organization_id
# XXX: We copy this logic from `PostgresSnubaQueryExecutor.query`. Ideally we
# should try and encapsulate this logic, but if you're changing this, change it
# there as well.
1 change: 1 addition & 0 deletions src/sentry/api/serializers/models/group_stream.py
@@ -348,6 +348,7 @@ def query_tsdb(
snuba_tsdb.get_range,
environment_ids=environment_ids,
conditions=conditions,
tenant_ids={"organization_id": self.organization_id},
**query_params,
)
if error_issue_ids:
6 changes: 3 additions & 3 deletions src/sentry/ingest/transaction_clusterer/datasource/snuba.py
@@ -12,6 +12,7 @@ def fetch_unique_transaction_names(
project: Project, time_range: Tuple[datetime, datetime], limit: int
) -> Iterable[str]:
then, now = time_range
referrer = "src.sentry.ingest.transaction_clusterer"
snuba_request = Request(
"transactions",
app_id="transactions",
@@ -27,9 +28,8 @@
groupby=[Column("transaction")],
limit=Limit(limit),
),
tenant_ids={"referrer": referrer, "organization_id": project.organization_id},
)
snuba_response = raw_snql_query(
snuba_request, referrer="src.sentry.ingest.transaction_clusterer"
)
snuba_response = raw_snql_query(snuba_request, referrer)

return (row["transaction"] for row in snuba_response["data"])
10 changes: 6 additions & 4 deletions src/sentry/release_health/release_monitor/metrics.py
@@ -147,13 +147,15 @@ def fetch_project_release_health_totals(
.set_limit(self.CHUNK_SIZE + 1)
.set_offset(offset)
)
referrer = "release_monitor.fetch_project_release_health_totals"
request = Request(
dataset=Dataset.Metrics.value, app_id="release_health", query=query
dataset=Dataset.Metrics.value,
app_id="release_health",
query=query,
tenant_ids={"referrer": referrer, "organization_id": org_id},
)
with metrics.timer("release_monitor.fetch_project_release_health_totals.query"):
data = raw_snql_query(
request, referrer="release_monitor.fetch_project_release_health_totals"
)["data"]
data = raw_snql_query(request, referrer)["data"]
count = len(data)
more_results = count > self.CHUNK_SIZE
offset += self.CHUNK_SIZE
7 changes: 6 additions & 1 deletion src/sentry/snuba/metrics/fields/base.py
@@ -145,7 +145,12 @@ def run_metrics_query(
+ where,
granularity=Granularity(GRANULARITY),
)
request = Request(dataset=Dataset.Metrics.value, app_id="metrics", query=query)
request = Request(
dataset=Dataset.Metrics.value,
app_id="metrics",
query=query,
tenant_ids={"referrer": referrer, "organization_id": org_id},
)
result = raw_snql_query(request, referrer, use_cache=True)
return cast(List[SnubaDataType], result["data"])

3 changes: 3 additions & 0 deletions src/sentry/snuba/referrer.py
@@ -69,6 +69,9 @@ class ReferrerBase(Enum):
API_GROUP_EVENTS_PERFORMANCE_DIRECT_HIT = "api.group-events.performance.direct-hit"
API_GROUP_EVENTS_PERFORMANCE = "api.group-events.performance"
API_GROUP_HASHES_LEVELS_GET_LEVEL_NEW_ISSUES = "api.group_hashes_levels.get_level_new_issues"
API_GROUP_HASHES_LEVELS_GET_HASH_FOR_PARENT_LEVEL = (
"api.group_hashes_levels.get_hash_for_parent_level"
)
API_GROUP_HASHES_LEVELS_GET_LEVELS_OVERVIEW = "api.group_hashes_levels.get_levels_overview"
API_GROUP_HASHES = "api.group-hashes"
API_ISSUES_ISSUE_EVENTS = "api.issues.issue_events"
24 changes: 16 additions & 8 deletions src/sentry/snuba/sessions.py
@@ -155,10 +155,14 @@ def _check_releases_have_health_data(
Condition(Column("release"), Op.IN, release_versions),
],
)
request = Request(dataset="sessions", app_id="default", query=query)
data = snuba.raw_snql_query(request, referrer="snuba.sessions.check_releases_have_health_data")[
"data"
]
referrer = "snuba.sessions.check_releases_have_health_data"
request = Request(
dataset="sessions",
app_id="default",
query=query,
tenant_ids={"referrer": referrer, "organization_id": organization_id},
)
data = snuba.raw_snql_query(request, referrer)["data"]
return {row["release"] for row in data}


@@ -265,10 +269,14 @@ def _get_project_releases_count(
where=where,
having=having,
)
request = Request(dataset="sessions", app_id="default", query=query)
data = snuba.raw_snql_query(request, referrer="snuba.sessions.get_project_releases_count")[
"data"
]
referrer = "snuba.sessions.get_project_releases_count"
request = Request(
dataset="sessions",
app_id="default",
query=query,
tenant_ids={"referrer": referrer, "organization_id": organization_id},
)
data = snuba.raw_snql_query(request, referrer)["data"]
return data[0]["count"] if data else 0


15 changes: 12 additions & 3 deletions src/sentry/tsdb/snuba.py
@@ -1,3 +1,5 @@
from __future__ import annotations

import dataclasses
import functools
import itertools
@@ -272,6 +274,7 @@ def get_data(
conditions=None,
use_cache=False,
jitter_value=None,
tenant_ids: dict[str, str | int] = None,
):
if model in self.non_outcomes_snql_query_settings:
# no way around having to explicitly map legacy condition format to SnQL since this function
@@ -307,6 +310,7 @@
model in (TSDBModel.group_generic, TSDBModel.users_affected_by_generic_group)
),
is_grouprelease=(model == TSDBModel.frequent_releases_by_group),
tenant_ids=tenant_ids,
)
else:
return self.__get_data_legacy(
@@ -340,6 +344,7 @@ def __get_data_snql(
jitter_value: Optional[int] = None,
manual_group_on_time: bool = False,
is_grouprelease: bool = False,
tenant_ids: dict[str, str | int] = None,
):
"""
Similar to __get_data_legacy but uses the SnQL format. For future additions, prefer using this impl over
@@ -436,6 +441,9 @@ def __get_data_snql(
Condition(Column(time_column), Op.LT, end),
]

referrer = f"tsdb-modelid:{model.value}"
tenant_ids = tenant_ids or dict()
tenant_ids["referrer"] = referrer
snql_request = Request(
dataset=model_dataset.value,
app_id="tsdb.get_data",
Expand All @@ -448,10 +456,9 @@ def __get_data_snql(
granularity=Granularity(rollup),
limit=Limit(limit),
),
tenant_ids=tenant_ids,
)
query_result = raw_snql_query(
snql_request, referrer=f"tsdb-modelid:{model.value}", use_cache=use_cache
)
query_result = raw_snql_query(snql_request, referrer=referrer, use_cache=use_cache)
if manual_group_on_time:
translated_results = {"data": query_result["data"]}
else:
@@ -705,6 +712,7 @@ def get_range(
conditions=None,
use_cache=False,
jitter_value=None,
tenant_ids=None,
):
model_query_settings = self.model_query_settings.get(model)
assert model_query_settings is not None, f"Unsupported TSDBModel: {model.name}"
@@ -726,6 +734,7 @@
conditions=conditions,
use_cache=use_cache,
jitter_value=jitter_value,
tenant_ids=tenant_ids,
)
# convert
# {group:{timestamp:count, ...}}
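
The tsdb changes above thread an optional tenant_ids mapping from get_range through get_data into __get_data_snql, where the f"tsdb-modelid:{model.value}" referrer is merged in before the snuba_sdk Request is built. A hedged usage sketch, mirroring the group_stream.py hunk earlier in this diff; the TSDBModel import path, the SnubaTSDB instantiation, and all argument values are placeholders rather than code from the PR:

from datetime import datetime, timedelta

from sentry.tsdb.base import TSDBModel  # assumed import path
from sentry.tsdb.snuba import SnubaTSDB  # backend modified in this diff

snuba_tsdb = SnubaTSDB()  # in Sentry this is normally wired up via settings
end = datetime.utcnow()
start = end - timedelta(hours=24)

series = snuba_tsdb.get_range(
    TSDBModel.group,  # per-group event counts
    [12345],  # placeholder group ids
    start,
    end,
    rollup=3600,  # hourly buckets
    tenant_ids={"organization_id": 1},  # forwarded down to the SnQL Request
)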