Merged
1 change: 1 addition & 0 deletions src/sentry/api/endpoints/organization_metrics.py
@@ -164,6 +164,7 @@ def data_fn(offset: int, limit: int):
projects,
query.to_metrics_query(),
use_case_id=get_use_case_id(request.GET.get("useCase", "release-health")),
tenant_ids={"organization_id": organization.id},
)
data["query"] = query.query
except (
5 changes: 3 additions & 2 deletions src/sentry/api/endpoints/organization_stats_v2.py
@@ -173,17 +173,18 @@ def get(self, request: Request, organization) -> Response:
Select a field, define a date range, and group or filter by columns.
"""
with self.handle_query_errors():
tenant_ids = {"organization_id": organization.id}
Review comment (Member): does the referrer get injected somewhere else?

Reply (Contributor Author): Since the referrer is passed to Snuba in every request, I've updated the Snuba query functions in utils/snuba.py to at least add the per-request referrer to the tenant IDs.

E.g.:

    if referrer:
        kwargs["tenant_ids"] = kwargs.get("tenant_ids") or dict()
        kwargs["tenant_ids"]["referrer"] = referrer
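
A minimal, self-contained sketch of that pattern, using an illustrative helper name (the actual change lives inside the existing query functions in utils/snuba.py rather than in a new helper):

    from typing import Any, Dict, Optional

    def _merge_referrer_into_tenant_ids(
        referrer: Optional[str], tenant_ids: Optional[Dict[str, Any]]
    ) -> Optional[Dict[str, Any]]:
        # Hypothetical helper for illustration only: fold the per-request
        # referrer into the tenant_ids dict before the request goes to Snuba.
        if referrer:
            tenant_ids = tenant_ids or dict()
            tenant_ids["referrer"] = referrer
        return tenant_ids

    # {"organization_id": 1} -> {"organization_id": 1, "referrer": "outcomes.totals"}
    print(_merge_referrer_into_tenant_ids("outcomes.totals", {"organization_id": 1}))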

with sentry_sdk.start_span(op="outcomes.endpoint", description="build_outcomes_query"):
query = self.build_outcomes_query(
request,
organization,
)
with sentry_sdk.start_span(op="outcomes.endpoint", description="run_outcomes_query"):
result_totals = run_outcomes_query_totals(query)
result_totals = run_outcomes_query_totals(query, tenant_ids=tenant_ids)
result_timeseries = (
None
if "project_id" in query.query_groupby
else run_outcomes_query_timeseries(query)
else run_outcomes_query_timeseries(query, tenant_ids=tenant_ids)
)
with sentry_sdk.start_span(
op="outcomes.endpoint", description="massage_outcomes_result"
6 changes: 5 additions & 1 deletion src/sentry/api/endpoints/project_key_stats.py
@@ -68,7 +68,11 @@ def get(self, request: Request, project, key_id) -> Response:
{"organization_id": project.organization_id},
)
results = massage_outcomes_result(
query_definition, [], run_outcomes_query_timeseries(query_definition)
query_definition,
[],
run_outcomes_query_timeseries(
query_definition, tenant_ids={"organization_id": project.organization_id}
),
)
except Exception:
raise ParseError(detail="Invalid request data")
16 changes: 12 additions & 4 deletions src/sentry/api/serializers/models/group.py
@@ -532,6 +532,9 @@ def _get_group_snuba_stats(
start=start,
orderby="group_id",
referrer="group.unhandled-flag",
tenant_ids={"organization_id": item_list[0].project.organization_id}
if item_list
else None,
)
for x in rv["data"]:
unhandled[x["group_id"]] = x["unhandled"]
@@ -1017,9 +1020,6 @@ def _execute_error_seen_stats_query(
if environment_ids:
filters["environment"] = environment_ids

org_id = item_list[0].project.organization_id if item_list else None
tenant_ids = {"organization_id": org_id} if org_id else dict()

return aliased_query(
dataset=Dataset.Events,
start=start,
@@ -1029,7 +1029,9 @@
filter_keys=filters,
aggregations=aggregations,
referrer="serializers.GroupSerializerSnuba._execute_error_seen_stats_query",
tenant_ids=tenant_ids,
tenant_ids={"organization_id": item_list[0].project.organization_id}
if item_list
else None,
)

@staticmethod
@@ -1060,6 +1062,9 @@ def _execute_perf_seen_stats_query(
filter_keys=filters,
aggregations=aggregations,
referrer="serializers.GroupSerializerSnuba._execute_perf_seen_stats_query",
tenant_ids={"organization_id": item_list[0].project.organization_id}
if item_list
else None,
)

@staticmethod
@@ -1086,6 +1091,9 @@ def _execute_generic_seen_stats_query(
filter_keys=filters,
aggregations=aggregations,
referrer="serializers.GroupSerializerSnuba._execute_generic_seen_stats_query",
tenant_ids={"organization_id": item_list[0].project.organization_id}
if item_list
else None,
)

@staticmethod
7 changes: 6 additions & 1 deletion src/sentry/release_health/metrics_sessions_v2.py
@@ -567,7 +567,12 @@ def run_sessions_query(
# TODO: Stop passing project IDs everywhere
projects = Project.objects.get_many_from_cache(project_ids)
try:
metrics_results = get_series(projects, metrics_query, use_case_id=UseCaseKey.RELEASE_HEALTH)
metrics_results = get_series(
projects,
metrics_query,
use_case_id=UseCaseKey.RELEASE_HEALTH,
tenant_ids={"organization_id": org_id},
)
except OrderByNotSupportedOverCompositeEntityException:
raise InvalidParams(f"Cannot order by {query.raw_orderby[0]} with the current filters")
except UtilsInvalidParams as e:
8 changes: 8 additions & 0 deletions src/sentry/search/events/builder/discover.py
@@ -207,6 +207,12 @@ def __init__(
"columns": set(),
}

# Base Tenant IDs for any Snuba Request built/executed using a QueryBuilder
org_id = self.organization_id or (
self.params.organization.id if self.params.organization else None
)
self.tenant_ids = {"organization_id": org_id} if org_id else None

# Function is a subclass of CurriedFunction
self.where: List[WhereType] = []
self.having: List[WhereType] = []
@@ -1440,6 +1446,7 @@ def get_snql_query(self) -> Request:
limitby=self.limitby,
),
flags=Flags(turbo=self.turbo),
tenant_ids=self.tenant_ids,
)

@classmethod
@@ -1611,6 +1618,7 @@ def get_snql_query(self) -> Request:
granularity=self.granularity,
limit=self.limit,
),
tenant_ids=self.tenant_ids,
)

def run_query(self, referrer: str, use_cache: bool = False) -> Any:
8 changes: 8 additions & 0 deletions src/sentry/search/events/builder/metrics.py
@@ -460,6 +460,7 @@ def get_metrics_layer_snql_query(self) -> Request:
granularity=self.granularity,
),
flags=Flags(turbo=self.turbo),
tenant_ids=self.tenant_ids,
)

def get_snql_query(self) -> Request:
@@ -511,6 +512,7 @@ def get_snql_query(self) -> Request:
granularity=self.granularity,
),
flags=Flags(turbo=self.turbo),
tenant_ids=self.tenant_ids,
)

def _create_query_framework(self) -> Tuple[str, Dict[str, QueryFramework]]:
@@ -634,6 +636,7 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any:
if self.is_performance
else UseCaseKey.RELEASE_HEALTH,
include_meta=True,
tenant_ids=self.tenant_ids,
)
except Exception as err:
raise IncompatibleMetricsQuery(err)
@@ -730,6 +733,7 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any:
app_id="default",
query=query,
flags=Flags(turbo=self.turbo),
tenant_ids=self.tenant_ids,
)
current_result = raw_snql_query(
request,
@@ -955,6 +959,7 @@ def get_snql_query(self) -> List[Request]:

This is because different functions will use different entities
"""

# No need for primary from the query framework since there's no orderby to worry about
if self.use_metrics_layer:
prefix = "generic_" if self.dataset is Dataset.PerformanceMetrics else ""
@@ -980,6 +985,7 @@ def get_snql_query(self) -> List[Request]:
orderby=[],
granularity=self.granularity,
),
tenant_ids=self.tenant_ids,
)
]
_, query_framework = self._create_query_framework()
@@ -1001,6 +1007,7 @@ def get_snql_query(self) -> List[Request]:
granularity=self.granularity,
limit=self.limit,
),
tenant_ids=self.tenant_ids,
)
)

@@ -1027,6 +1034,7 @@ def run_query(self, referrer: str, use_cache: bool = False) -> Any:
if self.is_performance
else UseCaseKey.RELEASE_HEALTH,
include_meta=True,
tenant_ids=self.tenant_ids,
)
except Exception as err:
raise IncompatibleMetricsQuery(err)
1 change: 1 addition & 0 deletions src/sentry/search/events/builder/sessions.py
@@ -72,4 +72,5 @@ def get_snql_query(self) -> Request:
limitby=self.limitby,
),
flags=Flags(turbo=self.turbo),
tenant_ids=self.tenant_ids,
)
18 changes: 15 additions & 3 deletions src/sentry/search/snuba/executors.py
@@ -278,7 +278,7 @@ def _prepare_params_for_category(
)

strategy = SEARCH_STRATEGIES.get(group_category, _query_params_for_generic)
return strategy(
snuba_query_params = strategy(
pinned_query_partial,
selected_columns,
aggregations,
@@ -290,6 +290,9 @@
conditions,
actor,
)
if snuba_query_params is not None:
snuba_query_params.kwargs["tenant_ids"] = {"organization_id": organization_id}
return snuba_query_params

def snuba_search(
self,
@@ -1005,6 +1008,8 @@ def query(
op = Op.GTE if cursor.is_prev else Op.LTE
having.append(Condition(sort_func, op, cursor.value))

tenant_ids = {"organization_id": projects[0].organization_id} if projects else None

query = Query(
match=Join([Relationship(e_event, "grouped", e_group)]),
select=[
@@ -1017,7 +1022,12 @@
orderby=[OrderBy(sort_func, direction=Direction.DESC)],
limit=Limit(limit + 1),
)
request = Request(dataset="events", app_id="cdc", query=query)
request = Request(
dataset="events",
app_id="cdc",
query=query,
tenant_ids=tenant_ids,
)
data = snuba.raw_snql_query(request, referrer="search.snuba.cdc_search.query")["data"]

hits_query = Query(
@@ -1029,7 +1039,9 @@
)
hits = None
if count_hits:
request = Request(dataset="events", app_id="cdc", query=hits_query)
request = Request(
dataset="events", app_id="cdc", query=hits_query, tenant_ids=tenant_ids
)
hits = snuba.raw_snql_query(request, referrer="search.snuba.cdc_search.hits")["data"][
0
]["count"]
21 changes: 18 additions & 3 deletions src/sentry/snuba/metrics/datasource.py
@@ -1,3 +1,5 @@
from __future__ import annotations

"""
Module that gets both metadata and time series from Snuba.
For metadata, it fetches metrics metadata (metric names, tag names, tag values, ...) from snuba.
@@ -671,9 +673,13 @@ def get_series(
metrics_query: MetricsQuery,
use_case_id: UseCaseKey,
include_meta: bool = False,
tenant_ids: dict[str, Any] | None = None,
) -> dict:
"""Get time series for the given query"""

organization_id = projects[0].organization_id if projects else None
tenant_ids = tenant_ids or {"organization_id": organization_id} if organization_id else None

if metrics_query.interval is not None:
interval = metrics_query.interval
else:
@@ -767,7 +773,10 @@ def get_series(
initial_snuba_query = next(iter(snuba_queries.values()))["totals"]

request = Request(
dataset=Dataset.Metrics.value, app_id="default", query=initial_snuba_query
dataset=Dataset.Metrics.value,
app_id="default",
query=initial_snuba_query,
tenant_ids=tenant_ids,
)
initial_query_results = raw_snql_query(
request, use_cache=False, referrer="api.metrics.totals.initial_query"
@@ -806,7 +815,10 @@ def get_series(
snuba_query = _apply_group_limit_filters(snuba_query, group_limit_filters)

request = Request(
dataset=Dataset.Metrics.value, app_id="default", query=snuba_query
dataset=Dataset.Metrics.value,
app_id="default",
query=snuba_query,
tenant_ids=tenant_ids,
)
snuba_result = raw_snql_query(
request, use_cache=False, referrer=f"api.metrics.{key}.second_query"
@@ -835,7 +847,10 @@ def get_series(
snuba_query = _apply_group_limit_filters(snuba_query, group_limit_filters)

request = Request(
dataset=Dataset.Metrics.value, app_id="default", query=snuba_query
dataset=Dataset.Metrics.value,
app_id="default",
query=snuba_query,
tenant_ids=tenant_ids,
)
snuba_result = raw_snql_query(
request,
18 changes: 14 additions & 4 deletions src/sentry/snuba/outcomes.py
@@ -1,3 +1,5 @@
from __future__ import annotations

from abc import ABC, abstractmethod
from typing import Any, Dict, List, Mapping, MutableMapping, Optional, Sequence, Tuple

@@ -298,7 +300,9 @@ def get_conditions(self, query: QueryDict, params: Mapping[Any, Any]) -> List[An
return query_conditions


def run_outcomes_query_totals(query: QueryDefinition) -> ResultSet:
def run_outcomes_query_totals(
query: QueryDefinition, tenant_ids: dict[str, Any] | None = None
) -> ResultSet:
snql_query = Query(
match=Entity(query.match),
select=query.select_params,
@@ -308,12 +312,16 @@ def run_outcomes_query_totals(query: QueryDefinition) -> ResultSet:
offset=Offset(0),
granularity=Granularity(query.rollup),
)
request = Request(dataset=query.dataset.value, app_id="default", query=snql_query)
request = Request(
dataset=query.dataset.value, app_id="default", query=snql_query, tenant_ids=tenant_ids
)
result = raw_snql_query(request, referrer="outcomes.totals")
return _format_rows(result["data"], query)


def run_outcomes_query_timeseries(query: QueryDefinition) -> ResultSet:
def run_outcomes_query_timeseries(
query: QueryDefinition, tenant_ids: dict[str, Any] | None = None
) -> ResultSet:
snql_query = Query(
match=Entity(query.match),
select=query.select_params,
@@ -323,7 +331,9 @@ def run_outcomes_query_timeseries(query: QueryDefinition) -> ResultSet:
offset=Offset(0),
granularity=Granularity(query.rollup),
)
request = Request(dataset=query.dataset.value, app_id="default", query=snql_query)
request = Request(
dataset=query.dataset.value, app_id="default", query=snql_query, tenant_ids=tenant_ids
)
result_timeseries = raw_snql_query(request, referrer="outcomes.timeseries")
return _format_rows(result_timeseries["data"], query)
