diff --git a/requirements-base.txt b/requirements-base.txt
index a407f9faf5b6cb..fe18a17814f67d 100644
--- a/requirements-base.txt
+++ b/requirements-base.txt
@@ -57,7 +57,7 @@ rfc3986-validator>=0.1.1
 sentry-arroyo>=2.6.0
 sentry-relay>=0.8.18
 sentry-sdk>=1.15.0
-snuba-sdk>=1.0.3
+snuba-sdk>=1.0.5
 simplejson>=3.17.6
 statsd>=3.3
 structlog>=22
diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt
index be84d30d39dd6d..216499d446b5f5 100644
--- a/requirements-dev-frozen.txt
+++ b/requirements-dev-frozen.txt
@@ -160,7 +160,7 @@ sentry-sdk==1.15.0
 simplejson==3.17.6
 six==1.16.0
 sniffio==1.2.0
-snuba-sdk==1.0.3
+snuba-sdk==1.0.5
 sortedcontainers==2.4.0
 soupsieve==2.3.2.post1
 sqlparse==0.3.0
diff --git a/requirements-frozen.txt b/requirements-frozen.txt
index 4e674b861242df..634dfc0a38ee5c 100644
--- a/requirements-frozen.txt
+++ b/requirements-frozen.txt
@@ -113,7 +113,7 @@ sentry-sdk==1.15.0
 simplejson==3.17.6
 six==1.16.0
 sniffio==1.2.0
-snuba-sdk==1.0.3
+snuba-sdk==1.0.5
 sortedcontainers==2.4.0
 soupsieve==2.3.2.post1
 sqlparse==0.3.0
diff --git a/src/sentry/api/endpoints/group_hashes_split.py b/src/sentry/api/endpoints/group_hashes_split.py
index 2071e1a9617b4e..3250c1a4c8b385 100644
--- a/src/sentry/api/endpoints/group_hashes_split.py
+++ b/src/sentry/api/endpoints/group_hashes_split.py
@@ -5,6 +5,7 @@
 from django.db import transaction
 from rest_framework.request import Request
 from rest_framework.response import Response
+from snuba_sdk import Request as SnubaRequest
 from snuba_sdk.conditions import Condition, Op
 from snuba_sdk.orderby import Direction, OrderBy
 from snuba_sdk.query import Column, Entity, Function, Query
@@ -158,10 +159,14 @@ def _get_full_hierarchical_hashes(group: Group, hash: str) -> Optional[Sequence[
             ]
         )
     )
-    request = Request(dataset="events", app_id="grouping", query=query)
-    data = snuba.raw_snql_query(request, referrer="group_split.get_full_hierarchical_hashes")[
-        "data"
-    ]
+    referrer = "group_split.get_full_hierarchical_hashes"
+    request = SnubaRequest(
+        dataset="events",
+        app_id="grouping",
+        query=query,
+        tenant_ids={"referrer": referrer, "organization_id": group.project.organization_id},
+    )
+    data = snuba.raw_snql_query(request, referrer)["data"]
 
     if not data:
         return None
@@ -385,10 +390,17 @@ def _render_trees(group: Group, user):
     )
 
     rv = []
-    request = Request(dataset="events", app_id="grouping", query=query)
-    for row in snuba.raw_snql_query(request, referrer="api.group_split.render_grouping_tree")[
-        "data"
-    ]:
+    referrer = "api.group_split.render_grouping_tree"
+    request = SnubaRequest(
+        dataset="events",
+        app_id="grouping",
+        query=query,
+        tenant_ids={
+            "referrer": referrer,
+            "organization_id": group.project.organization_id,
+        },
+    )
+    for row in snuba.raw_snql_query(request, referrer)["data"]:
         if len(row["hash_slice"]) == 0:
             hash = row["primary_hash"]
             parent_hash = child_hash = None
diff --git a/src/sentry/api/endpoints/grouping_level_new_issues.py b/src/sentry/api/endpoints/grouping_level_new_issues.py
index d6a2b39c5a62cf..712d86b37a42ec 100644
--- a/src/sentry/api/endpoints/grouping_level_new_issues.py
+++ b/src/sentry/api/endpoints/grouping_level_new_issues.py
@@ -106,7 +106,15 @@ def _get_hash_for_parent_level(group: Group, id: int, levels_overview: LevelsOve
         .set_where(_get_group_filters(group))
         .set_limit(1)
     )
-    request = SnubaRequest(dataset="events", app_id="grouping", query=query)
+    request = SnubaRequest(
+        dataset="events",
+        app_id="grouping",
+        query=query,
+        tenant_ids={
+            "referrer": "api.group_hashes_levels.get_hash_for_parent_level",
+            "organization_id": group.project.organization_id,
+        },
+    )
     return_hash: str = get_path(snuba.raw_snql_query(request), "data", 0, "hash")  # type: ignore
     cache.set(cache_key, return_hash)
 
@@ -187,10 +195,14 @@ def _query_snuba(group: Group, id: int, offset=None, limit=None):
     if limit is not None:
         query = query.set_limit(limit)
 
-    request = SnubaRequest(dataset="events", app_id="grouping", query=query)
-    return snuba.raw_snql_query(request, referrer="api.group_hashes_levels.get_level_new_issues")[
-        "data"
-    ]
+    referrer = "api.group_hashes_levels.get_level_new_issues"
+    request = SnubaRequest(
+        dataset="events",
+        app_id="grouping",
+        query=query,
+        tenant_ids={"referrer": referrer, "organization_id": group.project.organization_id},
+    )
+    return snuba.raw_snql_query(request, referrer)["data"]
 
 
 def _process_snuba_results(query_res, group: Group, id: int, user):
diff --git a/src/sentry/api/endpoints/grouping_levels.py b/src/sentry/api/endpoints/grouping_levels.py
index cf461018dbc635..9c25ad0a89e0a3 100644
--- a/src/sentry/api/endpoints/grouping_levels.py
+++ b/src/sentry/api/endpoints/grouping_levels.py
@@ -112,7 +112,7 @@ class LevelsOverview:
     num_levels: int
 
 
-def get_levels_overview(group):
+def get_levels_overview(group: Group):
     query = (
         Query(Entity("events"))
         .set_select(
@@ -127,8 +127,14 @@ def get_levels_overview(group):
         .set_where(_get_group_filters(group))
         .set_groupby([Column("primary_hash")])
     )
-    request = SnubaRequest(dataset="events", app_id="grouping", query=query)
-    res = snuba.raw_snql_query(request, referrer="api.group_hashes_levels.get_levels_overview")
+    referrer = "api.group_hashes_levels.get_levels_overview"
+    request = SnubaRequest(
+        dataset="events",
+        app_id="grouping",
+        query=query,
+        tenant_ids={"referrer": referrer, "organization_id": group.project.organization_id},
+    )
+    res = snuba.raw_snql_query(request, referrer)
 
     if not res["data"]:
         raise NoEvents()
diff --git a/src/sentry/api/endpoints/organization_events_span_ops.py b/src/sentry/api/endpoints/organization_events_span_ops.py
index f4ee3f03665f88..7c110efb11ea64 100644
--- a/src/sentry/api/endpoints/organization_events_span_ops.py
+++ b/src/sentry/api/endpoints/organization_events_span_ops.py
@@ -38,8 +38,10 @@ def data_fn(offset: int, limit: int) -> Any:
                 offset=offset,
                 orderby="-count",
             )
+            referrer = "api.organization-events-span-ops"
             snql_query = builder.get_snql_query()
-            results = raw_snql_query(snql_query, "api.organization-events-span-ops")
+            snql_query.tenant_ids = {"referrer": referrer, "organization_id": organization.id}
+            results = raw_snql_query(snql_query, referrer)
             return [SpanOp(op=row["spans_op"], count=row["count"]) for row in results["data"]]
 
         with self.handle_query_errors():
diff --git a/src/sentry/api/endpoints/project_dynamic_sampling.py b/src/sentry/api/endpoints/project_dynamic_sampling.py
index 69db39ee8e5f57..994ae8fe057f94 100644
--- a/src/sentry/api/endpoints/project_dynamic_sampling.py
+++ b/src/sentry/api/endpoints/project_dynamic_sampling.py
@@ -110,10 +110,16 @@ def __project_stats_query(self, query, projects_in_org, org_id, query_time_range
                 alias="root_count",
             )
         ]
+        referrer = Referrer.DYNAMIC_SAMPLING_DISTRIBUTION_FETCH_PROJECT_STATS.value
         snuba_query = snuba_query.set_select(snuba_query.select + extra_select)
         data = raw_snql_query(
-            SnubaRequest(dataset=Dataset.Discover.value, app_id="default", query=snuba_query),
-            referrer=Referrer.DYNAMIC_SAMPLING_DISTRIBUTION_FETCH_PROJECT_STATS.value,
+            SnubaRequest(
+                dataset=Dataset.Discover.value,
+                app_id="default",
+                query=snuba_query,
+                tenant_ids={"referrer": referrer, "organization_id": org_id},
+            ),
+            referrer,
         )
         return builder.process_results(data)["data"]
 
@@ -259,13 +265,21 @@ def __fetch_randomly_sampled_transactions(self, project, query, sample_size, que
                 )
             ]
         )
+
+        referrer = Referrer.DYNAMIC_SAMPLING_DISTRIBUTION_FETCH_TRANSACTIONS.value
+
         snuba_query = snuba_query.set_groupby(
             snuba_query.groupby + [Column("modulo_num"), Column("contexts.key")]
        )
 
         data = raw_snql_query(
-            SnubaRequest(dataset=Dataset.Discover.value, app_id="default", query=snuba_query),
-            referrer=Referrer.DYNAMIC_SAMPLING_DISTRIBUTION_FETCH_TRANSACTIONS.value,
+            SnubaRequest(
+                dataset=Dataset.Discover.value,
+                app_id="default",
+                query=snuba_query,
+                tenant_ids={"referrer": referrer, "organization_id": project.organization_id},
+            ),
+            referrer,
         )["data"]
         return data
 
diff --git a/src/sentry/api/serializers/models/group.py b/src/sentry/api/serializers/models/group.py
index cba720b1cde20f..e00ccd4c905d8a 100644
--- a/src/sentry/api/serializers/models/group.py
+++ b/src/sentry/api/serializers/models/group.py
@@ -896,7 +896,7 @@ def __init__(
         from sentry.search.snuba.executors import get_search_filter
 
         self.environment_ids = environment_ids
-
+        self.organization_id = organization_id
         # XXX: We copy this logic from `PostgresSnubaQueryExecutor.query`. Ideally we
         # should try and encapsulate this logic, but if you're changing this, change it
         # there as well.
diff --git a/src/sentry/api/serializers/models/group_stream.py b/src/sentry/api/serializers/models/group_stream.py
index 0e6e0ee573a663..b0efae12fbc7d2 100644
--- a/src/sentry/api/serializers/models/group_stream.py
+++ b/src/sentry/api/serializers/models/group_stream.py
@@ -348,6 +348,7 @@ def query_tsdb(
             snuba_tsdb.get_range,
             environment_ids=environment_ids,
             conditions=conditions,
+            tenant_ids={"organization_id": self.organization_id},
             **query_params,
         )
         if error_issue_ids:
diff --git a/src/sentry/ingest/transaction_clusterer/datasource/snuba.py b/src/sentry/ingest/transaction_clusterer/datasource/snuba.py
index 779274392d18b3..2394896d105c43 100644
--- a/src/sentry/ingest/transaction_clusterer/datasource/snuba.py
+++ b/src/sentry/ingest/transaction_clusterer/datasource/snuba.py
@@ -12,6 +12,7 @@ def fetch_unique_transaction_names(
     project: Project, time_range: Tuple[datetime, datetime], limit: int
 ) -> Iterable[str]:
     then, now = time_range
+    referrer = "src.sentry.ingest.transaction_clusterer"
     snuba_request = Request(
         "transactions",
         app_id="transactions",
@@ -27,9 +28,8 @@
             groupby=[Column("transaction")],
             limit=Limit(limit),
         ),
+        tenant_ids={"referrer": referrer, "organization_id": project.organization_id},
     )
-    snuba_response = raw_snql_query(
-        snuba_request, referrer="src.sentry.ingest.transaction_clusterer"
-    )
+    snuba_response = raw_snql_query(snuba_request, referrer)
 
     return (row["transaction"] for row in snuba_response["data"])
diff --git a/src/sentry/release_health/release_monitor/metrics.py b/src/sentry/release_health/release_monitor/metrics.py
index 65b6828340e12d..7485841b5b27e0 100644
--- a/src/sentry/release_health/release_monitor/metrics.py
+++ b/src/sentry/release_health/release_monitor/metrics.py
@@ -147,13 +147,15 @@ def fetch_project_release_health_totals(
                 .set_limit(self.CHUNK_SIZE + 1)
                 .set_offset(offset)
             )
+            referrer = "release_monitor.fetch_project_release_health_totals"
             request = Request(
-                dataset=Dataset.Metrics.value, app_id="release_health", query=query
+                dataset=Dataset.Metrics.value,
+                app_id="release_health",
+                query=query,
+                tenant_ids={"referrer": referrer, "organization_id": org_id},
             )
             with metrics.timer("release_monitor.fetch_project_release_health_totals.query"):
-                data = raw_snql_query(
-                    request, referrer="release_monitor.fetch_project_release_health_totals"
-                )["data"]
+                data = raw_snql_query(request, referrer)["data"]
                 count = len(data)
                 more_results = count > self.CHUNK_SIZE
                 offset += self.CHUNK_SIZE
diff --git a/src/sentry/snuba/metrics/fields/base.py b/src/sentry/snuba/metrics/fields/base.py
index 14817590992cc6..affff6a872a2c7 100644
--- a/src/sentry/snuba/metrics/fields/base.py
+++ b/src/sentry/snuba/metrics/fields/base.py
@@ -145,7 +145,12 @@ def run_metrics_query(
         + where,
         granularity=Granularity(GRANULARITY),
     )
-    request = Request(dataset=Dataset.Metrics.value, app_id="metrics", query=query)
+    request = Request(
+        dataset=Dataset.Metrics.value,
+        app_id="metrics",
+        query=query,
+        tenant_ids={"referrer": referrer, "organization_id": org_id},
+    )
     result = raw_snql_query(request, referrer, use_cache=True)
     return cast(List[SnubaDataType], result["data"])
 
diff --git a/src/sentry/snuba/referrer.py b/src/sentry/snuba/referrer.py
index cbbd4400291b6c..c452e5a57bde4f 100644
--- a/src/sentry/snuba/referrer.py
+++ b/src/sentry/snuba/referrer.py
@@ -69,6 +69,9 @@ class ReferrerBase(Enum):
     API_GROUP_EVENTS_PERFORMANCE_DIRECT_HIT = "api.group-events.performance.direct-hit"
     API_GROUP_EVENTS_PERFORMANCE = "api.group-events.performance"
     API_GROUP_HASHES_LEVELS_GET_LEVEL_NEW_ISSUES = "api.group_hashes_levels.get_level_new_issues"
+    API_GROUP_HASHES_LEVELS_GET_HASH_FOR_PARENT_LEVEL = (
+        "api.group_hashes_levels.get_hash_for_parent_level"
+    )
     API_GROUP_HASHES_LEVELS_GET_LEVELS_OVERVIEW = "api.group_hashes_levels.get_levels_overview"
     API_GROUP_HASHES = "api.group-hashes"
     API_ISSUES_ISSUE_EVENTS = "api.issues.issue_events"
diff --git a/src/sentry/snuba/sessions.py b/src/sentry/snuba/sessions.py
index 1066fce03af913..2cbb128cef1b39 100644
--- a/src/sentry/snuba/sessions.py
+++ b/src/sentry/snuba/sessions.py
@@ -155,10 +155,14 @@ def _check_releases_have_health_data(
             Condition(Column("release"), Op.IN, release_versions),
         ],
     )
-    request = Request(dataset="sessions", app_id="default", query=query)
-    data = snuba.raw_snql_query(request, referrer="snuba.sessions.check_releases_have_health_data")[
-        "data"
-    ]
+    referrer = "snuba.sessions.check_releases_have_health_data"
+    request = Request(
+        dataset="sessions",
+        app_id="default",
+        query=query,
+        tenant_ids={"referrer": referrer, "organization_id": organization_id},
+    )
+    data = snuba.raw_snql_query(request, referrer)["data"]
 
     return {row["release"] for row in data}
 
@@ -265,10 +269,14 @@ def _get_project_releases_count(
         where=where,
         having=having,
     )
-    request = Request(dataset="sessions", app_id="default", query=query)
-    data = snuba.raw_snql_query(request, referrer="snuba.sessions.get_project_releases_count")[
-        "data"
-    ]
+    referrer = "snuba.sessions.get_project_releases_count"
+    request = Request(
+        dataset="sessions",
+        app_id="default",
+        query=query,
+        tenant_ids={"referrer": referrer, "organization_id": organization_id},
+    )
+    data = snuba.raw_snql_query(request, referrer)["data"]
 
     return data[0]["count"] if data else 0
 
diff --git a/src/sentry/tsdb/snuba.py b/src/sentry/tsdb/snuba.py
index bd8ee7f8b7885e..b8f652e44b041f 100644
--- a/src/sentry/tsdb/snuba.py
+++ b/src/sentry/tsdb/snuba.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import dataclasses
 import functools
 import itertools
@@ -272,6 +274,7 @@ def get_data(
         conditions=None,
         use_cache=False,
         jitter_value=None,
+        tenant_ids: dict[str, str | int] = None,
     ):
         if model in self.non_outcomes_snql_query_settings:
             # no way around having to explicitly map legacy condition format to SnQL since this function
@@ -307,6 +310,7 @@ def get_data(
                     model in (TSDBModel.group_generic, TSDBModel.users_affected_by_generic_group)
                 ),
                 is_grouprelease=(model == TSDBModel.frequent_releases_by_group),
+                tenant_ids=tenant_ids,
             )
         else:
             return self.__get_data_legacy(
@@ -340,6 +344,7 @@ def __get_data_snql(
         jitter_value: Optional[int] = None,
         manual_group_on_time: bool = False,
         is_grouprelease: bool = False,
+        tenant_ids: dict[str, str | int] = None,
     ):
         """
         Similar to __get_data_legacy but uses the SnQL format. For future additions, prefer using this impl over
@@ -436,6 +441,9 @@ def __get_data_snql(
             Condition(Column(time_column), Op.LT, end),
         ]
 
+        referrer = f"tsdb-modelid:{model.value}"
+        tenant_ids = tenant_ids or dict()
+        tenant_ids["referrer"] = referrer
         snql_request = Request(
             dataset=model_dataset.value,
             app_id="tsdb.get_data",
@@ -448,10 +456,9 @@ def __get_data_snql(
                 granularity=Granularity(rollup),
                 limit=Limit(limit),
             ),
+            tenant_ids=tenant_ids,
         )
-        query_result = raw_snql_query(
-            snql_request, referrer=f"tsdb-modelid:{model.value}", use_cache=use_cache
-        )
+        query_result = raw_snql_query(snql_request, referrer=referrer, use_cache=use_cache)
         if manual_group_on_time:
             translated_results = {"data": query_result["data"]}
         else:
@@ -705,6 +712,7 @@ def get_range(
         conditions=None,
         use_cache=False,
         jitter_value=None,
+        tenant_ids=None,
     ):
         model_query_settings = self.model_query_settings.get(model)
         assert model_query_settings is not None, f"Unsupported TSDBModel: {model.name}"
@@ -726,6 +734,7 @@ def get_range(
             conditions=conditions,
             use_cache=use_cache,
             jitter_value=jitter_value,
+            tenant_ids=tenant_ids,
         )
         # convert
         # {group:{timestamp:count, ...}}