Merged
30 commits
cfcbe91  QueryBuilders (rahul-kumar-saini, Mar 7, 2023)
f58a0a2  get_series (rahul-kumar-saini, Mar 7, 2023)
d87ca86  refactor (rahul-kumar-saini, Mar 7, 2023)
92110d4  get_series (rahul-kumar-saini, Mar 8, 2023)
4226518  outcomes_query_timeseries (rahul-kumar-saini, Mar 8, 2023)
8dc1cef  Merge branch 'master' into rahul/feat/querybuilder_tenant_ids (rahul-kumar-saini, Mar 8, 2023)
786dbe8  mypy (rahul-kumar-saini, Mar 8, 2023)
7a4e825  outcomes.totals (rahul-kumar-saini, Mar 8, 2023)
49dca48  snuba_search (rahul-kumar-saini, Mar 8, 2023)
7dd5c69  cdc (rahul-kumar-saini, Mar 8, 2023)
7d7561b  Merge branch 'rahul/feat/search_executors_tenant_ids' into rahul/feat… (rahul-kumar-saini, Mar 8, 2023)
e9e3f6b  group serializer (rahul-kumar-saini, Mar 8, 2023)
978eaa7  typing (rahul-kumar-saini, Mar 8, 2023)
5358eee  Merge branch 'master' into rahul/feat/eventstore_tenant_ids (rahul-kumar-saini, Mar 8, 2023)
0d13cea  easy get_events() (rahul-kumar-saini, Mar 9, 2023)
602e961  filter_by_event_id (rahul-kumar-saini, Mar 9, 2023)
2bbf527  tasks (rahul-kumar-saini, Mar 13, 2023)
bd4f1a9  Merge branch 'master' into rahul/feat/eventstore_tenant_ids (rahul-kumar-saini, Mar 14, 2023)
00f6524  undo test util change (rahul-kumar-saini, Mar 14, 2023)
f410f05  Merge branch 'master' into rahul/feat/eventstore_tenant_ids (rahul-kumar-saini, Mar 14, 2023)
78952ab  outcomes.timeseries referrer optional (rahul-kumar-saini, Mar 14, 2023)
7343733  disclaimer for outcomes.timeseries (rahul-kumar-saini, Mar 14, 2023)
7f987a3  typing (rahul-kumar-saini, Mar 14, 2023)
0ad1298  api.project-events (rahul-kumar-saini, Mar 14, 2023)
4dfa970  get_unfetched_events (rahul-kumar-saini, Mar 14, 2023)
13f410a  get next/prev event (rahul-kumar-saini, Mar 14, 2023)
edf1ee6  preview.get_events (rahul-kumar-saini, Mar 14, 2023)
655fa98  rest of preview (rahul-kumar-saini, Mar 14, 2023)
2262665  reorder outcomes param (rahul-kumar-saini, Mar 14, 2023)
ea8367a  get_from_cache (rahul-kumar-saini, Mar 15, 2023)
1 change: 1 addition & 0 deletions src/sentry/api/endpoints/group_hashes.py
@@ -37,6 +37,7 @@ def get(self, request: Request, group) -> Response:
             groupby=["primary_hash"],
             referrer="api.group-hashes",
             orderby=["-latest_event_timestamp"],
+            tenant_ids={"organization_id": group.project.organization_id},
         )

         handle_results = partial(self.__handle_results, group.project_id, group.id, request.user)
4 changes: 3 additions & 1 deletion src/sentry/api/endpoints/organization_eventid.py
@@ -48,7 +48,9 @@ def get(self, request: Request, organization, event_id) -> Response:
                 project_ids=list(project_slugs_by_id.keys()),
                 event_ids=[event_id],
             )
-            event = eventstore.get_events(filter=snuba_filter, limit=1)[0]
+            event = eventstore.get_events(
+                filter=snuba_filter, limit=1, tenant_ids={"organization_id": organization.id}
+            )[0]
         except IndexError:
             raise ResourceDoesNotExist()
         else:
8 changes: 7 additions & 1 deletion src/sentry/api/endpoints/organization_group_index.py
@@ -280,7 +280,13 @@ def get(self, request: Request, organization) -> Response:
                 direct_hit_projects = (
                     set(project_ids) | request.access.project_ids_with_team_membership
                 )
-                groups = list(Group.objects.filter_by_event_id(direct_hit_projects, event_id))
+                groups = list(
+                    Group.objects.filter_by_event_id(
+                        direct_hit_projects,
+                        event_id,
+                        tenant_ids={"organization_id": organization.id},
+                    )
+                )
                 if len(groups) == 1:
                     serialized_groups = serialize(groups, request.user, serializer())
                     if event_id:
1 change: 1 addition & 0 deletions src/sentry/api/endpoints/project_events.py
@@ -60,6 +60,7 @@ def get(self, request: Request, project) -> Response:
             eventstore.get_events,
             filter=event_filter,
             referrer="api.project-events",
+            tenant_ids={"organization_id": project.organization_id},
         )

         serializer = EventSerializer() if full else SimpleEventSerializer()
1 change: 1 addition & 0 deletions src/sentry/data_export/tasks.py
@@ -216,6 +216,7 @@ def get_processor(data_export, environment_id):
             group_id=payload["group"],
             key=payload["key"],
             environment_id=environment_id,
+            tenant_ids={"organization_id": data_export.organization_id},
         )
     elif data_export.query_type == ExportQueryType.DISCOVER:
         processor = DiscoverProcessor(
3 changes: 3 additions & 0 deletions src/sentry/deletions/defaults/group.py
@@ -80,6 +80,9 @@ def chunk(self):
             limit=self.DEFAULT_CHUNK_SIZE,
             referrer="deletions.group",
             orderby=["-timestamp", "-event_id"],
+            tenant_ids={"organization_id": self.groups[0].project.organization_id}
+            if self.groups
+            else None,
         )
         if not events:
             # Remove all group events now that their node data has been removed.
2 changes: 2 additions & 0 deletions src/sentry/eventstore/base.py
@@ -156,6 +156,7 @@ def get_events(
         limit=100,
         offset=0,
         referrer="eventstore.get_events",
+        tenant_ids=None,
     ):
         """
         Fetches a list of events given a set of criteria.
@@ -179,6 +180,7 @@ def get_unfetched_events(
         limit=100,
         offset=0,
         referrer="eventstore.get_unfetched_events",
+        tenant_ids=None,
     ):
         """
         Same as get_events but returns events without their node data loaded.
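With the new keyword in place, any eventstore caller can attribute its Snuba query to a tenant without changing other behavior. A minimal usage sketch (assuming a configured Sentry application context; the organization and project IDs are placeholders):

from sentry import eventstore

ORG_ID, PROJECT_ID = 1, 42  # placeholder tenant and project

events = eventstore.get_events(
    filter=eventstore.Filter(project_ids=[PROJECT_ID]),
    limit=10,
    referrer="eventstore.get_events",
    # New in this PR; defaults to None, which preserves the old behavior.
    tenant_ids={"organization_id": ORG_ID},
)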
23 changes: 20 additions & 3 deletions src/sentry/eventstore/snuba/backend.py
@@ -55,6 +55,7 @@ def get_events(
         offset=DEFAULT_OFFSET,
         referrer="eventstore.get_events",
         dataset=snuba.Dataset.Events,
+        tenant_ids=None,
     ):
         """
         Get events from Snuba, with node data loaded.
@@ -68,6 +69,7 @@
             referrer=referrer,
             should_bind_nodes=True,
             dataset=dataset,
+            tenant_ids=tenant_ids,
         )

     def get_unfetched_events(
@@ -78,6 +80,7 @@ def get_unfetched_events(
         offset=DEFAULT_OFFSET,
         referrer="eventstore.get_unfetched_events",
         dataset=snuba.Dataset.Events,
+        tenant_ids=None,
     ):
         """
         Get events from Snuba, without node data loaded.
@@ -90,6 +93,7 @@
             referrer=referrer,
             should_bind_nodes=False,
             dataset=dataset,
+            tenant_ids=tenant_ids,
         )

     def __get_events(
@@ -101,6 +105,7 @@ def __get_events(
         referrer=None,
         should_bind_nodes=False,
         dataset=snuba.Dataset.Events,
+        tenant_ids=None,
     ):
         assert filter, "You must provide a filter"
         cols = self.__get_columns(dataset)
@@ -142,6 +147,7 @@ def __get_events(
                 offset=DEFAULT_OFFSET,
                 referrer=referrer,
                 dataset=dataset,
+                tenant_ids=tenant_ids,
             )

             if "error" not in result:
@@ -169,6 +175,7 @@ def __get_events(
             offset=offset,
             referrer=referrer,
             dataset=dataset,
+            tenant_ids=tenant_ids,
         )

         if "error" not in result:
@@ -279,7 +286,12 @@ def get_next_event_id(self, event, filter):
         filter.conditions.extend(get_after_event_condition(event))
         filter.start = event.datetime
         dataset = self._get_dataset_for_event(event)
-        return self.__get_event_id_from_filter(filter=filter, orderby=ASC_ORDERING, dataset=dataset)
+        return self.__get_event_id_from_filter(
+            filter=filter,
+            orderby=ASC_ORDERING,
+            dataset=dataset,
+            tenant_ids={"organization_id": event.project.organization_id},
+        )

     def get_prev_event_id(self, event, filter):
         """
@@ -299,13 +311,18 @@ def get_prev_event_id(self, event, filter):
         filter.end = event.datetime + timedelta(seconds=1)
         dataset = self._get_dataset_for_event(event)
         return self.__get_event_id_from_filter(
-            filter=filter, orderby=DESC_ORDERING, dataset=dataset
+            filter=filter,
+            orderby=DESC_ORDERING,
+            dataset=dataset,
+            tenant_ids={"organization_id": event.project.organization_id},
         )

     def __get_columns(self, dataset: Dataset):
         return [col.value.event_name for col in EventStorage.minimal_columns[dataset]]

-    def __get_event_id_from_filter(self, filter=None, orderby=None, dataset=snuba.Dataset.Discover):
+    def __get_event_id_from_filter(
+        self, filter=None, orderby=None, dataset=snuba.Dataset.Discover, tenant_ids=None
+    ):
         columns = [Columns.EVENT_ID.value.alias, Columns.PROJECT_ID.value.alias]
         try:
             # This query uses the discover dataset to enable
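Note that get_next_event_id and get_prev_event_id take no tenant_ids argument: the backend now derives it from the event's project, so existing callers pick up tenant attribution automatically. A sketch (assuming event is a loaded Event and filter an eventstore.Filter, and that the eventstore service proxies these backend methods):

# tenant_ids is built internally as
# {"organization_id": event.project.organization_id}
next_event_id = eventstore.get_next_event_id(event, filter=filter)
prev_event_id = eventstore.get_prev_event_id(event, filter=filter)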
4 changes: 3 additions & 1 deletion src/sentry/models/group.py
@@ -213,6 +213,7 @@ def get_oldest_or_latest_event_for_environments(
             orderby=ordering.value,
             referrer="Group.get_latest",
             dataset=dataset,
+            tenant_ids={"organization_id": group.project.organization_id},
         )

         if events:
@@ -290,7 +291,7 @@ def from_event_id(self, project, event_id):

         return self.get(id=group_id)

-    def filter_by_event_id(self, project_ids, event_id):
+    def filter_by_event_id(self, project_ids, event_id, tenant_ids=None):
         events = eventstore.get_events(
             filter=eventstore.Filter(
                 event_ids=[event_id],
@@ -299,6 +300,7 @@ def filter_by_event_id(self, project_ids, event_id):
             ),
             limit=max(len(project_ids), 100),
             referrer="Group.filter_by_event_id",
+            tenant_ids=tenant_ids,
         )
         return self.filter(id__in={event.group_id for event in events})
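Because tenant_ids stays optional on filter_by_event_id, only callers that know their organization, like the group-index endpoint above, need to pass it. A hedged usage sketch (all IDs are placeholders):

from sentry.models import Group

groups = Group.objects.filter_by_event_id(
    [42, 43],   # project_ids (placeholder values)
    "ab" * 16,  # placeholder 32-character hex event ID
    tenant_ids={"organization_id": 1},  # optional; defaults to None
)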
13 changes: 10 additions & 3 deletions src/sentry/rules/history/preview.py
@@ -251,7 +253,9 @@ def get_top_groups(
                 },
                 has_issue_state_condition,
             )
-            query_params.append(SnubaQueryParams(**kwargs))
+            query_params.append(
+                SnubaQueryParams(**kwargs, tenant_ids={"organization_id": project.organization_id})
+            )

     groups = []
     for result in bulk_raw_query(query_params, use_cache=True, referrer="preview.get_top_groups"):
@@ -313,6 +315,7 @@ def get_events(
     events = []

     query_params = []
+    tenant_ids = {"organization_id": project.organization_id}
     # query events by group_id (first event for each group)
     for dataset, ids in group_ids.items():
         if dataset not in columns or dataset == Dataset.Transactions:
@@ -332,7 +335,7 @@
                 "selected_columns": columns[dataset] + ["group_id"],
             },
         )
-        query_params.append(SnubaQueryParams(**kwargs))
+        query_params.append(SnubaQueryParams(**kwargs, tenant_ids=tenant_ids))

     # query events by event_id
     for dataset, ids in event_ids.items():
@@ -346,6 +349,7 @@
                 filter_keys={"project_id": [project.id]},
                 conditions=[("event_id", "IN", ids)],
                 selected_columns=columns[dataset],
+                tenant_ids=tenant_ids,
             )
         )

@@ -516,7 +520,10 @@ def get_frequency_buckets(
         },
     )
     bucket_counts = raw_query(
-        **kwargs, use_cache=True, referrer="preview.get_frequency_buckets"
+        **kwargs,
+        use_cache=True,
+        referrer="preview.get_frequency_buckets",
+        tenant_ids={"organization_id": project.organization_id},
     ).get("data", [])

     for bucket in bucket_counts:
11 changes: 9 additions & 2 deletions src/sentry/snuba/outcomes.py
@@ -320,8 +320,15 @@ def run_outcomes_query_totals(


 def run_outcomes_query_timeseries(
-    query: QueryDefinition, tenant_ids: dict[str, Any] | None = None
+    query: QueryDefinition,
+    referrer: str = "outcomes.timeseries",
+    tenant_ids: dict[str, Any] | None = None,
 ) -> ResultSet:
+    """
+    Runs an outcomes query. By default the referrer is `outcomes.timeseries`, and it should not be
+    changed without a very specific reason. E.g. getsentry uses this function for billing metrics,
+    so it passes a different referrer because that is no longer a "product" query.
+    """
     snql_query = Query(
         match=Entity(query.match),
         select=query.select_params,
@@ -334,7 +341,7 @@ def run_outcomes_query_timeseries(
     request = Request(
         dataset=query.dataset.value, app_id="default", query=snql_query, tenant_ids=tenant_ids
     )
-    result_timeseries = raw_snql_query(request, referrer="outcomes.timeseries")
+    result_timeseries = raw_snql_query(request, referrer=referrer)
     return _format_rows(result_timeseries["data"], query)
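Per the new docstring, product callers should keep the default referrer; only a deliberately different consumer such as getsentry's billing metrics overrides it. A sketch of both call shapes (assuming query is an already-built QueryDefinition; the override referrer name is hypothetical):

from sentry.snuba.outcomes import run_outcomes_query_timeseries

org_id = 1  # placeholder

# Typical product query: keep the default "outcomes.timeseries" referrer.
rows = run_outcomes_query_timeseries(query, tenant_ids={"organization_id": org_id})

# Deliberate override, e.g. a billing-metrics consumer:
billing_rows = run_outcomes_query_timeseries(
    query,
    referrer="getsentry.billing-metrics",  # hypothetical referrer name
    tenant_ids={"organization_id": org_id},
)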
4 changes: 4 additions & 0 deletions src/sentry/tasks/reprocessing2.py
@@ -6,6 +6,7 @@

 from sentry import eventstore, eventstream, nodestore
 from sentry.eventstore.models import Event
+from sentry.models import Project
 from sentry.reprocessing2 import buffered_delete_old_primary_hash
 from sentry.tasks.base import instrumented_task, retry
 from sentry.tasks.process_buffer import buffer_incr
@@ -64,6 +65,9 @@ def reprocess_group(
         batch_size=settings.SENTRY_REPROCESSING_PAGE_SIZE,
         state=query_state,
         referrer="reprocessing2.reprocess_group",
+        tenant_ids={
+            "organization_id": Project.objects.get_from_cache(id=project_id).organization_id
+        },
     )

     if not events:
1 change: 1 addition & 0 deletions src/sentry/tasks/unmerge.py
@@ -489,6 +489,7 @@ def unmerge(*posargs, **kwargs):
         batch_size=args.batch_size,
         state=last_event,
         referrer="unmerge",
+        tenant_ids={"organization_id": source.project.organization_id},
     )

     # If there are no more events to process, we're done with the migration.
10 changes: 9 additions & 1 deletion src/sentry/utils/query.py
@@ -12,7 +12,14 @@ class InvalidQuerySetError(ValueError):
     pass


-def celery_run_batch_query(filter, batch_size, referrer, state=None, fetch_events=True):
+def celery_run_batch_query(
+    filter,
+    batch_size,
+    referrer,
+    state=None,
+    fetch_events=True,
+    tenant_ids=None,
+):
     """
     A tool for batched queries similar in purpose to RangeQuerySetWrapper that
     is used for celery tasks in issue merge/unmerge/reprocessing.
@@ -50,6 +57,7 @@ def celery_run_batch_query(
             limit=batch_size,
             referrer=referrer,
             orderby=["-timestamp", "-event_id"],
+            tenant_ids=tenant_ids,
         )
     )
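Since celery_run_batch_query simply forwards tenant_ids to the eventstore, task authors thread it through exactly as the unmerge and reprocessing tasks above do. A minimal sketch of the batching loop (filter values and IDs are placeholders, and the (state, events) return shape is assumed from the callers shown above):

from sentry import eventstore
from sentry.utils.query import celery_run_batch_query

state = None
while True:
    # Returns the next pagination state plus a batch of events.
    state, events = celery_run_batch_query(
        filter=eventstore.Filter(project_ids=[42]),
        batch_size=100,
        referrer="example.batch_task",  # hypothetical referrer
        state=state,
        tenant_ids={"organization_id": 1},
    )
    if not events:
        break
    # ... process the batch ...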