
Commit b074f9a

refactor a bit

Andrii Soldatenko committed
1 parent d20d71e commit b074f9a

File tree: 6 files changed, +50 -29 lines

src/sentry/conf/server.py

Lines changed: 1 addition & 1 deletion

@@ -576,7 +576,6 @@ def SOCIAL_AUTH_DEFAULT_USERNAME():
     "sentry.tasks.deletion",
     "sentry.tasks.deliver_from_outbox",
     "sentry.tasks.digests",
-    "sentry.tasks.dynamic_sampling",
     "sentry.tasks.email",
     "sentry.tasks.files",
     "sentry.tasks.groupowner",
@@ -602,6 +601,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME():
     "sentry.tasks.user_report",
     "sentry.profiles.task",
     "sentry.release_health.tasks",
+    "sentry.dynamic_sampling.tasks",
     "sentry.utils.suspect_resolutions.get_suspect_resolutions",
     "sentry.utils.suspect_resolutions_releases.get_suspect_resolutions_releases",
     "sentry.tasks.derive_code_mappings",

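For context on why this list matters: Celery only registers tasks from modules it is told to import, so the dynamic sampling tasks move from the retired sentry.tasks.dynamic_sampling entry to the new sentry.dynamic_sampling.tasks module. A minimal standalone sketch of that mechanism (a hypothetical app, not Sentry's actual Celery wiring in server.py):

from celery import Celery

# Hypothetical minimal app; Sentry's real configuration lives in sentry/conf/server.py.
app = Celery("example", broker="memory://")

# Any module listed here is imported at worker startup, so the task decorators
# inside it (e.g. @instrumented_task) get a chance to register their tasks.
app.conf.imports = ("sentry.dynamic_sampling.tasks",)
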
src/sentry/dynamic_sampling/prioritise_projects.py

Lines changed: 9 additions & 23 deletions

@@ -4,27 +4,11 @@
 from datetime import datetime, timedelta
 from typing import Mapping, Sequence

-from snuba_sdk import (
-    Column,
-    Condition,
-    Direction,
-    Entity,
-    Function,
-    Granularity,
-    Op,
-    OrderBy,
-    Query,
-    Request,
-)
+from snuba_sdk import Column, Condition, Direction, Entity, Granularity, Op, OrderBy, Query, Request

-from sentry.release_health.release_monitor.base import BaseReleaseMonitorBackend, Totals
-from sentry.sentry_metrics import indexer
-from sentry.sentry_metrics.configuration import UseCaseKey
-from sentry.sentry_metrics.indexer.strings import SESSION_METRIC_NAMES
-from sentry.sentry_metrics.utils import resolve_tag_key
+from sentry.sentry_metrics.indexer.strings import TRANSACTION_METRICS_NAMES
 from sentry.snuba.dataset import Dataset, EntityKey
-from sentry.snuba.metrics.naming_layer.mri import SessionMRI
-from sentry.utils import metrics
+from sentry.snuba.metrics.naming_layer.mri import TransactionMRI
 from sentry.utils.snuba import raw_snql_query

 logger = logging.getLogger(__name__)
@@ -33,7 +17,6 @@


 def fetch_projects_with_total_volumes() -> Mapping[int, Sequence[int]]:
-    # TODO: (andrii) include only "disconnected" projects or independent in tracing context
     aggregated_projects = defaultdict(list)
     start_time = time.time()
     offset = 0
@@ -52,7 +35,7 @@ def fetch_projects_with_total_volumes() -> Mapping[int, Sequence[int]]:
                 Condition(
                     Column("metric_id"),
                     Op.EQ,
-                    SESSION_METRIC_NAMES[SessionMRI.SESSION.value],
+                    TRANSACTION_METRICS_NAMES[TransactionMRI.COUNT_PER_ROOT_PROJECT.value],
                 ),
             ],
             granularity=Granularity(3600),
@@ -66,9 +49,8 @@ def fetch_projects_with_total_volumes() -> Mapping[int, Sequence[int]]:
         )
         request = Request(dataset=Dataset.Metrics.value, app_id="dynamic_sampling", query=query)
         data = raw_snql_query(
-            # TODO: replace to new referrer
             request,
-            referrer="dynamic_sampling.fetch_projects_with_recent_sessions",
+            referrer="dynamic_sampling.fetch_projects_with_total_volumes",
         )["data"]
         count = len(data)
         more_results = count > CHUNK_SIZE
@@ -90,3 +72,7 @@ def fetch_projects_with_total_volumes() -> Mapping[int, Sequence[int]]:
         )

     return aggregated_projects
+
+
+def process_projects_with_total_volumes(project_ids):
+    ...
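fetch_projects_with_total_volumes pages through the metrics query in CHUNK_SIZE batches until MAX_SECONDS elapses, grouping project ids under their org id. A standalone sketch of that pagination-and-aggregation pattern, with a fake row source standing in for the raw_snql_query call (the helper names and data below are illustrative only, not Sentry code):

import time
from collections import defaultdict
from typing import Mapping, Sequence

CHUNK_SIZE = 1000
MAX_SECONDS = 60


def fake_query(offset: int, limit: int) -> list:
    # Stand-in for the Snuba call; returns rows of org/project volumes.
    rows = [{"org_id": 1, "project_id": p} for p in range(1, 5)]
    return rows[offset : offset + limit]


def fetch_projects_sketch() -> Mapping[int, Sequence[int]]:
    aggregated_projects = defaultdict(list)
    start_time = time.time()
    offset = 0
    while time.time() - start_time < MAX_SECONDS:
        # Ask for one row beyond CHUNK_SIZE so we can tell whether another page
        # exists, mirroring the `more_results = count > CHUNK_SIZE` check above.
        data = fake_query(offset=offset, limit=CHUNK_SIZE + 1)
        more_results = len(data) > CHUNK_SIZE
        offset += CHUNK_SIZE
        for row in data[:CHUNK_SIZE]:
            aggregated_projects[row["org_id"]].append(row["project_id"])
        if not more_results:
            break
    return aggregated_projects


print(dict(fetch_projects_sketch()))  # org 1 -> [1, 2, 3, 4]
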
src/sentry/dynamic_sampling/tasks.py

Lines changed: 22 additions & 5 deletions

@@ -1,19 +1,36 @@
 import logging

+from sentry.dynamic_sampling.prioritise_projects import fetch_projects_with_total_volumes
 from sentry.tasks.base import instrumented_task

 CHUNK_SIZE = 1000
 MAX_SECONDS = 60

-logger = logging.getLogger("sentry.tasks.dynamic_sampling")
+logger = logging.getLogger(__name__)


 @instrumented_task(
-    name="sentry.dynamic_sampling.tasks.foo",
-    queue="releasemonitor",
+    name="sentry.dynamic_sampling.tasks.prioritise_projects",
+    queue="dynamicsampling",
     default_retry_delay=5,
     max_retries=5,
 ) # type: ignore
-def foo(**kwargs) -> None:
+def prioritise_projects(**kwargs) -> None:
     for org_id, project_ids in fetch_projects_with_total_volumes().items():
-        process_projects_with_sessions.delay(org_id, project_ids)
+        process_projects_sample_rates.delay(org_id, project_ids)
+
+
+@instrumented_task(
+    name="sentry.dynamic_sampling.process_projects_sample_rates",
+    queue="dynamicsampling",
+    default_retry_delay=5,
+    max_retries=5,
+) # type: ignore
+def process_projects_sample_rates(org_id, project_ids) -> None:
+    """
+    Takes a single org id and a list of project ids
+    """
+    ...
+
+    # Get adjusted sample rate via adjustment model
+    #
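process_projects_sample_rates is left as a stub here; the trailing comment only hints at an "adjustment model". Purely as an illustration of what such an adjustment could look like (a toy rebalancer, not the author's model and not Sentry's API), one might bias sample rates toward low-volume projects within one org:

from typing import Dict, Mapping

# Toy illustration only: the commit leaves the adjustment model unimplemented.
# `volumes` maps project_id -> transaction volume for one org (hypothetical input).
def adjust_sample_rates_sketch(volumes: Mapping[int, int], base_rate: float = 0.1) -> Dict[int, float]:
    total = sum(volumes.values()) or 1
    budget = base_rate * total           # transactions we are willing to keep org-wide
    per_project = budget / len(volumes)  # equal keep-budget per project
    # Low-volume projects end up with higher rates (capped at 1.0), high-volume ones lower.
    return {pid: min(1.0, per_project / max(vol, 1)) for pid, vol in volumes.items()}


print(adjust_sample_rates_sketch({101: 9_000, 102: 1_000}))
# {101: 0.0555..., 102: 0.5}
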

src/sentry/relay/config/__init__.py

Lines changed: 1 addition & 0 deletions

@@ -493,6 +493,7 @@ def _filter_option_to_config_setting(flt: _FilterSpec, setting: str) -> Mapping[
     [
         "s:transactions/user@none",
         "d:transactions/duration@millisecond",
+        "c:transactions/count_per_root_project@none",
     ]
 )

src/sentry/snuba/metrics/naming_layer/mri.py

Lines changed: 1 addition & 0 deletions

@@ -81,6 +81,7 @@ class TransactionMRI(Enum):
     # Ingested
     USER = "s:transactions/user@none"
     DURATION = "d:transactions/duration@millisecond"
+    COUNT_PER_ROOT_PROJECT = "c:transactions/count_per_root_project@none"
     MEASUREMENTS_FCP = "d:transactions/measurements.fcp@millisecond"
     MEASUREMENTS_LCP = "d:transactions/measurements.lcp@millisecond"
     MEASUREMENTS_APP_START_COLD = "d:transactions/measurements.app_start_cold@millisecond"
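The new value follows the MRI pattern used by the neighbouring entries, <type>:<namespace>/<name>@<unit>; here a counter ("c") in the transactions namespace with no unit. A small sketch, assuming only that shape, that pulls the identifier apart:

import re

# Assumes only the MRI shape visible in this file: <type>:<namespace>/<name>@<unit>.
MRI_RE = re.compile(r"^(?P<type>[a-z]):(?P<namespace>[^/]+)/(?P<name>[^@]+)@(?P<unit>.+)$")

match = MRI_RE.match("c:transactions/count_per_root_project@none")
assert match is not None
print(match.groupdict())
# {'type': 'c', 'namespace': 'transactions', 'name': 'count_per_root_project', 'unit': 'none'}
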
Lines changed: 16 additions & 0 deletions

@@ -0,0 +1,16 @@
+import pytest
+
+from sentry.dynamic_sampling.tasks import fetch_projects_with_total_volumes
+
+
+@pytest.mark.django_db
+def test_simple(default_project):
+    test_data = [
+        {
+            "org_id": [default_project.organization.id],
+            "project_id": [default_project.id],
+        },
+    ]
+    assert 1 == 1
+    _ = test_data
+    fetch_projects_with_total_volumes()
