Merged
peterbecom/base/analytics_referrer_events.py (2 changes: 1 addition & 1 deletion)
@@ -20,7 +20,7 @@ def create_analytics_referrer_events(max=100, min_hours_old=2):
         .filter(meta__referrer__isnull=False)
         .order_by("-created")
     )
-
+    print(qs.query)
     batch = []
     for event in qs[:max]:
         referrer = event.meta["referrer"]
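Editorial note on the change above: the PR swaps a blank line for print(qs.query). Stringifying a queryset's query attribute renders the SQL Django will execute (without parameter quoting), so the cron output now shows the backfill query for debugging. A minimal sketch of the mechanism, assuming the queryset is over AnalyticsEvent (the truncated context above does not name the model):

# Sketch only: .query compiles a queryset to the SQL string Django would run.
from peterbecom.base.models import AnalyticsEvent

qs = AnalyticsEvent.objects.filter(meta__referrer__isnull=False).order_by("-created")
print(qs.query)  # e.g. SELECT ... FROM ... WHERE ... ORDER BY "created" DESC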
peterbecom/base/migrations/0019_alter_analyticsgeoevent_created.py (18 changes: 18 additions & 0 deletions)
@@ -0,0 +1,18 @@
+# Generated by Django 5.2.3 on 2025-06-30 22:44
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('base', '0018_delete_commandrun'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='analyticsgeoevent',
+            name='created',
+            field=models.DateTimeField(auto_now_add=True, db_index=True),
+        ),
+    ]
peterbecom/base/models.py (2 changes: 1 addition & 1 deletion)
@@ -204,7 +204,7 @@ class AnalyticsGeoEvent(models.Model):
     country = models.CharField(max_length=100, null=True)
     latitude = models.FloatField(null=True)
     longitude = models.FloatField(null=True)
-    created = models.DateTimeField(auto_now_add=True)
+    created = models.DateTimeField(auto_now_add=True, db_index=True)
     lookup = models.JSONField(default=dict)


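Editorial note: this db_index=True on created is exactly what migration 0019 above applies. A hedged sketch of the access patterns such an index serves (illustrative call sites; the diff does not show where AnalyticsGeoEvent is filtered or ordered by created):

# Illustrative only: time-keyed query shapes that an index on "created" speeds up.
import datetime

from django.utils import timezone

from peterbecom.base.models import AnalyticsGeoEvent

cutoff = timezone.now() - datetime.timedelta(days=90)
stale = AnalyticsGeoEvent.objects.filter(created__lt=cutoff)    # range filter
latest = AnalyticsGeoEvent.objects.order_by("-created")[:1000]  # newest-first slice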
peterbecom/base/tasks.py (44 changes: 36 additions & 8 deletions)
@@ -25,6 +25,34 @@
 from peterbecom.base.xcache_analyzer import get_x_cache


+def get_full_path(func):
+    return f"{func.__module__}.{func.__qualname__}"
+
+
+def log_task_run(func):
+    @functools.wraps(func)
+    def wrapper(*args, **kwargs):
+        t0 = time.time()
+        failed = False
+        try:
+            func(*args, **kwargs)
+        except Exception:
+            failed = True
+            raise  # re-raise so the failure still surfaces to Huey
+        finally:
+            t1 = time.time()
+            if t1 - t0 < 1:
+                took = f"{(t1 - t0) * 1000:.1f}ms"
+            else:
+                took = f"{(t1 - t0):.2f}s"
+            print(
+                f"(Crontab Task) {get_full_path(func)}",
+                f"{'Failed!' if failed else 'Worked.'}",
+                f"Took {took}. ({timezone.now()})",
+            )
+
+    return wrapper
+
+
 def measure_post_process(func):
     @functools.wraps(func)
     def inner(filepath, url, *args, **kwargs):
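
For orientation: once @log_task_run is applied (as in the hunks below), every run of a decorated periodic task prints one summary line to the Huey consumer's stdout, shaped roughly like this (values illustrative):

    (Crontab Task) peterbecom.base.tasks.run_purge_cdn_urls Worked. Took 3.4ms. (2025-06-30 22:44:00.123456+00:00)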
@@ -54,6 +82,7 @@ def inner(filepath, url, *args, **kwargs):


 @periodic_task(crontab(minute="*"))
+@log_task_run
 def run_purge_cdn_urls():
     CDNPurgeURL.purge_old()
     for i in range(3):
@@ -108,6 +137,7 @@ def post_process_after_cdn_purge(url):


 @periodic_task(crontab(hour="*", minute="3"))
+@log_task_run
 def purge_old_cdnpurgeurls():
     old = timezone.now() - datetime.timedelta(days=30)
     ancient = CDNPurgeURL.objects.filter(created__lt=old)
@@ -116,6 +146,7 @@ def purge_old_postprocessings():


 @periodic_task(crontab(hour="*", minute="2"))
+@log_task_run
 def purge_old_postprocessings():
     old = timezone.now() - datetime.timedelta(days=30)
     ancient = PostProcessing.objects.filter(created__lt=old)
@@ -133,6 +164,7 @@ def purge_old_postprocessings():


 @periodic_task(crontab(minute="*"))
+@log_task_run
 def health_check_to_disk():
     health_file = Path("/tmp/huey_health.json")
     try:
@@ -159,30 +191,26 @@ def health_check_to_disk():


 @periodic_task(crontab(minute="2"))
+@log_task_run
 def create_analytics_geo_events_backfill():
-    print(
-        "(Debugging Cron) Executing create_analytics_geo_events_backfill",
-        timezone.now(),
-    )
     create_analytics_geo_events(max=1000)


 @periodic_task(crontab(minute="3"))
+@log_task_run
 def create_analytics_referrer_events_backfill():
-    print(
-        "(Debugging Cron) Executing create_analytics_referrer_events_backfill",
-        timezone.now(),
-    )
     create_analytics_referrer_events(max=1000)


 @periodic_task(crontab(hour="1", minute="2"))
+@log_task_run
 def delete_old_request_logs():
     old = timezone.now() - datetime.timedelta(days=60)
     RequestLog.objects.filter(created__lt=old).delete()


 @periodic_task(crontab(hour="1", minute="3"))
+@log_task_run
 def delete_old_analyticsevents():
     old = timezone.now() - datetime.timedelta(days=90)
     AnalyticsEvent.objects.filter(created__lt=old).delete()
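
A quick way to exercise the new decorator outside the consumer, as a sketch: this assumes pytest with its built-in capsys fixture, and relies on the wrapper re-raising after logging, as shown above; it is not part of the PR.

import pytest

from peterbecom.base.tasks import log_task_run


def test_log_task_run_logs_and_reraises(capsys):
    @log_task_run
    def boom():
        raise ValueError("nope")

    with pytest.raises(ValueError):  # the wrapper logs, then re-raises
        boom()
    out = capsys.readouterr().out
    assert "(Crontab Task)" in out and "Failed!" in out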