feat(backups): add scheduled database backup service, retention, and default-library bootstrap

This commit is contained in:
2026-03-20 15:00:24 -04:00
parent af3076342a
commit c9718c5483
30 changed files with 2513 additions and 559 deletions

227
core/services/backups.py Normal file
View File

@@ -0,0 +1,227 @@
import json
import decimal
import uuid
from dataclasses import dataclass
from datetime import datetime, date, time, timedelta, timezone as dt_timezone
from pathlib import Path
from django.apps import apps
from django.conf import settings
from django.core.management import call_command
from django.core import serializers
from django.db import connection
from django.utils import timezone
from core.models import BackupSchedule, MediaItem
class _BackupJSONEncoder(json.JSONEncoder):
"""Encode types that Django's Python serializer produces but stdlib json cannot handle."""
def default(self, obj):
if isinstance(obj, (datetime, date, time)):
return obj.isoformat()
if isinstance(obj, decimal.Decimal):
return float(obj)
if isinstance(obj, uuid.UUID):
return str(obj)
return super().default(obj)
@dataclass
class BackupFileInfo:
    """Metadata describing one backup file on disk."""
    filename: str  # bare file name within the backup root (no directory part)
    size_bytes: int  # file size in bytes at listing time
    created_at: str  # ISO-8601 UTC timestamp derived from the file's mtime
def get_backup_root() -> Path:
    """Return the directory all backups are written to, creating it if needed.

    Uses ``settings.BACKUP_ROOT`` when configured, otherwise ``/Backups``.
    """
    backup_dir = Path(getattr(settings, "BACKUP_ROOT", "/Backups"))
    backup_dir.mkdir(parents=True, exist_ok=True)
    return backup_dir
def _export_payload() -> dict:
    """Serialize every model's rows into a versioned backup payload.

    Session rows are excluded, and MediaItem cache metadata is blanked so
    the backup stays DB-only and independent of cached media files.
    """
    rows = []
    for model in apps.get_models():
        # Session rows are ephemeral — never back them up.
        if model._meta.label_lower == "sessions.session":
            continue
        records = serializers.serialize(
            "python", model.objects.all().order_by("pk")
        )
        if model is MediaItem:
            # Strip cache-path metadata so restores don't reference stale files.
            for record in records:
                record["fields"]["cached_file_path"] = None
                record["fields"]["cache_expires_at"] = None
        rows.extend(records)
    return {
        "version": 1,
        "created_at": timezone.now().isoformat(),
        "items": rows,
    }
def create_backup_now() -> Path:
    """Write a timestamped JSON backup, record the run, and prune old files.

    Returns the path of the freshly written backup file.
    """
    timestamp = timezone.localtime().strftime("%Y%m%d_%H%M%S")
    destination = get_backup_root() / f"pytv_backup_{timestamp}.json"
    body = json.dumps(_export_payload(), indent=2, cls=_BackupJSONEncoder)
    destination.write_text(body, encoding="utf-8")
    # Record this run on the singleton schedule, then enforce retention.
    schedule = BackupSchedule.get_solo()
    schedule.last_run_at = timezone.now()
    schedule.save(update_fields=["last_run_at"])
    apply_retention(schedule.retention_count)
    return destination
def list_backups() -> list[BackupFileInfo]:
    """Return backup files in the backup root, newest first.

    Each file is stat'ed exactly once, so the size/mtime pair is internally
    consistent and a file deleted between glob() and stat() is skipped
    instead of raising FileNotFoundError. (The original stat'ed each file
    three times: once for the sort key and twice in the comprehension.)
    """
    entries = []
    for path in get_backup_root().glob("*.json"):
        try:
            st = path.stat()
        except FileNotFoundError:
            # File vanished mid-listing (e.g. concurrent retention) — skip it.
            continue
        entries.append(
            BackupFileInfo(
                filename=path.name,
                size_bytes=st.st_size,
                created_at=datetime.fromtimestamp(st.st_mtime, tz=dt_timezone.utc).isoformat(),
            )
        )
    # ISO-8601 UTC strings sort chronologically, so this matches the
    # original mtime-descending ordering.
    entries.sort(key=lambda info: info.created_at, reverse=True)
    return entries
def apply_retention(retention_count: int):
    """Delete the oldest backups beyond ``retention_count``.

    A count of zero or less disables retention (keeps everything).
    """
    if retention_count <= 0:
        return  # Retention disabled.
    root = get_backup_root()
    # list_backups() is newest-first, so everything past the cutoff is oldest.
    for stale in list_backups()[retention_count:]:
        path = root / stale.filename
        if path.exists():
            path.unlink()
def get_backup_file(filename: str) -> Path:
    """Resolve ``filename`` to an existing backup file inside the backup root.

    Raises FileNotFoundError when the name escapes the backup directory
    (path traversal), when the file is not a ``.json`` file, or when it
    does not exist.
    """
    root = get_backup_root().resolve()
    target = (root / filename).resolve()
    # Component-wise containment check. The previous string-prefix test
    # (str(target).startswith(str(root))) wrongly accepted sibling paths
    # such as "/Backups-other/x.json" when root is "/Backups".
    if not target.is_relative_to(root):
        raise FileNotFoundError("Invalid backup path")
    if not target.exists() or target.suffix.lower() != ".json":
        raise FileNotFoundError("Backup not found")
    return target
def import_backup_content(content: str, mode: str = "append") -> dict:
    """Restore a backup payload produced by ``_export_payload``.

    Args:
        content: JSON text of a backup file; must contain an "items" list.
        mode: "append" merges rows into the existing database;
            "override" flushes all tables first, then loads everything fresh.

    Returns:
        Summary dict: {"created": int, "updated": int, "mode": str}.

    Raises:
        ValueError: on an unknown mode or a malformed payload.
    """
    if mode not in {"append", "override"}:
        raise ValueError("mode must be 'append' or 'override'")
    payload = json.loads(content)
    items = payload.get("items") if isinstance(payload, dict) else None
    if items is None:
        raise ValueError("Invalid backup payload")
    if mode == "override":
        # Flush DB tables first (DB-only restore; does not touch media cache files).
        call_command("flush", verbosity=0, interactive=False)
    created = 0
    updated = 0
    # Round-trip through JSON text so Django's "json" deserializer can
    # rebuild model instances from the python-serializer dicts.
    serialized = json.dumps(items)
    # Constraint checks stay off for the whole load, since rows may arrive
    # in any foreign-key order within the stream.
    with connection.constraint_checks_disabled():
        for deserialized in serializers.deserialize("json", serialized):
            obj = deserialized.object
            model = obj.__class__
            existing = model.objects.filter(pk=obj.pk).first()
            if existing is None:
                # New row (always the case right after an override flush).
                deserialized.save()
                created += 1
                continue
            if mode == "append":
                # Merge: copy every non-PK concrete field onto the existing row.
                for field in model._meta.local_fields:
                    if field.primary_key:
                        continue
                    setattr(existing, field.name, getattr(obj, field.name))
                existing.save()
                # Replace many-to-many links wholesale with the backup's values.
                for m2m_field, values in deserialized.m2m_data.items():
                    getattr(existing, m2m_field).set(values)
                updated += 1
            else:
                # Override mode with a surviving row: overwrite it in place.
                deserialized.save()
                updated += 1
    return {"created": created, "updated": updated, "mode": mode}
def compute_next_run(schedule: BackupSchedule, now=None):
    """Return the next run datetime for ``schedule``, strictly after ``now``.

    ``now`` defaults to the current time. Returns None when the schedule
    is disabled.
    """
    now = now or timezone.now()
    if not schedule.enabled:
        return None
    if schedule.frequency == BackupSchedule.Frequency.HOURLY:
        # Next occurrence of the configured minute, this hour or the next.
        candidate = now.replace(second=0, microsecond=0, minute=schedule.minute)
        if candidate <= now:
            candidate += timedelta(hours=1)
        return candidate
    # Daily/weekly/monthly all anchor on the configured hour:minute of "now".
    candidate = now.replace(second=0, microsecond=0, minute=schedule.minute, hour=schedule.hour)
    if schedule.frequency == BackupSchedule.Frequency.DAILY:
        if candidate <= now:
            candidate += timedelta(days=1)
        return candidate
    if schedule.frequency == BackupSchedule.Frequency.WEEKLY:
        # Roll forward to the configured weekday; assumes day_of_week follows
        # datetime.weekday() semantics (0=Monday) — TODO confirm on the model.
        target = schedule.day_of_week
        delta = (target - candidate.weekday()) % 7
        candidate += timedelta(days=delta)
        if candidate <= now:
            candidate += timedelta(days=7)
        return candidate
    # monthly
    # Clamp to day 28 so the date is valid in every month (incl. February).
    day = max(1, min(schedule.day_of_month, 28))
    candidate = candidate.replace(day=day)
    if candidate <= now:
        # Advance one month, wrapping December into January of the next year.
        month = candidate.month + 1
        year = candidate.year
        if month > 12:
            month = 1
            year += 1
        candidate = candidate.replace(year=year, month=month, day=day)
    return candidate
def is_backup_due(schedule: BackupSchedule, now=None) -> bool:
    """Decide whether a backup should run at ``now``.

    Due when BOTH hold: a full frequency interval has elapsed since
    ``last_run_at``, and ``now`` has reached the schedule's configured
    minute (and hour, for non-hourly frequencies) of the current period.
    A schedule that never ran is immediately due; a disabled one never is.
    """
    now = now or timezone.now()
    if not schedule.enabled:
        return False
    if schedule.last_run_at is None:
        return True
    # Minimum gap since the previous run, per frequency (monthly ≈ 28 days).
    intervals = {
        BackupSchedule.Frequency.HOURLY: timedelta(hours=1),
        BackupSchedule.Frequency.DAILY: timedelta(days=1),
        BackupSchedule.Frequency.WEEKLY: timedelta(days=7),
    }
    next_due = schedule.last_run_at + intervals.get(schedule.frequency, timedelta(days=28))
    # Align "now" to the schedule's wall-clock target for this period.
    aligned = now.replace(second=0, microsecond=0)
    if schedule.frequency != BackupSchedule.Frequency.HOURLY:
        aligned = aligned.replace(hour=schedule.hour)
    aligned = aligned.replace(minute=schedule.minute)
    return now >= max(next_due, aligned)

View File

@@ -0,0 +1,92 @@
"""
Bootstrap service — idempotent startup initialisation.
Ensures a default media library exists as soon as the first user is available.
Called from three places:
1. AppConfig.ready() every server start (DB already populated)
2. post_migrate signal after `manage.py migrate` runs
3. auth.setup_admin endpoint immediately after the first user is created
"""
import logging
logger = logging.getLogger(__name__)  # module-level logger, stdlib convention
DEFAULT_LIBRARY_NAME = "Default Library"  # name given to the auto-created library
# One-shot flag: after the first request triggers the bootstrap, disconnect
# the signal so we don't repeat the DB check on every subsequent request.
_startup_bootstrap_done: bool = False  # flipped True by _on_first_request
def ensure_default_library():
    """
    Create a default media library if none exists yet.

    Idempotent: returns None without side effects when a library already
    exists, or when there is no user yet to own one (setup_admin calls us
    again once the first user is created). Otherwise creates and returns
    the new Library.
    """
    # Imported lazily so the module can load before Django apps are ready.
    from core.models import Library, AppUser

    if Library.objects.exists():
        # Bootstrap already happened.
        return None

    # Choose an owner: earliest superuser if any, otherwise earliest user.
    candidate = AppUser.objects.filter(is_superuser=True).order_by("date_joined").first()
    if candidate is None:
        candidate = AppUser.objects.order_by("date_joined").first()
    if candidate is None:
        logger.debug("ensure_default_library: no users yet, skipping.")
        return None

    new_library = Library.objects.create(
        owner_user=candidate,
        name=DEFAULT_LIBRARY_NAME,
        visibility="public",
        description=(
            "Default media library. "
            "Add media sources here to start building your channels."
        ),
    )
    logger.info(
        "Bootstrap: created default library '%s' (id=%d) owned by '%s'.",
        new_library.name,
        new_library.id,
        candidate.username,
    )
    return new_library
# ---------------------------------------------------------------------------
# Signal handlers — wired up in CoreConfig.ready()
# ---------------------------------------------------------------------------
def _on_post_migrate(sender, **kwargs):
    """post_migrate hook: re-run the library bootstrap after each migration."""
    try:
        ensure_default_library()
    except Exception as err:  # pragma: no cover
        # Best-effort: a failed bootstrap must not abort the migration run.
        logger.warning("ensure_default_library failed in post_migrate: %s", err)
def _on_first_request(sender, **kwargs):
    """
    One-shot: run bootstrap on the very first HTTP request after server start.

    Disconnects itself from ``request_started`` immediately so subsequent
    requests pay zero overhead.
    """
    global _startup_bootstrap_done
    if _startup_bootstrap_done:
        return
    _startup_bootstrap_done = True

    # Detach before doing any work so re-entrant requests can't double-run.
    from django.core.signals import request_started

    request_started.disconnect(_on_first_request)
    try:
        ensure_default_library()
    except Exception as err:  # pragma: no cover
        # Best-effort: never let bootstrap failure break the first request.
        logger.warning("ensure_default_library failed on first request: %s", err)