228 lines
7.1 KiB
Python
228 lines
7.1 KiB
Python
|
|
import json
|
||
|
|
import decimal
|
||
|
|
import uuid
|
||
|
|
from dataclasses import dataclass
|
||
|
|
from datetime import datetime, date, time, timedelta, timezone as dt_timezone
|
||
|
|
from pathlib import Path
|
||
|
|
|
||
|
|
from django.apps import apps
|
||
|
|
from django.conf import settings
|
||
|
|
from django.core.management import call_command
|
||
|
|
from django.core import serializers
|
||
|
|
from django.db import connection
|
||
|
|
from django.utils import timezone
|
||
|
|
|
||
|
|
from core.models import BackupSchedule, MediaItem
|
||
|
|
|
||
|
|
|
||
|
|
class _BackupJSONEncoder(json.JSONEncoder):
|
||
|
|
"""Encode types that Django's Python serializer produces but stdlib json cannot handle."""
|
||
|
|
|
||
|
|
def default(self, obj):
|
||
|
|
if isinstance(obj, (datetime, date, time)):
|
||
|
|
return obj.isoformat()
|
||
|
|
if isinstance(obj, decimal.Decimal):
|
||
|
|
return float(obj)
|
||
|
|
if isinstance(obj, uuid.UUID):
|
||
|
|
return str(obj)
|
||
|
|
return super().default(obj)
|
||
|
|
|
||
|
|
|
||
|
|
@dataclass
class BackupFileInfo:
    """Lightweight record describing one backup file on disk."""

    filename: str  # file name within the backup root (no directory component)
    size_bytes: int  # size on disk, in bytes
    created_at: str  # ISO-8601 timestamp; derived from the file's mtime in list_backups()
def get_backup_root() -> Path:
    """Return the backup directory, creating it (and parents) on first use.

    The location comes from settings.BACKUP_ROOT, falling back to /Backups
    when the setting is absent.
    """
    configured = getattr(settings, "BACKUP_ROOT", "/Backups")
    backup_dir = Path(configured)
    backup_dir.mkdir(parents=True, exist_ok=True)
    return backup_dir
def _export_payload() -> dict:
    """Serialize every installed model into a version-1 backup payload.

    Session rows are skipped (transient), and MediaItem cache fields are
    nulled so the backup stays DB-only and file-agnostic.

    Returns a dict with "version", "created_at" and "items" keys; "items"
    is the flat list of Django python-serialized records.
    """
    records = []
    for model in apps.get_models():
        # Sessions are transient server state; never include them.
        if model._meta.label_lower == "sessions.session":
            continue

        rows = serializers.serialize("python", model.objects.all().order_by("pk"))

        if model is MediaItem:
            # Strip cache-path metadata so backups stay DB-only and file-agnostic.
            for row in rows:
                row["fields"]["cached_file_path"] = None
                row["fields"]["cache_expires_at"] = None

        records.extend(rows)

    return {
        "version": 1,
        "created_at": timezone.now().isoformat(),
        "items": records,
    }
def create_backup_now() -> Path:
    """Write a timestamped JSON backup, record the run, and prune old files.

    Returns the path of the freshly written backup file.
    """
    backup_dir = get_backup_root()
    timestamp = timezone.localtime().strftime("%Y%m%d_%H%M%S")
    target = backup_dir / f"pytv_backup_{timestamp}.json"

    body = json.dumps(_export_payload(), indent=2, cls=_BackupJSONEncoder)
    target.write_text(body, encoding="utf-8")

    # Record the run on the singleton schedule, then enforce retention.
    schedule = BackupSchedule.get_solo()
    schedule.last_run_at = timezone.now()
    schedule.save(update_fields=["last_run_at"])
    apply_retention(schedule.retention_count)

    return target
def list_backups() -> list[BackupFileInfo]:
    """Return metadata for every *.json backup in the root, newest first.

    Each file is stat()'d exactly once: the original stat()'d three times per
    file (sort key, size, mtime), which is both wasteful and racy — a file
    changing between calls could report a size/timestamp inconsistent with
    its sort position.
    """
    entries = []
    for f in get_backup_root().glob("*.json"):
        st = f.stat()  # single snapshot; all fields below come from it
        entries.append(
            (
                st.st_mtime,
                BackupFileInfo(
                    filename=f.name,
                    size_bytes=st.st_size,
                    created_at=datetime.fromtimestamp(st.st_mtime, tz=dt_timezone.utc).isoformat(),
                ),
            )
        )
    # Newest first; sort on the captured mtime only (BackupFileInfo is not orderable).
    entries.sort(key=lambda pair: pair[0], reverse=True)
    return [info for _, info in entries]
def apply_retention(retention_count: int):
    """Delete all but the newest *retention_count* backups.

    A retention_count of zero or less means "keep everything".
    """
    if retention_count <= 0:
        return

    root = get_backup_root()
    # list_backups() is ordered newest-first, so everything past the
    # cutoff index is stale and safe to remove.
    for stale in list_backups()[retention_count:]:
        path = root / stale.filename
        if path.exists():
            path.unlink()
def get_backup_file(filename: str) -> Path:
    """Resolve *filename* inside the backup root, rejecting path traversal.

    Args:
        filename: a backup file name as reported by list_backups().

    Returns:
        The resolved absolute path of the backup file.

    Raises:
        FileNotFoundError: if the name escapes the backup root, does not
            exist, or is not a .json file.
    """
    root = get_backup_root().resolve()
    target = (root / filename).resolve()
    # A raw string prefix check (startswith) wrongly accepts sibling
    # directories such as "/Backups2" for a root of "/Backups";
    # is_relative_to() compares whole path components.
    if not target.is_relative_to(root):
        raise FileNotFoundError("Invalid backup path")
    if not target.exists() or target.suffix.lower() != ".json":
        raise FileNotFoundError("Backup not found")
    return target
def import_backup_content(content: str, mode: str = "append") -> dict:
    """Restore a backup payload into the database.

    Args:
        content: JSON text produced by the backup exporter — an object with
            an "items" list of Django-serialized records.
        mode: "append" merges records into existing data (rows sharing a
            primary key are updated in place); "override" flushes all tables
            before loading.

    Returns:
        A summary dict: {"created": int, "updated": int, "mode": str}.

    Raises:
        ValueError: if *mode* is unknown or the payload has no "items" list.
    """
    if mode not in {"append", "override"}:
        raise ValueError("mode must be 'append' or 'override'")

    payload = json.loads(content)
    items = payload.get("items") if isinstance(payload, dict) else None
    if items is None:
        raise ValueError("Invalid backup payload")

    if mode == "override":
        # Flush DB tables first (DB-only restore; does not touch media cache files).
        call_command("flush", verbosity=0, interactive=False)

    created = 0
    updated = 0

    # Re-dump the items so Django's JSON deserializer can consume them as a
    # single serialized stream.
    serialized = json.dumps(items)
    # Constraint checks are disabled so records can be loaded in any order
    # despite forward FK references between models.
    with connection.constraint_checks_disabled():
        for deserialized in serializers.deserialize("json", serialized):
            obj = deserialized.object
            model = obj.__class__
            existing = model.objects.filter(pk=obj.pk).first()
            if existing is None:
                # New primary key: insert as-is (covers both modes).
                deserialized.save()
                created += 1
                continue

            if mode == "append":
                # Merge: copy every concrete non-PK field onto the existing row.
                for field in model._meta.local_fields:
                    if field.primary_key:
                        continue
                    setattr(existing, field.name, getattr(obj, field.name))
                existing.save()
                # M2M links are not local fields; apply them separately.
                # NOTE(review): assumes m2m_data is always a dict here — it can
                # be None when deserializing with forward references; confirm.
                for m2m_field, values in deserialized.m2m_data.items():
                    getattr(existing, m2m_field).set(values)
                updated += 1
            else:
                # Override mode after flush: overwrite whatever exists.
                deserialized.save()
                updated += 1

    return {"created": created, "updated": updated, "mode": mode}
def compute_next_run(schedule: BackupSchedule, now=None):
    """Return the next datetime this schedule should fire, or None when disabled.

    Hourly schedules use only the configured minute; daily/weekly/monthly
    also use the configured hour (and weekday / day-of-month respectively).
    """
    if now is None:
        now = timezone.now()

    if not schedule.enabled:
        return None

    freq = schedule.frequency

    if freq == BackupSchedule.Frequency.HOURLY:
        nxt = now.replace(second=0, microsecond=0, minute=schedule.minute)
        # Already passed this hour's slot — take the next hour's.
        return nxt + timedelta(hours=1) if nxt <= now else nxt

    # All remaining frequencies anchor to the configured hour:minute today.
    nxt = now.replace(second=0, microsecond=0, minute=schedule.minute, hour=schedule.hour)

    if freq == BackupSchedule.Frequency.DAILY:
        return nxt + timedelta(days=1) if nxt <= now else nxt

    if freq == BackupSchedule.Frequency.WEEKLY:
        # Days forward to the configured weekday (0 means today).
        ahead = (schedule.day_of_week - nxt.weekday()) % 7
        nxt += timedelta(days=ahead)
        return nxt + timedelta(days=7) if nxt <= now else nxt

    # Monthly: clamp to day 28 so the target day exists in every month.
    dom = max(1, min(schedule.day_of_month, 28))
    nxt = nxt.replace(day=dom)
    if nxt <= now:
        year, month = nxt.year, nxt.month + 1
        if month > 12:
            year, month = year + 1, 1
        nxt = nxt.replace(year=year, month=month, day=dom)
    return nxt
def is_backup_due(schedule: BackupSchedule, now=None) -> bool:
    """Return True when a backup should run right now.

    Two gates must both have passed: the frequency interval since the last
    run, and the configured wall-clock minute (plus hour for non-hourly
    schedules) within the current period. A schedule that has never run is
    due immediately.
    """
    if now is None:
        now = timezone.now()

    if not schedule.enabled:
        return False

    if schedule.last_run_at is None:
        # First ever run: fire without waiting for the wall-clock slot.
        return True

    # Interval gate: one full period since the previous run.
    # Monthly (the fallback) is approximated as 28 days.
    intervals = {
        BackupSchedule.Frequency.HOURLY: timedelta(hours=1),
        BackupSchedule.Frequency.DAILY: timedelta(days=1),
        BackupSchedule.Frequency.WEEKLY: timedelta(days=7),
    }
    next_due = schedule.last_run_at + intervals.get(schedule.frequency, timedelta(days=28))

    # Wall-clock gate: align to the configured minute (and hour when the
    # schedule is not hourly).
    gate = now.replace(second=0, microsecond=0, minute=schedule.minute)
    if schedule.frequency != BackupSchedule.Frequency.HOURLY:
        gate = gate.replace(hour=schedule.hour)

    return now >= max(next_due, gate)