feat(main): main
This commit is contained in:
12
core/apps.py
12
core/apps.py
@@ -3,3 +3,15 @@ from django.apps import AppConfig
|
||||
|
||||
class CoreConfig(AppConfig):
    """App config for ``core``; wires up bootstrap signal handlers on startup."""

    name = "core"

    def ready(self):
        # Imported here (not at module top) because ready() runs during app
        # registry setup, when importing models/services too early can fail.
        from django.db.models.signals import post_migrate
        from django.core.signals import request_started
        from core.services.bootstrap import _on_post_migrate, _on_first_request

        # After every `manage.py migrate` run.
        post_migrate.connect(_on_post_migrate, sender=self)

        # On every server start, run the check lazily on the first incoming
        # request so the DB is guaranteed to be ready (avoids RuntimeWarning).
        request_started.connect(_on_first_request)
|
||||
|
||||
12
core/management/commands/backup_now.py
Normal file
12
core/management/commands/backup_now.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from core.services.backups import create_backup_now
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Management command: write a one-off backup file immediately."""

    help = "Create a backup JSON under /Backups immediately."

    def handle(self, *args, **options):
        # Delegates to the backups service; `path` is the written backup file.
        path = create_backup_now()
        self.stdout.write(self.style.SUCCESS(f"Backup created: {path}"))
|
||||
|
||||
30
core/management/commands/run_backup_worker.py
Normal file
30
core/management/commands/run_backup_worker.py
Normal file
@@ -0,0 +1,30 @@
|
||||
import time
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from core.models import BackupSchedule
|
||||
from core.services.backups import create_backup_now, is_backup_due
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Long-running worker that polls the backup schedule and runs due backups.

    Fix: any transient error inside the polling loop (DB hiccup, filesystem
    failure during the backup write) previously propagated out of handle()
    and killed the worker process. Each iteration is now guarded; failures
    are reported to stderr and polling continues.
    """

    help = "Continuously check backup schedule and run backups when due."

    def add_arguments(self, parser):
        parser.add_argument(
            "--interval",
            type=int,
            default=60,
            help="Polling interval in seconds (default: 60).",
        )

    def handle(self, *args, **options):
        interval = options["interval"]
        self.stdout.write(self.style.SUCCESS(f"Starting backup worker (interval={interval}s)"))

        while True:
            try:
                schedule = BackupSchedule.get_solo()
                if schedule.enabled and is_backup_due(schedule):
                    path = create_backup_now()
                    self.stdout.write(self.style.SUCCESS(f"Backup created: {path.name}"))
            except Exception as exc:
                # Keep the worker alive across transient failures.
                self.stderr.write(self.style.ERROR(f"Backup worker iteration failed: {exc}"))
            time.sleep(interval)
|
||||
|
||||
21
core/migrations/0006_channel_requires_auth.py
Normal file
21
core/migrations/0006_channel_requires_auth.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# Generated by Django 6.0.3 on 2026-03-10 12:44
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Add ``Channel.requires_auth``: gate streaming/schedule access to signed-in users."""

    dependencies = [
        ("core", "0005_mediasource_max_age_days_and_more"),
    ]

    operations = [
        migrations.AddField(
            model_name="channel",
            name="requires_auth",
            field=models.BooleanField(
                default=False,
                help_text="If True, only signed-in users can stream or fetch schedules for this channel.",
            ),
        ),
    ]
|
||||
27
core/migrations/0007_backupschedule.py
Normal file
27
core/migrations/0007_backupschedule.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Create the ``BackupSchedule`` table (singleton backup-schedule settings)."""

    dependencies = [
        ('core', '0006_channel_requires_auth'),
    ]

    operations = [
        migrations.CreateModel(
            name='BackupSchedule',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('enabled', models.BooleanField(default=False)),
                ('frequency', models.CharField(choices=[('hourly', 'Hourly'), ('daily', 'Daily'), ('weekly', 'Weekly'), ('monthly', 'Monthly')], default='daily', max_length=16)),
                ('minute', models.PositiveSmallIntegerField(default=0)),
                ('hour', models.PositiveSmallIntegerField(default=2)),
                ('day_of_week', models.PositiveSmallIntegerField(default=0)),
                ('day_of_month', models.PositiveSmallIntegerField(default=1)),
                ('retention_count', models.PositiveIntegerField(default=14)),
                ('last_run_at', models.DateTimeField(blank=True, null=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
|
||||
|
||||
@@ -231,6 +231,7 @@ class Channel(models.Model):
|
||||
|
||||
scheduling_mode = models.CharField(max_length=24, choices=SchedulingMode.choices, default=SchedulingMode.TEMPLATE_DRIVEN)
|
||||
is_active = models.BooleanField(default=True)
|
||||
requires_auth = models.BooleanField(default=False, help_text="If True, only signed-in users can stream or fetch schedules for this channel.")
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
@@ -514,3 +515,27 @@ class MediaResumePoint(models.Model):
|
||||
constraints = [
|
||||
models.UniqueConstraint(fields=['user', 'media_item'], name='unique_media_resume_point')
|
||||
]
|
||||
|
||||
|
||||
class BackupSchedule(models.Model):
    """Singleton row holding automated-backup schedule settings."""

    class Frequency(models.TextChoices):
        HOURLY = 'hourly', 'Hourly'
        DAILY = 'daily', 'Daily'
        WEEKLY = 'weekly', 'Weekly'
        MONTHLY = 'monthly', 'Monthly'

    enabled = models.BooleanField(default=False)
    frequency = models.CharField(max_length=16, choices=Frequency.choices, default=Frequency.DAILY)
    # Schedule anchors; which ones apply depends on `frequency`.
    minute = models.PositiveSmallIntegerField(default=0)
    hour = models.PositiveSmallIntegerField(default=2)
    day_of_week = models.PositiveSmallIntegerField(default=0)  # 0=Mon
    day_of_month = models.PositiveSmallIntegerField(default=1)
    # How many newest backup files to keep when pruning.
    retention_count = models.PositiveIntegerField(default=14)
    last_run_at = models.DateTimeField(blank=True, null=True)
    updated_at = models.DateTimeField(auto_now=True)

    @classmethod
    def get_solo(cls):
        # Singleton accessor: always row id=1, created lazily on first use.
        obj, _ = cls.objects.get_or_create(id=1)
        return obj
|
||||
|
||||
|
||||
0
core/scripts/__init__.py
Normal file
0
core/scripts/__init__.py
Normal file
41
core/scripts/make_user.py
Normal file
41
core/scripts/make_user.py
Normal file
@@ -0,0 +1,41 @@
|
||||
import os
|
||||
import sys
|
||||
import django
|
||||
from getpass import getpass
|
||||
|
||||
def main():
    """Interactively create a PYTV user (optionally a superuser).

    Prompts on stdin for username/email/password. Exits with status 1 on
    validation failure (empty username, duplicate user), 0 on Ctrl-C.
    """
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pytv.settings")
    django.setup()

    # Imported after django.setup() so the app registry is initialised.
    from core.models import AppUser

    print("--- Create PYTV User ---")
    try:
        username = input("Username: ").strip()
        if not username:
            print("Username is required!")
            sys.exit(1)

        email = input("Email: ").strip()
        password = getpass("Password: ")

        is_admin_str = input("Is Admin? (y/N): ").strip().lower()
        is_admin = is_admin_str in ['y', 'yes']

        if AppUser.objects.filter(username=username).exists():
            print(f"User '{username}' already exists!")
            sys.exit(1)

        if is_admin:
            AppUser.objects.create_superuser(username=username, email=email, password=password)
            print(f"Superuser '{username}' created successfully!")
        else:
            AppUser.objects.create_user(username=username, email=email, password=password)
            print(f"User '{username}' created successfully!")

    except KeyboardInterrupt:
        print("\nOperation cancelled.")
        sys.exit(0)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
227
core/services/backups.py
Normal file
227
core/services/backups.py
Normal file
@@ -0,0 +1,227 @@
|
||||
import json
|
||||
import decimal
|
||||
import uuid
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, date, time, timedelta, timezone as dt_timezone
|
||||
from pathlib import Path
|
||||
|
||||
from django.apps import apps
|
||||
from django.conf import settings
|
||||
from django.core.management import call_command
|
||||
from django.core import serializers
|
||||
from django.db import connection
|
||||
from django.utils import timezone
|
||||
|
||||
from core.models import BackupSchedule, MediaItem
|
||||
|
||||
|
||||
class _BackupJSONEncoder(json.JSONEncoder):
|
||||
"""Encode types that Django's Python serializer produces but stdlib json cannot handle."""
|
||||
|
||||
def default(self, obj):
|
||||
if isinstance(obj, (datetime, date, time)):
|
||||
return obj.isoformat()
|
||||
if isinstance(obj, decimal.Decimal):
|
||||
return float(obj)
|
||||
if isinstance(obj, uuid.UUID):
|
||||
return str(obj)
|
||||
return super().default(obj)
|
||||
|
||||
|
||||
@dataclass
class BackupFileInfo:
    """Lightweight view of one backup file on disk, for listings."""

    # Base name of the backup file, e.g. "pytv_backup_20240101_020000.json".
    filename: str
    # File size in bytes (from stat()).
    size_bytes: int
    # ISO-8601 UTC timestamp derived from the file's mtime.
    created_at: str
|
||||
|
||||
|
||||
def get_backup_root() -> Path:
    """Return the backup directory, creating it if missing.

    Resolves to ``settings.BACKUP_ROOT`` when defined, else "/Backups".
    """
    # Backups are always written under a fixed directory.
    root = Path(getattr(settings, "BACKUP_ROOT", "/Backups"))
    root.mkdir(parents=True, exist_ok=True)
    return root
|
||||
|
||||
|
||||
def _export_payload() -> dict:
    """Serialize every model's rows into a version-tagged backup payload.

    Returns ``{"version": 1, "created_at": <iso timestamp>, "items": [...]}``
    where items are Django "python"-serializer dicts for all installed models
    except sessions.session. MediaItem cache-path fields are nulled out.
    """
    items = []
    for model in apps.get_models():
        # Sessions are ephemeral login state — not worth backing up.
        if model._meta.label_lower == "sessions.session":
            continue
        queryset = model.objects.all().order_by("pk")
        serialized = serializers.serialize("python", queryset)

        # Strip cache-path metadata so backups stay DB-only and file-agnostic.
        if model is MediaItem:
            for item in serialized:
                item["fields"]["cached_file_path"] = None
                item["fields"]["cache_expires_at"] = None

        items.extend(serialized)

    return {
        "version": 1,
        "created_at": timezone.now().isoformat(),
        "items": items,
    }
|
||||
|
||||
|
||||
def create_backup_now() -> Path:
    """Write a timestamped JSON backup, record the run, apply retention.

    Returns the path of the newly written backup file.
    """
    root = get_backup_root()
    # Local time in the filename so names sort chronologically for operators.
    stamp = timezone.localtime().strftime("%Y%m%d_%H%M%S")
    filename = f"pytv_backup_{stamp}.json"
    path = root / filename

    payload = _export_payload()
    path.write_text(json.dumps(payload, indent=2, cls=_BackupJSONEncoder), encoding="utf-8")

    # Record the run on the singleton schedule, then prune old backups
    # down to the configured retention count.
    schedule = BackupSchedule.get_solo()
    schedule.last_run_at = timezone.now()
    schedule.save(update_fields=["last_run_at"])
    apply_retention(schedule.retention_count)
    return path
|
||||
|
||||
|
||||
def list_backups() -> list[BackupFileInfo]:
    """List backup JSON files under the backup root, newest first.

    Fix: each file was previously stat()ed up to three times (sort key,
    size, mtime) — wasteful and racy if a file changes or disappears
    between calls. Each file is now stat()ed exactly once and the result
    reused for sorting and for the returned metadata.
    """
    root = get_backup_root()
    entries = [(f, f.stat()) for f in root.glob("*.json")]
    entries.sort(key=lambda pair: pair[1].st_mtime, reverse=True)
    return [
        BackupFileInfo(
            filename=f.name,
            size_bytes=st.st_size,
            created_at=datetime.fromtimestamp(st.st_mtime, tz=dt_timezone.utc).isoformat(),
        )
        for f, st in entries
    ]
|
||||
|
||||
|
||||
def apply_retention(retention_count: int):
    """Delete backup files beyond the newest ``retention_count``.

    A non-positive count disables pruning entirely.

    Fix: the previous exists()-then-unlink() pair was racy — another
    process could delete the file between the check and the unlink,
    raising FileNotFoundError. ``unlink(missing_ok=True)`` performs the
    delete atomically with respect to that race.
    """
    if retention_count <= 0:
        return
    # list_backups() returns newest-first, so everything past the cutoff is stale.
    stale = list_backups()[retention_count:]
    root = get_backup_root()
    for info in stale:
        (root / info.filename).unlink(missing_ok=True)
|
||||
|
||||
|
||||
def get_backup_file(filename: str) -> Path:
    """Resolve ``filename`` to a path inside the backup root, rejecting traversal.

    Security fix: the previous containment check used ``str.startswith()``,
    which accepts sibling directories sharing the prefix — e.g. with root
    "/Backups", a crafted name resolving to "/Backups_evil/x.json" passed.
    ``Path.is_relative_to()`` compares whole path components instead.

    Raises:
        FileNotFoundError: path escapes the backup root, the file does not
            exist, or it is not a ``.json`` file.
    """
    root = get_backup_root().resolve()
    target = (root / filename).resolve()
    if not target.is_relative_to(root):
        raise FileNotFoundError("Invalid backup path")
    if not target.exists() or target.suffix.lower() != ".json":
        raise FileNotFoundError("Backup not found")
    return target
|
||||
|
||||
|
||||
def import_backup_content(content: str, mode: str = "append") -> dict:
    """Restore a backup JSON payload into the database.

    Args:
        content: JSON text previously produced by ``create_backup_now``.
        mode: "append" merges rows field-by-field into existing rows (by pk);
            "override" flushes the DB first, then loads everything.

    Returns:
        ``{"created": int, "updated": int, "mode": str}`` counters.

    Raises:
        ValueError: unknown mode, or payload without an "items" list.
    """
    if mode not in {"append", "override"}:
        raise ValueError("mode must be 'append' or 'override'")

    payload = json.loads(content)
    items = payload.get("items") if isinstance(payload, dict) else None
    if items is None:
        raise ValueError("Invalid backup payload")

    if mode == "override":
        # Flush DB tables first (DB-only restore; does not touch media cache files).
        call_command("flush", verbosity=0, interactive=False)

    created = 0
    updated = 0

    # Re-dump items so Django's "json" deserializer can consume them.
    # Constraint checks are disabled because rows arrive in arbitrary FK order.
    serialized = json.dumps(items)
    with connection.constraint_checks_disabled():
        for deserialized in serializers.deserialize("json", serialized):
            obj = deserialized.object
            model = obj.__class__
            existing = model.objects.filter(pk=obj.pk).first()
            if existing is None:
                deserialized.save()
                created += 1
                continue

            if mode == "append":
                # Merge: copy every non-PK concrete local field onto the existing row.
                for field in model._meta.local_fields:
                    if field.primary_key:
                        continue
                    setattr(existing, field.name, getattr(obj, field.name))
                existing.save()
                # M2M relations are carried separately by the deserializer.
                for m2m_field, values in deserialized.m2m_data.items():
                    getattr(existing, m2m_field).set(values)
                updated += 1
            else:
                # Override after flush: a pk match here means the row was
                # created earlier in this same import; overwrite wholesale.
                deserialized.save()
                updated += 1

    return {"created": created, "updated": updated, "mode": mode}
|
||||
|
||||
|
||||
def compute_next_run(schedule: BackupSchedule, now=None):
    """Return the next scheduled run datetime strictly after ``now``, or
    None when the schedule is disabled.

    Anchors used per frequency: minute (hourly); hour+minute (daily);
    plus day_of_week — 0=Mon, matching datetime.weekday() — (weekly);
    plus day_of_month, clamped to 1..28 so every month has the day (monthly).

    NOTE(review): assumes schedule.minute/hour hold valid clock values;
    out-of-range values would make replace() raise ValueError — confirm
    they are validated where the schedule is edited.
    """
    now = now or timezone.now()

    if not schedule.enabled:
        return None

    if schedule.frequency == BackupSchedule.Frequency.HOURLY:
        # Anchor to the configured minute; if that moment has passed this
        # hour, the next occurrence is one hour later.
        candidate = now.replace(second=0, microsecond=0, minute=schedule.minute)
        if candidate <= now:
            candidate += timedelta(hours=1)
        return candidate

    # All remaining frequencies anchor to hour+minute of some day.
    candidate = now.replace(second=0, microsecond=0, minute=schedule.minute, hour=schedule.hour)

    if schedule.frequency == BackupSchedule.Frequency.DAILY:
        if candidate <= now:
            candidate += timedelta(days=1)
        return candidate

    if schedule.frequency == BackupSchedule.Frequency.WEEKLY:
        target = schedule.day_of_week
        # Days until the target weekday (0 when already on it).
        delta = (target - candidate.weekday()) % 7
        candidate += timedelta(days=delta)
        if candidate <= now:
            candidate += timedelta(days=7)
        return candidate

    # monthly
    day = max(1, min(schedule.day_of_month, 28))
    candidate = candidate.replace(day=day)
    if candidate <= now:
        # Roll into the next month, wrapping December -> January.
        month = candidate.month + 1
        year = candidate.year
        if month > 12:
            month = 1
            year += 1
        candidate = candidate.replace(year=year, month=month, day=day)
    return candidate
|
||||
|
||||
|
||||
def is_backup_due(schedule: BackupSchedule, now=None) -> bool:
    """Decide whether a scheduled backup should run at ``now``.

    Due when BOTH hold:
      * at least one full period (hour / day / 7 days / 28 days as a
        monthly approximation) has elapsed since ``last_run_at``; and
      * ``now`` has reached today's configured hour/minute anchor.
    An enabled schedule that has never run is immediately due.

    NOTE(review): ``next_target`` is built from *today's* date, so for
    weekly/monthly frequencies the anchor check ignores day_of_week /
    day_of_month; the elapsed-period check is what spaces those runs —
    confirm this matches the intended semantics.
    """
    now = now or timezone.now()
    if not schedule.enabled:
        return False

    if schedule.last_run_at is None:
        return True

    # Minimum spacing since the last successful run.
    if schedule.frequency == BackupSchedule.Frequency.HOURLY:
        next_due = schedule.last_run_at + timedelta(hours=1)
    elif schedule.frequency == BackupSchedule.Frequency.DAILY:
        next_due = schedule.last_run_at + timedelta(days=1)
    elif schedule.frequency == BackupSchedule.Frequency.WEEKLY:
        next_due = schedule.last_run_at + timedelta(days=7)
    else:
        # Monthly approximated as 28 days.
        next_due = schedule.last_run_at + timedelta(days=28)

    # Today's clock anchor (hour applies to everything except hourly).
    next_target = now.replace(second=0, microsecond=0)
    if schedule.frequency != BackupSchedule.Frequency.HOURLY:
        next_target = next_target.replace(hour=schedule.hour)
    next_target = next_target.replace(minute=schedule.minute)

    return now >= max(next_due, next_target)
|
||||
|
||||
|
||||
|
||||
92
core/services/bootstrap.py
Normal file
92
core/services/bootstrap.py
Normal file
@@ -0,0 +1,92 @@
|
||||
"""
|
||||
Bootstrap service — idempotent startup initialisation.
|
||||
|
||||
Ensures a default media library exists as soon as the first user is available.
|
||||
Called from three places:
|
||||
1. AppConfig.ready() – every server start (DB already populated)
|
||||
2. post_migrate signal – after `manage.py migrate` runs
|
||||
3. auth.setup_admin endpoint – immediately after the first user is created
|
||||
"""
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_LIBRARY_NAME = "Default Library"
|
||||
|
||||
# One-shot flag: after the first request triggers the bootstrap, disconnect
|
||||
# the signal so we don't repeat the DB check on every subsequent request.
|
||||
_startup_bootstrap_done = False
|
||||
|
||||
|
||||
def ensure_default_library():
    """
    Create a default media library if none exists yet.

    Idempotent — safe to call multiple times; does nothing when a library
    already exists. Returns the newly-created Library, or None if no action
    was taken (either a library already exists, or no users exist yet to be
    the owner).
    """
    # Imported lazily so this module can be imported before app loading completes.
    from core.models import Library, AppUser

    if Library.objects.exists():
        return None  # Already bootstrapped

    # Need an owner — prefer the first superuser, fall back to any user.
    owner = (
        AppUser.objects.filter(is_superuser=True).order_by("date_joined").first()
        or AppUser.objects.order_by("date_joined").first()
    )
    if owner is None:
        # No users yet — setup_admin will call us again once the first user exists.
        logger.debug("ensure_default_library: no users yet, skipping.")
        return None

    library = Library.objects.create(
        owner_user=owner,
        name=DEFAULT_LIBRARY_NAME,
        visibility="public",
        description=(
            "Default media library. "
            "Add media sources here to start building your channels."
        ),
    )
    logger.info(
        "Bootstrap: created default library '%s' (id=%d) owned by '%s'.",
        library.name,
        library.id,
        owner.username,
    )
    return library
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Signal handlers — wired up in CoreConfig.ready()
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _on_post_migrate(sender, **kwargs):
    """Run bootstrap after every successful migration.

    Best-effort: failures are logged, never raised, so a bootstrap problem
    cannot abort the migrate command itself.
    """
    try:
        ensure_default_library()
    except Exception as exc:  # pragma: no cover
        logger.warning("ensure_default_library failed in post_migrate: %s", exc)
|
||||
|
||||
|
||||
def _on_first_request(sender, **kwargs):
    """
    One-shot: run bootstrap on the very first HTTP request after server start.
    Disconnects itself immediately so subsequent requests pay zero overhead.
    """
    # Module-level flag guards against concurrent first requests racing past
    # the disconnect below.
    global _startup_bootstrap_done
    if _startup_bootstrap_done:
        return
    _startup_bootstrap_done = True

    # Disconnect before doing any work so later requests skip this entirely.
    from django.core.signals import request_started
    request_started.disconnect(_on_first_request)

    # Best-effort: never let bootstrap problems break the serving request.
    try:
        ensure_default_library()
    except Exception as exc:  # pragma: no cover
        logger.warning("ensure_default_library failed on first request: %s", exc)
|
||||
|
||||
Reference in New Issue
Block a user