feat: rebuild iOS app from API audit + new podcast/media service
All checks were successful
Security Checks / dependency-audit (push) Successful in 12s
Security Checks / secret-scanning (push) Successful in 3s
Security Checks / dockerfile-lint (push) Successful in 3s

iOS App (complete rebuild):
- Audited all fitness API endpoints against live responses
- Models match exact API field names (snapshot_ prefixes, UUID strings)
- FoodEntry uses computed properties (foodName, calories, etc.) wrapping snapshot fields
- Flexible Int/Double decoding for all numeric fields
- AI assistant with raw JSON state management (JSONSerialization, not Codable)
- Home dashboard with custom background, frosted glass calorie widget
- Fitness: Today/Templates/Goals/Foods tabs
- Food search with recent + all sections
- Meal sections with colored accent bars, swipe to delete
- 120fps ProMotion, iOS 17+ @Observable

Podcast/Media Service:
- FastAPI backend for podcast RSS + local audiobook folders
- Shows, episodes, playback progress, queue management
- RSS feed fetching with feedparser + ETag support
- Local folder scanning with mutagen for audio metadata
- HTTP Range streaming for local audio files
- Playback events logging (play/pause/seek/complete)
- Reuses brain's PostgreSQL + Redis
- media_ prefixed tables

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Yusuf Suleman
2026-04-03 02:36:43 -05:00
parent e350a354a3
commit 69af4b84a5
56 changed files with 4256 additions and 4620 deletions

View File

@@ -0,0 +1,22 @@
# Podcast/media API service image (FastAPI + uvicorn on port 8400).
FROM python:3.12-slim
WORKDIR /app
# libpq headers for the PostgreSQL driver; purge apt lists to keep the layer small.
RUN apt-get update && apt-get install -y --no-install-recommends libpq-dev && rm -rf /var/lib/apt/lists/*
RUN pip install --no-cache-dir --upgrade pip
# Copy requirements first so the dependency layer is cached across code-only changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Unprivileged runtime user.
# NOTE(review): Debian's adduser prompts for GECOS data when --gecos is omitted;
# confirm this builds non-interactively, or add --gecos "".
RUN adduser --disabled-password --no-create-home appuser
COPY --chown=appuser app/ app/
EXPOSE 8400
ENV PYTHONUNBUFFERED=1
# Probe the API's health endpoint from inside the container (no curl needed).
HEALTHCHECK --interval=30s --timeout=5s --retries=3 \
CMD python3 -c "import urllib.request; urllib.request.urlopen('http://127.0.0.1:8400/api/health', timeout=3)" || exit 1
USER appuser
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8400"]

View File

@@ -0,0 +1,18 @@
# Background worker image — runs app.worker.tasks; no ports exposed.
FROM python:3.12-slim
WORKDIR /app
# libpq headers for the PostgreSQL driver; purge apt lists to keep the layer small.
RUN apt-get update && apt-get install -y --no-install-recommends libpq-dev && rm -rf /var/lib/apt/lists/*
RUN pip install --no-cache-dir --upgrade pip
# Copy requirements first so the dependency layer is cached across code-only changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Unprivileged runtime user.
# NOTE(review): consider --gecos "" so adduser cannot prompt during the build.
RUN adduser --disabled-password --no-create-home appuser
COPY --chown=appuser app/ app/
ENV PYTHONUNBUFFERED=1
USER appuser
CMD ["python", "-m", "app.worker.tasks"]

View File

View File

View File

@@ -0,0 +1,21 @@
"""API dependencies — auth, database session."""
from fastapi import Header, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
async def get_user_id(
    x_gateway_user_id: str = Header(None, alias="X-Gateway-User-Id"),
) -> str:
    """Resolve the caller's user ID from the X-Gateway-User-Id header.

    The gateway injects this header after authenticating the request;
    its absence means the request bypassed the gateway, so reject with 401.
    """
    if x_gateway_user_id:
        return x_gateway_user_id
    raise HTTPException(status_code=401, detail="Not authenticated")
async def get_db_session() -> AsyncSession:
    """Provide an async database session.

    Thin FastAPI dependency that delegates to app.database.get_db and
    yields each session it produces; session setup/teardown is presumably
    handled inside get_db — confirm against app.database.
    """
    async for session in get_db():
        yield session

View File

@@ -0,0 +1,232 @@
"""Episode listing, detail, and streaming endpoints."""
from __future__ import annotations
import os
import uuid
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException, Header, Query
from fastapi.responses import StreamingResponse, Response
from sqlalchemy import select, and_
from sqlalchemy.ext.asyncio import AsyncSession
from starlette.requests import Request
from app.api.deps import get_user_id, get_db_session
from app.config import CONTENT_TYPES
from app.models import Episode, Show, Progress
# All episode endpoints are mounted under /api/episodes.
router = APIRouter(prefix="/api/episodes", tags=["episodes"])
# ── Named convenience endpoints (must be before /{episode_id}) ──
@router.get("/recent")
async def recent_episodes(
    limit: int = Query(20, ge=1, le=100),
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Return the user's most recently played episodes, newest first."""
    query = (
        select(Episode, Show, Progress)
        .join(Progress, Progress.episode_id == Episode.id)
        .join(Show, Show.id == Episode.show_id)
        .where(Progress.user_id == user_id)
        .order_by(Progress.last_played_at.desc())
        .limit(limit)
    )
    rows = (await db.execute(query)).all()
    payload = []
    for episode, show, progress in rows:
        payload.append(_format(episode, show, progress))
    return payload
@router.get("/in-progress")
async def in_progress_episodes(
    limit: int = Query(20, ge=1, le=100),
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Return partially played, not-yet-completed episodes for the user."""
    query = (
        select(Episode, Show, Progress)
        .join(Progress, Progress.episode_id == Episode.id)
        .join(Show, Show.id == Episode.show_id)
        .where(
            Progress.user_id == user_id,
            Progress.position_seconds > 0,
            Progress.is_completed == False,  # noqa: E712 — SQLAlchemy needs ==
        )
        .order_by(Progress.last_played_at.desc())
        .limit(limit)
    )
    rows = (await db.execute(query)).all()
    payload = []
    for episode, show, progress in rows:
        payload.append(_format(episode, show, progress))
    return payload
# ── List ──
@router.get("")
async def list_episodes(
    show_id: Optional[str] = Query(None),
    status: Optional[str] = Query(None),
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """List the user's episodes, newest first.

    Optional filters:
      - show_id: restrict to one show (400 when not a valid UUID)
      - status: unplayed | in_progress | completed
    """
    stmt = (
        select(Episode, Show, Progress)
        .join(Show, Show.id == Episode.show_id)
        # Outer join so episodes with no progress row still appear.
        .outerjoin(Progress, and_(Progress.episode_id == Episode.id, Progress.user_id == user_id))
        .where(Episode.user_id == user_id)
    )
    if show_id:
        try:
            show_uuid = uuid.UUID(show_id)
        except ValueError:
            # A malformed id previously bubbled up as ValueError -> 500.
            raise HTTPException(400, "Invalid show_id")
        stmt = stmt.where(Episode.show_id == show_uuid)
    if status == "unplayed":
        stmt = stmt.where(Progress.id == None)  # noqa: E711
    elif status == "in_progress":
        stmt = stmt.where(Progress.position_seconds > 0, Progress.is_completed == False)  # noqa: E712
    elif status == "completed":
        stmt = stmt.where(Progress.is_completed == True)  # noqa: E712
    stmt = stmt.order_by(Episode.published_at.desc().nullslast()).offset(offset).limit(limit)
    result = await db.execute(stmt)
    return [_format(ep, show, prog) for ep, show, prog in result.all()]
# ── Detail ──
@router.get("/{episode_id}")
async def get_episode(
    episode_id: str,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Return one episode (scoped to the user) with show info and progress."""
    try:
        eid = uuid.UUID(episode_id)
    except ValueError:
        # A malformed id previously raised ValueError -> 500; treat as not found.
        raise HTTPException(404, "Episode not found")
    stmt = (
        select(Episode, Show, Progress)
        .join(Show, Show.id == Episode.show_id)
        .outerjoin(Progress, and_(Progress.episode_id == Episode.id, Progress.user_id == user_id))
        .where(Episode.id == eid, Episode.user_id == user_id)
    )
    row = (await db.execute(stmt)).first()
    if not row:
        raise HTTPException(404, "Episode not found")
    ep, show, prog = row
    return _format(ep, show, prog)
# ── Stream local audio ──
@router.get("/{episode_id}/stream")
async def stream_episode(
    episode_id: str,
    request: Request,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Stream a local audio file with HTTP Range support for seeking.

    Only episodes of a show with show_type == "local" are streamable here;
    podcast episodes point at remote audio URLs instead.
    """
    try:
        eid = uuid.UUID(episode_id)
    except ValueError:
        # A malformed id previously raised ValueError -> 500; treat as not found.
        raise HTTPException(404, "Episode not found")
    ep = await db.get(Episode, eid)
    if not ep or ep.user_id != user_id:
        raise HTTPException(404, "Episode not found")
    show = await db.get(Show, ep.show_id)
    if not show or show.show_type != "local":
        raise HTTPException(400, "Streaming only available for local episodes")
    # For local shows, audio_url holds an absolute filesystem path.
    file_path = ep.audio_url
    if not file_path or not os.path.isfile(file_path):
        raise HTTPException(404, "Audio file not found")
    file_size = os.path.getsize(file_path)
    ext = os.path.splitext(file_path)[1].lower()
    content_type = CONTENT_TYPES.get(ext, "application/octet-stream")
    range_header = request.headers.get("range")
    return _range_response(file_path, file_size, content_type, range_header)
# ── Helpers ──
def _format(ep: Episode, show: Show, prog: Optional[Progress]) -> dict:
    """Serialize an episode row, its show, and optional progress to a JSON-safe dict."""
    progress_payload = None
    if prog:
        progress_payload = {
            "position_seconds": prog.position_seconds,
            "duration_seconds": prog.duration_seconds,
            "is_completed": prog.is_completed,
            "playback_speed": prog.playback_speed,
            "last_played_at": prog.last_played_at.isoformat() if prog.last_played_at else None,
        }
    show_payload = {
        "id": str(show.id),
        "title": show.title,
        "author": show.author,
        "artwork_url": show.artwork_url,
        "show_type": show.show_type,
    }
    return {
        "id": str(ep.id),
        "show_id": str(ep.show_id),
        "title": ep.title,
        "description": ep.description,
        "audio_url": ep.audio_url,
        "duration_seconds": ep.duration_seconds,
        "file_size_bytes": ep.file_size_bytes,
        "published_at": ep.published_at.isoformat() if ep.published_at else None,
        # Episode artwork falls back to the show's artwork.
        "artwork_url": ep.artwork_url or show.artwork_url,
        "show": show_payload,
        "progress": progress_payload,
    }
def _parse_byte_range(range_header: Optional[str], file_size: int) -> Optional[tuple]:
    """Parse a single HTTP Range header into an inclusive (start, end) pair.

    Handles "bytes=START-END", open-ended "bytes=START-", and suffix
    "bytes=-N" (last N bytes) forms per RFC 7233.  Returns None for a
    missing or malformed header so the caller serves the whole file.
    The returned start may be >= file_size (unsatisfiable); the caller
    answers that with 416.
    """
    if not range_header or not range_header.startswith("bytes="):
        return None
    # Only the first range of a multi-range request is honored.
    spec = range_header[6:].split(",")[0].strip()
    start_s, sep, end_s = spec.partition("-")
    if not sep:
        return None
    try:
        if start_s:
            start = int(start_s)
            end = int(end_s) if end_s else file_size - 1
        elif end_s:
            # Suffix form: the last N bytes of the file (RFC 7233 §2.1).
            suffix = int(end_s)
            if suffix <= 0:
                return None
            start = max(file_size - suffix, 0)
            end = file_size - 1
        else:
            return None
    except ValueError:
        # Non-numeric bounds previously raised ValueError -> 500.
        return None
    if start < 0:
        return None
    end = min(end, file_size - 1)
    if start >= file_size:
        # Past EOF — let the caller answer 416.
        return (start, end)
    if end < start:
        # Syntactically invalid (e.g. "bytes=500-100"): ignore the header.
        return None
    return (start, end)


def _range_response(file_path: str, file_size: int, content_type: str, range_header: Optional[str]):
    """Build a streaming response with optional Range support for seeking.

    Serves 206 with the requested slice for a valid Range header, 416 when
    the range starts past EOF, and a full 200 stream otherwise — including
    for malformed Range headers, which are now ignored instead of crashing.
    """
    byte_range = _parse_byte_range(range_header, file_size)
    if byte_range is not None:
        start, end = byte_range
        if start >= file_size:
            return Response(status_code=416, headers={"Content-Range": f"bytes */{file_size}"})
        length = end - start + 1

        def iter_range():
            # Stream the slice in 64 KiB chunks without loading it into memory.
            with open(file_path, "rb") as f:
                f.seek(start)
                remaining = length
                while remaining > 0:
                    chunk = f.read(min(65536, remaining))
                    if not chunk:
                        break
                    remaining -= len(chunk)
                    yield chunk

        return StreamingResponse(
            iter_range(),
            status_code=206,
            media_type=content_type,
            headers={
                "Content-Range": f"bytes {start}-{end}/{file_size}",
                "Content-Length": str(length),
                "Accept-Ranges": "bytes",
            },
        )

    def iter_file():
        # No (valid) range requested: stream the entire file.
        with open(file_path, "rb") as f:
            while chunk := f.read(65536):
                yield chunk

    return StreamingResponse(
        iter_file(),
        media_type=content_type,
        headers={
            "Content-Length": str(file_size),
            "Accept-Ranges": "bytes",
        },
    )

View File

@@ -0,0 +1,229 @@
"""Playback control endpoints — play, pause, seek, complete, now-playing, speed."""
from __future__ import annotations
import uuid
from datetime import datetime
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from sqlalchemy import select, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.deps import get_user_id, get_db_session
from app.models import Episode, Show, Progress, PlaybackEvent
# All playback endpoints are mounted under /api/playback.
router = APIRouter(prefix="/api/playback", tags=["playback"])
# ── Schemas ──
class PlayRequest(BaseModel):
    # Episode UUID (string form); position defaults to the start.
    episode_id: str
    position_seconds: float = 0
class PauseRequest(BaseModel):
    # Position is required: pausing must persist where playback stopped.
    episode_id: str
    position_seconds: float
class SeekRequest(BaseModel):
    # Target position within the episode, in seconds.
    episode_id: str
    position_seconds: float
class CompleteRequest(BaseModel):
    episode_id: str
class SpeedRequest(BaseModel):
    # Playback rate multiplier; endpoint enforces 0.5–3.0.
    speed: float
# ── Helpers ──
async def _get_or_create_progress(
    db: AsyncSession, user_id: str, episode_id: uuid.UUID
) -> Progress:
    """Get existing progress or create a new one.

    New rows are flushed (not committed) so defaults/IDs are available to
    the caller; the caller owns the final commit.
    """
    stmt = select(Progress).where(
        Progress.user_id == user_id,
        Progress.episode_id == episode_id,
    )
    prog = (await db.execute(stmt)).scalar_one_or_none()
    if not prog:
        # Seed duration from the episode (when it exists) so progress math works.
        ep = await db.get(Episode, episode_id)
        prog = Progress(
            user_id=user_id,
            episode_id=episode_id,
            duration_seconds=ep.duration_seconds if ep else None,
        )
        db.add(prog)
        await db.flush()
    return prog
async def _log_event(
    db: AsyncSession, user_id: str, episode_id: uuid.UUID,
    event_type: str, position: float, speed: float = 1.0,
):
    """Queue a playback-event row on the session; the caller commits."""
    event = PlaybackEvent(
        user_id=user_id,
        episode_id=episode_id,
        event_type=event_type,
        position_seconds=position,
        playback_speed=speed,
    )
    db.add(event)
async def _validate_episode(db: AsyncSession, user_id: str, episode_id_str: str) -> Episode:
    """Resolve an episode-id string to an Episode owned by this user.

    Raises HTTPException(404) when the id is not a valid UUID, does not
    exist, or belongs to another user.
    """
    try:
        eid = uuid.UUID(episode_id_str)
    except ValueError:
        # A malformed id previously raised ValueError -> 500 in every
        # playback endpoint; report it as not-found instead.
        raise HTTPException(404, "Episode not found")
    ep = await db.get(Episode, eid)
    if not ep or ep.user_id != user_id:
        raise HTTPException(404, "Episode not found")
    return ep
# ── Endpoints ──
@router.post("/play")
async def play(
    body: PlayRequest,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Set episode as currently playing.

    Updates position/timestamp, clears the completed flag (so a finished
    episode can be replayed), logs a "play" event, then commits.
    """
    ep = await _validate_episode(db, user_id, body.episode_id)
    prog = await _get_or_create_progress(db, user_id, ep.id)
    prog.position_seconds = body.position_seconds
    # NOTE(review): utcnow() is naive (and deprecated in 3.12); columns appear
    # to hold naive UTC — confirm before switching to timezone-aware datetimes.
    prog.last_played_at = datetime.utcnow()
    prog.is_completed = False
    await _log_event(db, user_id, ep.id, "play", body.position_seconds, prog.playback_speed)
    await db.commit()
    return {"status": "playing", "position_seconds": prog.position_seconds}
@router.post("/pause")
async def pause(
    body: PauseRequest,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Pause playback: persist the current position and log a "pause" event."""
    ep = await _validate_episode(db, user_id, body.episode_id)
    prog = await _get_or_create_progress(db, user_id, ep.id)
    prog.position_seconds = body.position_seconds
    prog.last_played_at = datetime.utcnow()
    await _log_event(db, user_id, ep.id, "pause", body.position_seconds, prog.playback_speed)
    await db.commit()
    return {"status": "paused", "position_seconds": prog.position_seconds}
@router.post("/seek")
async def seek(
    body: SeekRequest,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Jump to a position: persist it and log a "seek" event."""
    ep = await _validate_episode(db, user_id, body.episode_id)
    prog = await _get_or_create_progress(db, user_id, ep.id)
    prog.position_seconds = body.position_seconds
    prog.last_played_at = datetime.utcnow()
    await _log_event(db, user_id, ep.id, "seek", body.position_seconds, prog.playback_speed)
    await db.commit()
    return {"status": "seeked", "position_seconds": prog.position_seconds}
@router.post("/complete")
async def complete(
    body: CompleteRequest,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Mark episode as completed.

    Snaps the position to the known duration when available (otherwise
    keeps the last recorded position) and logs a "complete" event.
    """
    ep = await _validate_episode(db, user_id, body.episode_id)
    prog = await _get_or_create_progress(db, user_id, ep.id)
    prog.is_completed = True
    prog.position_seconds = prog.duration_seconds or prog.position_seconds
    prog.last_played_at = datetime.utcnow()
    await _log_event(db, user_id, ep.id, "complete", prog.position_seconds, prog.playback_speed)
    await db.commit()
    return {"status": "completed"}
@router.get("/now-playing")
async def now_playing(
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Get the most recently played, not-yet-completed episode with progress.

    Returns None (JSON null) when the user has nothing in flight.
    """
    stmt = (
        select(Episode, Show, Progress)
        .join(Progress, Progress.episode_id == Episode.id)
        .join(Show, Show.id == Episode.show_id)
        .where(
            Progress.user_id == user_id,
            Progress.is_completed == False,  # noqa: E712
        )
        .order_by(Progress.last_played_at.desc())
        .limit(1)
    )
    row = (await db.execute(stmt)).first()
    if not row:
        return None
    ep, show, prog = row
    return {
        "episode": {
            "id": str(ep.id),
            "title": ep.title,
            "audio_url": ep.audio_url,
            "duration_seconds": ep.duration_seconds,
            # Episode artwork falls back to the show's artwork.
            "artwork_url": ep.artwork_url or show.artwork_url,
        },
        "show": {
            "id": str(show.id),
            "title": show.title,
            "author": show.author,
            "artwork_url": show.artwork_url,
            "show_type": show.show_type,
        },
        "progress": {
            "position_seconds": prog.position_seconds,
            "duration_seconds": prog.duration_seconds,
            "is_completed": prog.is_completed,
            "playback_speed": prog.playback_speed,
            "last_played_at": prog.last_played_at.isoformat() if prog.last_played_at else None,
        },
    }
@router.post("/speed")
async def set_speed(
    body: SpeedRequest,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Update playback speed on the most recent (now-playing) progress row.

    Only that single row is touched — not all active progress records.
    Speeds outside 0.5–3.0 are rejected with 400.  The requested speed is
    echoed back even when no active progress row exists.
    """
    if body.speed < 0.5 or body.speed > 3.0:
        raise HTTPException(400, "Speed must be between 0.5 and 3.0")
    # Update the most recent (now-playing) progress record's speed
    stmt = (
        select(Progress)
        .where(
            Progress.user_id == user_id,
            Progress.is_completed == False,  # noqa: E712
        )
        .order_by(Progress.last_played_at.desc())
        .limit(1)
    )
    prog = (await db.execute(stmt)).scalar_one_or_none()
    if prog:
        prog.playback_speed = body.speed
        await db.commit()
    return {"speed": body.speed}

View File

@@ -0,0 +1,236 @@
"""Queue management endpoints."""
from __future__ import annotations
import uuid
from datetime import datetime
from typing import List
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from sqlalchemy import select, func, delete, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.api.deps import get_user_id, get_db_session
from app.models import Episode, Show, QueueItem, Progress, PlaybackEvent
# All queue endpoints are mounted under /api/queue.
router = APIRouter(prefix="/api/queue", tags=["queue"])
# ── Schemas ──
class QueueAddRequest(BaseModel):
    # Episode UUID in string form.
    episode_id: str
class QueueReorderRequest(BaseModel):
    # Episode IDs in the desired playback order (index becomes sort_order).
    episode_ids: List[str]
# ── Endpoints ──
@router.get("")
async def get_queue(
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Return the user's queue in sort_order, including episode and show info."""
    query = (
        select(QueueItem, Episode, Show)
        .join(Episode, Episode.id == QueueItem.episode_id)
        .join(Show, Show.id == Episode.show_id)
        .where(QueueItem.user_id == user_id)
        .order_by(QueueItem.sort_order)
    )
    rows = (await db.execute(query)).all()
    payload = []
    for item, episode, show in rows:
        payload.append({
            "queue_id": str(item.id),
            "sort_order": item.sort_order,
            "added_at": item.added_at.isoformat() if item.added_at else None,
            "episode": {
                "id": str(episode.id),
                "title": episode.title,
                "audio_url": episode.audio_url,
                "duration_seconds": episode.duration_seconds,
                # Episode artwork falls back to the show's artwork.
                "artwork_url": episode.artwork_url or show.artwork_url,
                "published_at": episode.published_at.isoformat() if episode.published_at else None,
            },
            "show": {
                "id": str(show.id),
                "title": show.title,
                "author": show.author,
                "artwork_url": show.artwork_url,
            },
        })
    return payload
@router.post("/add", status_code=201)
async def add_to_queue(
    body: QueueAddRequest,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Append an episode to the end of the user's queue.

    404 for unknown/foreign/malformed episode ids; 409 if already queued.
    """
    try:
        eid = uuid.UUID(body.episode_id)
    except ValueError:
        # A malformed id previously raised ValueError -> 500.
        raise HTTPException(404, "Episode not found")
    ep = await db.get(Episode, eid)
    if not ep or ep.user_id != user_id:
        raise HTTPException(404, "Episode not found")
    # Check if already in queue — each episode may appear at most once.
    existing = await db.execute(
        select(QueueItem).where(QueueItem.user_id == user_id, QueueItem.episode_id == eid)
    )
    if existing.scalar_one_or_none():
        raise HTTPException(409, "Episode already in queue")
    # Append after the current maximum sort_order (-1 when the queue is empty).
    max_order = await db.scalar(
        select(func.coalesce(func.max(QueueItem.sort_order), -1)).where(QueueItem.user_id == user_id)
    )
    qi = QueueItem(
        user_id=user_id,
        episode_id=eid,
        sort_order=max_order + 1,
    )
    db.add(qi)
    await db.commit()
    return {"status": "added", "sort_order": qi.sort_order}
@router.post("/play-next", status_code=201)
async def play_next(
    body: QueueAddRequest,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Insert episode at position 1 (right after the currently playing item)."""
    try:
        eid = uuid.UUID(body.episode_id)
    except ValueError:
        # A malformed id previously raised ValueError -> 500.
        raise HTTPException(404, "Episode not found")
    ep = await db.get(Episode, eid)
    if not ep or ep.user_id != user_id:
        raise HTTPException(404, "Episode not found")
    # Remove if already in queue so the episode appears only once.
    await db.execute(
        delete(QueueItem).where(QueueItem.user_id == user_id, QueueItem.episode_id == eid)
    )
    # Shift all items at position >= 1 up by 1 to open the slot.
    stmt = (
        select(QueueItem)
        .where(QueueItem.user_id == user_id, QueueItem.sort_order >= 1)
        .order_by(QueueItem.sort_order.desc())
    )
    items = (await db.execute(stmt)).scalars().all()
    for item in items:
        item.sort_order += 1
    qi = QueueItem(user_id=user_id, episode_id=eid, sort_order=1)
    db.add(qi)
    await db.commit()
    return {"status": "added", "sort_order": 1}
@router.post("/play-now", status_code=201)
async def play_now(
    body: QueueAddRequest,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Insert the episode at position 0 and mark it as now playing.

    Also creates/updates its progress row and logs a "play" event, all in
    one transaction.
    """
    try:
        eid = uuid.UUID(body.episode_id)
    except ValueError:
        # A malformed id previously raised ValueError -> 500.
        raise HTTPException(404, "Episode not found")
    ep = await db.get(Episode, eid)
    if not ep or ep.user_id != user_id:
        raise HTTPException(404, "Episode not found")
    # Remove if already in queue so the episode appears only once.
    await db.execute(
        delete(QueueItem).where(QueueItem.user_id == user_id, QueueItem.episode_id == eid)
    )
    # Shift the whole queue up by one to make room at position 0.
    stmt = (
        select(QueueItem)
        .where(QueueItem.user_id == user_id)
        .order_by(QueueItem.sort_order.desc())
    )
    items = (await db.execute(stmt)).scalars().all()
    for item in items:
        item.sort_order += 1
    qi = QueueItem(user_id=user_id, episode_id=eid, sort_order=0)
    db.add(qi)
    # Also create/update progress and log a play event.
    prog_stmt = select(Progress).where(Progress.user_id == user_id, Progress.episode_id == eid)
    prog = (await db.execute(prog_stmt)).scalar_one_or_none()
    if not prog:
        prog = Progress(
            user_id=user_id,
            episode_id=eid,
            duration_seconds=ep.duration_seconds,
        )
        db.add(prog)
    prog.last_played_at = datetime.utcnow()
    prog.is_completed = False
    db.add(PlaybackEvent(
        user_id=user_id,
        episode_id=eid,
        event_type="play",
        position_seconds=prog.position_seconds or 0,
        playback_speed=prog.playback_speed,
    ))
    await db.commit()
    return {"status": "playing", "sort_order": 0}
@router.delete("/{episode_id}", status_code=204)
async def remove_from_queue(
    episode_id: str,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Remove an episode from the queue; 404 when it is not queued."""
    try:
        eid = uuid.UUID(episode_id)
    except ValueError:
        # A malformed id previously raised ValueError -> 500; it can never
        # match a queue row, so report not-found.
        raise HTTPException(404, "Episode not in queue")
    result = await db.execute(
        delete(QueueItem).where(QueueItem.user_id == user_id, QueueItem.episode_id == eid)
    )
    if result.rowcount == 0:
        raise HTTPException(404, "Episode not in queue")
    await db.commit()
@router.post("/reorder")
async def reorder_queue(
    body: QueueReorderRequest,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Reorder the queue to match the given episode-id order.

    IDs not present in the queue are silently skipped.  A malformed UUID
    is rejected with 400 before the transaction is committed (previously
    it raised ValueError and surfaced as a 500).
    """
    for i, eid_str in enumerate(body.episode_ids):
        try:
            eid = uuid.UUID(eid_str)
        except ValueError:
            raise HTTPException(400, f"Invalid episode id: {eid_str}")
        stmt = select(QueueItem).where(QueueItem.user_id == user_id, QueueItem.episode_id == eid)
        qi = (await db.execute(stmt)).scalar_one_or_none()
        if qi:
            # The position in the submitted list becomes the new sort_order.
            qi.sort_order = i
    await db.commit()
    return {"status": "reordered", "count": len(body.episode_ids)}
@router.delete("")
async def clear_queue(
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Delete every item in the user's queue."""
    stmt = delete(QueueItem).where(QueueItem.user_id == user_id)
    await db.execute(stmt)
    await db.commit()
    return {"status": "cleared"}

View File

@@ -0,0 +1,519 @@
"""Show CRUD endpoints."""
from __future__ import annotations
import logging
import uuid
from datetime import datetime
from typing import Optional
import feedparser
import httpx
from fastapi import APIRouter, Depends, HTTPException, Query
from pydantic import BaseModel
from sqlalchemy import select, func, delete
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from app.api.deps import get_user_id, get_db_session
from app.models import Show, Episode, Progress
log = logging.getLogger(__name__)
# All show endpoints are mounted under /api/shows.
router = APIRouter(prefix="/api/shows", tags=["shows"])
# ── Schemas ──
class ShowCreate(BaseModel):
    # One of feed_url (RSS podcast) or local_path (local audio folder) is
    # expected; title is required for local shows, optional override for RSS.
    feed_url: Optional[str] = None
    local_path: Optional[str] = None
    title: Optional[str] = None
class ShowOut(BaseModel):
    # Response shape for a show row plus derived episode counters.
    id: str
    title: str
    author: Optional[str] = None
    description: Optional[str] = None
    artwork_url: Optional[str] = None
    feed_url: Optional[str] = None
    local_path: Optional[str] = None
    show_type: str
    episode_count: int = 0
    unplayed_count: int = 0
    created_at: Optional[str] = None
    updated_at: Optional[str] = None
# ── Helpers ──
def _parse_duration(value: str) -> Optional[int]:
    """Convert a duration string to whole seconds.

    Accepts "HH:MM:SS", "MM:SS", or a bare (possibly fractional) seconds
    value; any other colon count falls back to the first field as seconds.
    Returns None for empty or unparseable input.
    """
    if not value:
        return None
    fields = value.strip().split(":")
    try:
        if len(fields) in (2, 3):
            # Horner-style accumulation: each colon is a base-60 step.
            seconds = 0
            for field in fields:
                seconds = seconds * 60 + int(field)
            return seconds
        return int(float(fields[0]))
    except (ValueError, IndexError):
        return None
async def _fetch_and_parse_feed(
    feed_url: str, etag: Optional[str] = None, last_modified: Optional[str] = None
):
    """Fetch RSS feed and parse with feedparser.

    Sends conditional-request headers when cached ETag/Last-Modified
    validators are supplied; a 304 response yields (None, None, None),
    meaning "unchanged".  Otherwise returns (parsed_feed, new_etag,
    new_last_modified) from the fresh response.
    """
    headers = {}
    if etag:
        headers["If-None-Match"] = etag
    if last_modified:
        headers["If-Modified-Since"] = last_modified
    async with httpx.AsyncClient(timeout=30, follow_redirects=True) as client:
        resp = await client.get(feed_url, headers=headers)
    if resp.status_code == 304:
        return None, None, None  # Not modified
    resp.raise_for_status()
    feed = feedparser.parse(resp.text)
    new_etag = resp.headers.get("ETag")
    new_last_modified = resp.headers.get("Last-Modified")
    return feed, new_etag, new_last_modified
def _extract_show_info(feed) -> dict:
    """Pull show-level metadata (title/author/description/artwork) from a parsed feed."""
    channel = feed.feed
    artwork = None
    # Preferred source: the RSS <image> element.
    if hasattr(channel, "image") and channel.image:
        artwork = getattr(channel.image, "href", None)
    # Fallback: itunes:image, which feedparser exposes as a dict.
    if not artwork and hasattr(channel, "itunes_image"):
        itunes_img = channel.get("itunes_image")
        if isinstance(itunes_img, dict):
            artwork = itunes_img.get("href")
    # Last resort: any icon/image link in the channel's link list.
    if not artwork:
        for link in getattr(channel, "links", []):
            is_icon = link.get("rel") == "icon"
            is_image = link.get("type", "").startswith("image/")
            if is_icon or is_image:
                artwork = link.get("href")
                break
    title = getattr(channel, "title", "Unknown Show")
    author = getattr(channel, "author", None) or getattr(channel, "itunes_author", None)
    description = getattr(channel, "summary", None) or getattr(channel, "subtitle", None)
    return {
        "title": title,
        "author": author,
        "description": description,
        "artwork_url": artwork,
    }
def _safe_int(value) -> Optional[int]:
    """Best-effort positive int conversion; returns None for 0, '', or junk."""
    try:
        return int(value) or None
    except (TypeError, ValueError):
        return None


def _extract_episodes(feed, show_id: uuid.UUID, user_id: str) -> list[dict]:
    """Extract episode dicts from a parsed feed.

    Entries without a resolvable audio enclosure/link are skipped.  The
    returned dicts use the Episode model's column names.
    """
    episodes = []
    for entry in feed.entries:
        audio_url = None
        file_size = None
        # Prefer enclosures: an audio/* MIME type or a known audio extension.
        for enc in getattr(entry, "enclosures", []):
            if enc.get("type", "").startswith("audio/") or enc.get("href", "").split("?")[0].endswith(
                (".mp3", ".m4a", ".ogg", ".opus")
            ):
                audio_url = enc.get("href")
                # BUG FIX: enclosure "length" is frequently "" or non-numeric in
                # real feeds; bare int() raised ValueError and aborted the import.
                file_size = _safe_int(enc.get("length", 0))
                break
        # Fallback: check links
        if not audio_url:
            for link in getattr(entry, "links", []):
                if link.get("type", "").startswith("audio/"):
                    audio_url = link.get("href")
                    file_size = _safe_int(link.get("length", 0))
                    break
        if not audio_url:
            continue  # Skip entries without audio
        # Duration
        duration = None
        itunes_duration = getattr(entry, "itunes_duration", None)
        if itunes_duration:
            duration = _parse_duration(str(itunes_duration))
        # Published date
        published = None
        if hasattr(entry, "published_parsed") and entry.published_parsed:
            try:
                from time import mktime
                published = datetime.fromtimestamp(mktime(entry.published_parsed))
            except (TypeError, ValueError, OverflowError):
                pass
        # GUID: the feed's stable entry id when present, else the audio URL.
        guid = getattr(entry, "id", None) or audio_url
        # Episode artwork
        ep_artwork = None
        itunes_image = getattr(entry, "itunes_image", None)
        if itunes_image and isinstance(itunes_image, dict):
            ep_artwork = itunes_image.get("href")
        episodes.append({
            "id": uuid.uuid4(),
            "show_id": show_id,
            "user_id": user_id,
            "title": getattr(entry, "title", None),
            "description": getattr(entry, "summary", None),
            "audio_url": audio_url,
            "duration_seconds": duration,
            "file_size_bytes": file_size,
            "published_at": published,
            "guid": guid,
            "artwork_url": ep_artwork,
        })
    return episodes
async def _scan_local_folder(local_path: str, show_id: uuid.UUID, user_id: str) -> list[dict]:
    """Scan a local folder for audio files and create episode dicts.

    Files are visited in sorted name order; non-audio extensions and
    non-regular files are skipped.  Returns [] when the folder is missing.
    """
    import os
    from mutagen import File as MutagenFile
    from app.config import AUDIO_EXTENSIONS
    episodes = []
    if not os.path.isdir(local_path):
        return episodes
    files = sorted(os.listdir(local_path))
    for i, fname in enumerate(files):
        ext = os.path.splitext(fname)[1].lower()
        if ext not in AUDIO_EXTENSIONS:
            continue
        fpath = os.path.join(local_path, fname)
        if not os.path.isfile(fpath):
            continue
        # Read metadata with mutagen
        # Default title: the file name without its extension.
        title = os.path.splitext(fname)[0]
        duration = None
        file_size = os.path.getsize(fpath)
        try:
            audio = MutagenFile(fpath)
            if audio and audio.info:
                duration = int(audio.info.length)
            # Try to get title from tags
            if audio and audio.tags:
                # Key variants: Vorbis/FLAC ("title"), ID3 ("TIT2"), MP4 ("\xa9nam").
                for tag_key in ("title", "TIT2", "\xa9nam"):
                    tag_val = audio.tags.get(tag_key)
                    if tag_val:
                        title = str(tag_val[0]) if isinstance(tag_val, list) else str(tag_val)
                        break
        except Exception:
            # Unreadable/corrupt metadata: keep the filename-derived defaults.
            pass
        stat = os.stat(fpath)
        # File mtime stands in for a publication date (naive local time).
        published = datetime.fromtimestamp(stat.st_mtime)
        episodes.append({
            "id": uuid.uuid4(),
            "show_id": show_id,
            "user_id": user_id,
            "title": title,
            "description": None,
            "audio_url": fpath,
            "duration_seconds": duration,
            "file_size_bytes": file_size,
            "published_at": published,
            "guid": f"local:{fpath}",
            "artwork_url": None,
        })
    return episodes
# ── Endpoints ──
@router.get("")
async def list_shows(
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """List user's shows with episode counts and unplayed counts.

    unplayed_count is derived as total episodes minus completed episodes.
    """
    # Subquery: total episodes per show
    ep_count_sq = (
        select(
            Episode.show_id,
            func.count(Episode.id).label("episode_count"),
        )
        .where(Episode.user_id == user_id)
        .group_by(Episode.show_id)
        .subquery()
    )
    # Subquery: episodes with completed progress
    played_sq = (
        select(
            Episode.show_id,
            func.count(Progress.id).label("played_count"),
        )
        .join(Progress, Progress.episode_id == Episode.id)
        .where(Episode.user_id == user_id, Progress.is_completed == True)  # noqa: E712
        .group_by(Episode.show_id)
        .subquery()
    )
    # Outer joins keep shows with zero episodes; coalesce turns NULLs into 0.
    stmt = (
        select(
            Show,
            func.coalesce(ep_count_sq.c.episode_count, 0).label("episode_count"),
            func.coalesce(played_sq.c.played_count, 0).label("played_count"),
        )
        .outerjoin(ep_count_sq, ep_count_sq.c.show_id == Show.id)
        .outerjoin(played_sq, played_sq.c.show_id == Show.id)
        .where(Show.user_id == user_id)
        .order_by(Show.title)
    )
    result = await db.execute(stmt)
    rows = result.all()
    return [
        {
            "id": str(show.id),
            "title": show.title,
            "author": show.author,
            "description": show.description,
            "artwork_url": show.artwork_url,
            "feed_url": show.feed_url,
            "local_path": show.local_path,
            "show_type": show.show_type,
            "episode_count": ep_count,
            "unplayed_count": ep_count - played_count,
            "created_at": show.created_at.isoformat() if show.created_at else None,
            "updated_at": show.updated_at.isoformat() if show.updated_at else None,
        }
        for show, ep_count, played_count in rows
    ]
@router.post("", status_code=201)
async def create_show(
    body: ShowCreate,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Create a show from an RSS feed (podcast) or a local folder.

    Episodes are imported in the same transaction as the show row.

    Raises:
        HTTPException 400: neither source given, unfetchable/empty feed,
            or a local show without a title.
    """
    if not body.feed_url and not body.local_path:
        raise HTTPException(400, "Either feed_url or local_path is required")
    show_id = uuid.uuid4()
    if body.feed_url:
        # RSS podcast: fetch the feed first so a bad URL fails before any writes.
        try:
            feed, etag, last_modified = await _fetch_and_parse_feed(body.feed_url)
        except Exception as e:
            log.error("Failed to fetch feed %s: %s", body.feed_url, e)
            # Chain the cause so the original traceback survives in logs.
            raise HTTPException(400, f"Failed to fetch feed: {e}") from e
        if feed is None:
            # _fetch_and_parse_feed returns None for a 304; with no cached
            # validators sent, that means the server gave us nothing usable.
            raise HTTPException(400, "Feed returned no content")
        info = _extract_show_info(feed)
        show = Show(
            id=show_id,
            user_id=user_id,
            title=body.title or info["title"],
            author=info["author"],
            description=info["description"],
            artwork_url=info["artwork_url"],
            feed_url=body.feed_url,
            show_type="podcast",
            # Persist validators so later refreshes can send conditional requests.
            etag=etag,
            last_modified=last_modified,
            last_fetched_at=datetime.utcnow(),
        )
        db.add(show)
        await db.flush()
        ep_dicts = _extract_episodes(feed, show_id, user_id)
        for ep_dict in ep_dicts:
            db.add(Episode(**ep_dict))
        await db.commit()
        await db.refresh(show)
        return {
            "id": str(show.id),
            "title": show.title,
            "show_type": show.show_type,
            "episode_count": len(ep_dicts),
        }
    # Local folder (audiobook-style show).
    if not body.title:
        raise HTTPException(400, "title is required for local shows")
    show = Show(
        id=show_id,
        user_id=user_id,
        title=body.title,
        local_path=body.local_path,
        show_type="local",
        last_fetched_at=datetime.utcnow(),
    )
    db.add(show)
    await db.flush()
    ep_dicts = await _scan_local_folder(body.local_path, show_id, user_id)
    for ep_dict in ep_dicts:
        db.add(Episode(**ep_dict))
    await db.commit()
    await db.refresh(show)
    return {
        "id": str(show.id),
        "title": show.title,
        "show_type": show.show_type,
        "episode_count": len(ep_dicts),
    }
@router.get("/{show_id}")
async def get_show(
    show_id: str,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Get show details plus all its episodes (with per-episode progress)."""
    try:
        sid = uuid.UUID(show_id)
    except ValueError:
        # A malformed id previously raised ValueError -> 500; treat as not found.
        raise HTTPException(404, "Show not found")
    show = await db.get(Show, sid)
    if not show or show.user_id != user_id:
        raise HTTPException(404, "Show not found")
    # Fetch episodes with progress; outer join keeps never-played episodes.
    stmt = (
        select(Episode, Progress)
        .outerjoin(Progress, (Progress.episode_id == Episode.id) & (Progress.user_id == user_id))
        .where(Episode.show_id == show.id)
        .order_by(Episode.published_at.desc().nullslast())
    )
    result = await db.execute(stmt)
    rows = result.all()
    episodes = []
    for ep, prog in rows:
        episodes.append({
            "id": str(ep.id),
            "title": ep.title,
            "description": ep.description,
            "audio_url": ep.audio_url,
            "duration_seconds": ep.duration_seconds,
            "file_size_bytes": ep.file_size_bytes,
            "published_at": ep.published_at.isoformat() if ep.published_at else None,
            "artwork_url": ep.artwork_url,
            "progress": {
                "position_seconds": prog.position_seconds,
                "is_completed": prog.is_completed,
                "playback_speed": prog.playback_speed,
                "last_played_at": prog.last_played_at.isoformat() if prog.last_played_at else None,
            } if prog else None,
        })
    return {
        "id": str(show.id),
        "title": show.title,
        "author": show.author,
        "description": show.description,
        "artwork_url": show.artwork_url,
        "feed_url": show.feed_url,
        "local_path": show.local_path,
        "show_type": show.show_type,
        "last_fetched_at": show.last_fetched_at.isoformat() if show.last_fetched_at else None,
        "created_at": show.created_at.isoformat() if show.created_at else None,
        "episodes": episodes,
    }
@router.delete("/{show_id}", status_code=204)
async def delete_show(
    show_id: str,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Delete a show and all its episodes (cascade via the ORM relationship).

    Raises 404 for a missing, malformed, or foreign-user show id.
    """
    try:
        show_uuid = uuid.UUID(show_id)
    except ValueError:
        # A malformed id previously escaped uuid.UUID as an unhandled 500.
        raise HTTPException(404, "Show not found") from None
    show = await db.get(Show, show_uuid)
    if not show or show.user_id != user_id:
        raise HTTPException(404, "Show not found")
    await db.delete(show)
    await db.commit()
@router.post("/{show_id}/refresh")
async def refresh_show(
    show_id: str,
    user_id: str = Depends(get_user_id),
    db: AsyncSession = Depends(get_db_session),
):
    """Re-fetch an RSS feed or re-scan a local folder for new episodes.

    Only episodes whose guid is not already stored are inserted; show
    metadata (title, author, artwork, caching headers) is refreshed for
    podcasts. Returns ``{"new_episodes": N}``. Raises 404 for a missing,
    malformed, or foreign-user show id; 400 when the feed fetch fails.
    """
    try:
        show_uuid = uuid.UUID(show_id)
    except ValueError:
        # A malformed id previously escaped uuid.UUID as an unhandled 500.
        raise HTTPException(404, "Show not found") from None
    show = await db.get(Show, show_uuid)
    if not show or show.user_id != user_id:
        raise HTTPException(404, "Show not found")
    new_count = 0
    if show.show_type == "podcast" and show.feed_url:
        try:
            # Pass stored ETag/Last-Modified so an unchanged feed returns 304.
            feed, etag, last_modified = await _fetch_and_parse_feed(
                show.feed_url, show.etag, show.last_modified
            )
        except Exception as e:
            # Chain the original exception so tracebacks stay accurate (B904).
            raise HTTPException(400, f"Failed to fetch feed: {e}") from e
        if feed is None:
            return {"new_episodes": 0, "message": "Feed not modified"}
        info = _extract_show_info(feed)
        # Refresh metadata, keeping existing values when the feed omits a field.
        show.title = info["title"] or show.title
        show.author = info["author"] or show.author
        show.description = info["description"] or show.description
        show.artwork_url = info["artwork_url"] or show.artwork_url
        show.etag = etag
        show.last_modified = last_modified
        show.last_fetched_at = datetime.utcnow()
        ep_dicts = _extract_episodes(feed, show.id, user_id)
        # Insert only entries whose guid we haven't stored yet.
        existing = await db.execute(
            select(Episode.guid).where(Episode.show_id == show.id)
        )
        existing_guids = {row[0] for row in existing.all()}
        for ep_dict in ep_dicts:
            if ep_dict["guid"] not in existing_guids:
                db.add(Episode(**ep_dict))
                new_count += 1
    elif show.show_type == "local" and show.local_path:
        ep_dicts = await _scan_local_folder(show.local_path, show.id, user_id)
        existing = await db.execute(
            select(Episode.guid).where(Episode.show_id == show.id)
        )
        existing_guids = {row[0] for row in existing.all()}
        for ep_dict in ep_dicts:
            if ep_dict["guid"] not in existing_guids:
                db.add(Episode(**ep_dict))
                new_count += 1
        show.last_fetched_at = datetime.utcnow()
    await db.commit()
    return {"new_episodes": new_count}

View File

@@ -0,0 +1,35 @@
"""Media service configuration — all from environment variables."""
import os
# ── Database ──
DATABASE_URL = os.environ.get(
"DATABASE_URL",
"postgresql+asyncpg://brain:brain@brain-db:5432/brain",
)
DATABASE_URL_SYNC = DATABASE_URL.replace("+asyncpg", "")
# ── Redis ──
REDIS_URL = os.environ.get("REDIS_URL", "redis://brain-redis:6379/0")
# ── Local audio ──
LOCAL_AUDIO_PATH = os.environ.get("LOCAL_AUDIO_PATH", "/audiobooks")
# ── Worker ──
FEED_FETCH_INTERVAL = int(os.environ.get("FEED_FETCH_INTERVAL", "1800"))
# ── Service ──
PORT = int(os.environ.get("PORT", "8400"))
DEBUG = os.environ.get("DEBUG", "").lower() in ("1", "true")
# ── Audio extensions ──
AUDIO_EXTENSIONS = {".mp3", ".m4a", ".ogg", ".opus", ".flac"}
# ── Content types ──
CONTENT_TYPES = {
".mp3": "audio/mpeg",
".m4a": "audio/mp4",
".ogg": "audio/ogg",
".opus": "audio/opus",
".flac": "audio/flac",
}

View File

@@ -0,0 +1,18 @@
"""Database session and engine setup."""
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
from sqlalchemy.orm import DeclarativeBase
from app.config import DATABASE_URL
engine = create_async_engine(DATABASE_URL, echo=False, pool_size=10, max_overflow=5)
async_session = async_sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
class Base(DeclarativeBase):
pass
async def get_db() -> AsyncSession:
async with async_session() as session:
yield session

View File

@@ -0,0 +1,87 @@
"""Media service — FastAPI entrypoint."""
import logging
from fastapi import FastAPI, Depends
from sqlalchemy import select, func
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import DEBUG, PORT
logging.basicConfig(
level=logging.DEBUG if DEBUG else logging.INFO,
format="%(asctime)s %(levelname)s %(name)s: %(message)s",
)
app = FastAPI(
title="Media Service",
description="Podcast and local audio management with playback tracking.",
version="1.0.0",
docs_url="/api/docs" if DEBUG else None,
redoc_url=None,
)
@app.on_event("startup")
async def startup():
from app.database import engine, Base
from app.models import Show, Episode, Progress, QueueItem, PlaybackEvent # noqa
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
logging.getLogger(__name__).info("Media service started on port %s", PORT)
# Register routers — one per resource area (shows, episodes, playback, queue).
from app.api.shows import router as shows_router
from app.api.episodes import router as episodes_router
from app.api.playback import router as playback_router
from app.api.queue import router as queue_router

app.include_router(shows_router)
app.include_router(episodes_router)
app.include_router(playback_router)
app.include_router(queue_router)
@app.get("/api/health")
async def health():
return {"status": "ok", "service": "media"}
from app.api.deps import get_user_id, get_db_session
@app.get("/api/stats")
async def get_stats(
user_id: str = Depends(get_user_id),
db: AsyncSession = Depends(get_db_session),
):
from app.models import Show, Episode, Progress
total_shows = await db.scalar(
select(func.count(Show.id)).where(Show.user_id == user_id)
)
total_episodes = await db.scalar(
select(func.count(Episode.id)).where(Episode.user_id == user_id)
)
total_seconds = await db.scalar(
select(func.coalesce(func.sum(Progress.position_seconds), 0)).where(
Progress.user_id == user_id,
)
)
in_progress = await db.scalar(
select(func.count(Progress.id)).where(
Progress.user_id == user_id,
Progress.position_seconds > 0,
Progress.is_completed == False, # noqa: E712
)
)
return {
"total_shows": total_shows or 0,
"total_episodes": total_episodes or 0,
"total_listened_hours": round((total_seconds or 0) / 3600, 1),
"in_progress": in_progress or 0,
}

View File

@@ -0,0 +1,109 @@
"""SQLAlchemy models for the media service."""
import uuid
from datetime import datetime
from sqlalchemy import (
Column, String, Text, Integer, BigInteger, Boolean, Float,
DateTime, ForeignKey, Index, UniqueConstraint,
)
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship
from app.database import Base
class Show(Base):
    """A subscribable audio source: an RSS podcast or a local audiobook folder."""

    __tablename__ = "media_shows"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id = Column(String(64), nullable=False, index=True)
    title = Column(String(500), nullable=False)
    author = Column(String(500))
    description = Column(Text)
    artwork_url = Column(Text)
    feed_url = Column(Text)    # set for RSS podcasts
    local_path = Column(Text)  # set for local folder shows
    show_type = Column(String(20), nullable=False, default="podcast")  # "podcast" | "local"
    # HTTP caching state for conditional feed re-fetches (If-None-Match / If-Modified-Since).
    etag = Column(String(255))
    last_modified = Column(String(255))
    last_fetched_at = Column(DateTime)
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    # Deleting a show deletes its episodes (ORM cascade; FK also cascades at DB level).
    episodes = relationship("Episode", back_populates="show", cascade="all, delete-orphan")
class Episode(Base):
    """One playable item of a show — a podcast episode or a local audio file."""

    __tablename__ = "media_episodes"
    __table_args__ = (
        # guid is unique per show; refresh paths rely on this for dedup
        # (ON CONFLICT (show_id, guid) DO NOTHING in the worker).
        UniqueConstraint("show_id", "guid", name="uq_media_episodes_show_guid"),
        Index("idx_media_episodes_show", "show_id"),
        Index("idx_media_episodes_published", "published_at"),
    )

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    show_id = Column(UUID(as_uuid=True), ForeignKey("media_shows.id", ondelete="CASCADE"))
    user_id = Column(String(64), nullable=False)
    title = Column(String(1000))
    description = Column(Text)
    # Remote enclosure URL for podcasts; filesystem path for local shows.
    audio_url = Column(Text)
    duration_seconds = Column(Integer)
    file_size_bytes = Column(BigInteger)
    published_at = Column(DateTime)
    # RSS entry id (falls back to audio URL), or "local:<path>" for local files.
    guid = Column(String(500))
    artwork_url = Column(Text)
    is_downloaded = Column(Boolean, default=False)
    created_at = Column(DateTime, default=datetime.utcnow)

    show = relationship("Show", back_populates="episodes")
    # One progress row per episode in this relationship (per-user uniqueness is
    # enforced on the Progress table itself).
    progress = relationship("Progress", back_populates="episode", uselist=False, cascade="all, delete-orphan")
class Progress(Base):
    """Saved playback state for one (user, episode) pair."""

    __tablename__ = "media_progress"
    __table_args__ = (
        UniqueConstraint("user_id", "episode_id", name="uq_media_progress_user_episode"),
        Index("idx_media_progress_user", "user_id"),
    )

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id = Column(String(64), nullable=False)
    episode_id = Column(UUID(as_uuid=True), ForeignKey("media_episodes.id", ondelete="CASCADE"))
    # Resume position; Float to allow sub-second precision.
    position_seconds = Column(Float, default=0)
    duration_seconds = Column(Integer)
    is_completed = Column(Boolean, default=False)
    playback_speed = Column(Float, default=1.0)
    last_played_at = Column(DateTime, default=datetime.utcnow)

    episode = relationship("Episode", back_populates="progress")
class QueueItem(Base):
    """An entry in a user's up-next playback queue, ordered by sort_order."""

    __tablename__ = "media_queue"
    __table_args__ = (
        # An episode appears at most once per user's queue.
        UniqueConstraint("user_id", "episode_id", name="uq_media_queue_user_episode"),
        Index("idx_media_queue_user_order", "user_id", "sort_order"),
    )

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id = Column(String(64), nullable=False)
    episode_id = Column(UUID(as_uuid=True), ForeignKey("media_episodes.id", ondelete="CASCADE"))
    # Lower values play first.
    sort_order = Column(Integer, nullable=False, default=0)
    added_at = Column(DateTime, default=datetime.utcnow)

    episode = relationship("Episode")
class PlaybackEvent(Base):
    """Append-only log of playback actions for analytics/history."""

    __tablename__ = "media_playback_events"

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id = Column(String(64), nullable=False)
    episode_id = Column(UUID(as_uuid=True), ForeignKey("media_episodes.id", ondelete="CASCADE"))
    # Free-form action name — presumably play/pause/seek/complete, written by
    # the playback API layer; confirm against app.api.playback.
    event_type = Column(String(20), nullable=False)
    position_seconds = Column(Float)
    playback_speed = Column(Float, default=1.0)
    created_at = Column(DateTime, default=datetime.utcnow)

    episode = relationship("Episode")

View File

View File

@@ -0,0 +1,298 @@
"""Worker tasks — feed fetching, local folder scanning, scheduling loop."""
from __future__ import annotations
import logging
import os
import time
import uuid
from datetime import datetime
from time import mktime
import feedparser
import httpx
from sqlalchemy import create_engine, select, text
from sqlalchemy.orm import Session, sessionmaker
from app.config import DATABASE_URL_SYNC, FEED_FETCH_INTERVAL, AUDIO_EXTENSIONS
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s %(levelname)s %(name)s: %(message)s",
)
log = logging.getLogger(__name__)
# Sync engine for worker
engine = create_engine(DATABASE_URL_SYNC, pool_size=5, max_overflow=2)
SessionLocal = sessionmaker(bind=engine)
def _parse_duration(value: str) -> int | None:
if not value:
return None
parts = value.strip().split(":")
try:
if len(parts) == 3:
return int(parts[0]) * 3600 + int(parts[1]) * 60 + int(parts[2])
elif len(parts) == 2:
return int(parts[0]) * 60 + int(parts[1])
else:
return int(float(parts[0]))
except (ValueError, IndexError):
return None
def fetch_feed(feed_url: str, etag: str = None, last_modified: str = None):
    """Fetch and parse an RSS feed synchronously, honouring HTTP caching.

    Sends If-None-Match / If-Modified-Since when known. A 304 response
    yields (None, None, None) so callers can skip processing; otherwise
    returns (parsed_feed, new_etag, new_last_modified).
    """
    conditional_headers = {}
    if etag:
        conditional_headers["If-None-Match"] = etag
    if last_modified:
        conditional_headers["If-Modified-Since"] = last_modified
    with httpx.Client(timeout=30, follow_redirects=True) as client:
        response = client.get(feed_url, headers=conditional_headers)
    if response.status_code == 304:
        return None, None, None
    response.raise_for_status()
    parsed = feedparser.parse(response.text)
    return parsed, response.headers.get("ETag"), response.headers.get("Last-Modified")
def _safe_size(raw) -> int | None:
    """Best-effort parse of an RSS enclosure ``length`` attribute.

    Real-world feeds routinely carry "" or junk in length; treat anything
    unparseable (or zero) as unknown instead of crashing the refresh.
    """
    try:
        return int(raw) or None
    except (TypeError, ValueError):
        return None


def refresh_rss_show(session: Session, show_id: str, feed_url: str, etag: str, last_modified: str, user_id: str):
    """Refresh a single RSS show, inserting new episodes.

    Fetches the feed (honouring ETag/Last-Modified), updates the show's
    caching metadata, and inserts every entry whose guid is not already
    stored. Returns the number of episodes added (0 on fetch failure or
    an unmodified feed).
    """
    try:
        feed, new_etag, new_lm = fetch_feed(feed_url, etag, last_modified)
    except Exception as e:
        log.error("Failed to fetch feed %s: %s", feed_url, e)
        return 0
    if feed is None:
        log.debug("Feed %s not modified", feed_url)
        return 0
    # Update show caching metadata and fetch timestamp.
    session.execute(
        text("""
            UPDATE media_shows
            SET etag = :etag, last_modified = :lm, last_fetched_at = NOW()
            WHERE id = :id
        """),
        {"etag": new_etag, "lm": new_lm, "id": show_id},
    )
    # Existing guids, so only genuinely new entries are inserted.
    rows = session.execute(
        text("SELECT guid FROM media_episodes WHERE show_id = :sid"),
        {"sid": show_id},
    ).fetchall()
    existing_guids = {r[0] for r in rows}
    new_count = 0
    for entry in feed.entries:
        # Locate the audio enclosure; fall back to audio/* links.
        audio_url = None
        file_size = None
        for enc in getattr(entry, "enclosures", []):
            if enc.get("type", "").startswith("audio/") or enc.get("href", "").split("?")[0].endswith(
                (".mp3", ".m4a", ".ogg", ".opus")
            ):
                audio_url = enc.get("href")
                # was: int(enc.get("length", 0)) — raised ValueError on blank
                # length values, aborting the whole refresh pass.
                file_size = _safe_size(enc.get("length", 0))
                break
        if not audio_url:
            for link in getattr(entry, "links", []):
                if link.get("type", "").startswith("audio/"):
                    audio_url = link.get("href")
                    file_size = _safe_size(link.get("length", 0))
                    break
        if not audio_url:
            continue  # non-audio entry (e.g. blog post in a mixed feed)
        guid = getattr(entry, "id", None) or audio_url
        if guid in existing_guids:
            continue
        duration = None
        itunes_duration = getattr(entry, "itunes_duration", None)
        if itunes_duration:
            duration = _parse_duration(str(itunes_duration))
        published = None
        if hasattr(entry, "published_parsed") and entry.published_parsed:
            try:
                published = datetime.fromtimestamp(mktime(entry.published_parsed))
            except (TypeError, ValueError, OverflowError):
                pass
        ep_artwork = None
        itunes_image = getattr(entry, "itunes_image", None)
        if itunes_image and isinstance(itunes_image, dict):
            ep_artwork = itunes_image.get("href")
        session.execute(
            text("""
                INSERT INTO media_episodes
                (id, show_id, user_id, title, description, audio_url,
                 duration_seconds, file_size_bytes, published_at, guid, artwork_url)
                VALUES
                (:id, :show_id, :user_id, :title, :desc, :audio_url,
                 :dur, :size, :pub, :guid, :art)
                ON CONFLICT (show_id, guid) DO NOTHING
            """),
            {
                "id": str(uuid.uuid4()),
                "show_id": show_id,
                "user_id": user_id,
                "title": getattr(entry, "title", None),
                "desc": getattr(entry, "summary", None),
                "audio_url": audio_url,
                "dur": duration,
                "size": file_size,
                "pub": published,
                "guid": guid,
                "art": ep_artwork,
            },
        )
        new_count += 1
    session.commit()
    if new_count:
        log.info("Show %s: added %d new episodes", show_id, new_count)
    return new_count
def refresh_local_show(session: Session, show_id: str, local_path: str, user_id: str):
    """Re-scan a local folder for new audio files.

    Each new file becomes an episode keyed by guid "local:<path>"; files
    already present are skipped. Title and duration are read from audio tags
    via mutagen when possible, falling back to the filename and None.
    Returns the number of files added.
    """
    if not os.path.isdir(local_path):
        log.warning("Local path does not exist: %s", local_path)
        return 0
    rows = session.execute(
        text("SELECT guid FROM media_episodes WHERE show_id = :sid"),
        {"sid": show_id},
    ).fetchall()
    existing_guids = {r[0] for r in rows}
    new_count = 0
    for fname in sorted(os.listdir(local_path)):
        ext = os.path.splitext(fname)[1].lower()
        if ext not in AUDIO_EXTENSIONS:
            continue
        fpath = os.path.join(local_path, fname)
        if not os.path.isfile(fpath):
            continue
        guid = f"local:{fpath}"
        if guid in existing_guids:
            continue
        title = os.path.splitext(fname)[0]
        duration = None
        # One stat() call serves both size and mtime (was getsize() + stat()).
        file_stat = os.stat(fpath)
        file_size = file_stat.st_size
        try:
            # Best-effort tag read — any mutagen failure falls back to the filename.
            from mutagen import File as MutagenFile
            audio = MutagenFile(fpath)
            if audio and audio.info:
                duration = int(audio.info.length)
            if audio and audio.tags:
                # Covers Vorbis ("title"), ID3 (TIT2) and MP4 (\xa9nam) title tags.
                for tag_key in ("title", "TIT2", "\xa9nam"):
                    tag_val = audio.tags.get(tag_key)
                    if tag_val:
                        title = str(tag_val[0]) if isinstance(tag_val, list) else str(tag_val)
                        break
        except Exception:
            pass
        # File modification time stands in for a publish date.
        published = datetime.fromtimestamp(file_stat.st_mtime)
        session.execute(
            text("""
                INSERT INTO media_episodes
                (id, show_id, user_id, title, audio_url,
                 duration_seconds, file_size_bytes, published_at, guid)
                VALUES
                (:id, :show_id, :user_id, :title, :audio_url,
                 :dur, :size, :pub, :guid)
                ON CONFLICT (show_id, guid) DO NOTHING
            """),
            {
                "id": str(uuid.uuid4()),
                "show_id": show_id,
                "user_id": user_id,
                "title": title,
                "audio_url": fpath,
                "dur": duration,
                "size": file_size,
                "pub": published,
                "guid": guid,
            },
        )
        new_count += 1
    if new_count:
        session.execute(
            text("UPDATE media_shows SET last_fetched_at = NOW() WHERE id = :id"),
            {"id": show_id},
        )
        session.commit()
        log.info("Local show %s: added %d new files", show_id, new_count)
    return new_count
def refresh_all_shows():
    """Refresh every show — RSS podcasts and local folders alike."""
    session = SessionLocal()
    try:
        shows = session.execute(
            text("SELECT id, user_id, feed_url, local_path, show_type, etag, last_modified FROM media_shows")
        ).fetchall()
        total_new = 0
        # Dispatch each show to the matching refresher based on its type.
        for sid, uid, feed_url, local_path, show_type, etag, lm in shows:
            if show_type == "podcast" and feed_url:
                total_new += refresh_rss_show(session, str(sid), feed_url, etag, lm, uid)
            elif show_type == "local" and local_path:
                total_new += refresh_local_show(session, str(sid), local_path, uid)
        if total_new:
            log.info("Feed refresh complete: %d new episodes total", total_new)
    except Exception:
        log.exception("Error during feed refresh")
        session.rollback()
    finally:
        session.close()
def run_scheduler():
    """Blocking scheduler loop — refresh all shows every FEED_FETCH_INTERVAL seconds.

    Waits up to ~30s for the database to accept connections before starting;
    previously the retry loop could exhaust silently with no indication.
    """
    log.info("Media worker started — refresh interval: %ds", FEED_FETCH_INTERVAL)
    # Wait for DB to be ready (10 attempts, 3s apart).
    for attempt in range(10):
        try:
            with engine.connect() as conn:
                conn.execute(text("SELECT 1"))
            break
        except Exception:
            log.info("Waiting for database... (attempt %d)", attempt + 1)
            time.sleep(3)
    else:
        # Proceed anyway; refresh_all_shows() logs per-cycle failures.
        log.warning("Database still unreachable after 10 attempts; continuing")
    while True:
        try:
            refresh_all_shows()
        except Exception:
            log.exception("Scheduler loop error")
        time.sleep(FEED_FETCH_INTERVAL)


if __name__ == "__main__":
    run_scheduler()

View File

@@ -0,0 +1,44 @@
# Media service — two containers sharing the existing "brain" stack's
# PostgreSQL and Redis (joined via the external brain_default network).
services:
  # FastAPI HTTP API (listens on 8400 inside the container).
  # NOTE(review): no host port mapping here — presumably reached through the
  # external "pangolin" network (reverse proxy); confirm.
  media-api:
    build:
      context: .
      dockerfile: Dockerfile.api
    container_name: media-api
    restart: unless-stopped
    volumes:
      # Audiobook library mounted read-only — the service only streams/scans.
      - /media/yusiboyz/Media/Audiobooks:/audiobooks:ro
    environment:
      - DATABASE_URL=postgresql+asyncpg://brain:brain@brain-db:5432/brain
      - REDIS_URL=redis://brain-redis:6379/0
      - LOCAL_AUDIO_PATH=/audiobooks
      - PORT=8400
      - TZ=${TZ:-America/Chicago}
    networks:
      - default
      - pangolin
      - brain
  # Background worker: periodic feed refresh + local folder scanning.
  media-worker:
    build:
      context: .
      dockerfile: Dockerfile.worker
    container_name: media-worker
    restart: unless-stopped
    volumes:
      - /media/yusiboyz/Media/Audiobooks:/audiobooks:ro
    environment:
      - DATABASE_URL=postgresql+asyncpg://brain:brain@brain-db:5432/brain
      - REDIS_URL=redis://brain-redis:6379/0
      - LOCAL_AUDIO_PATH=/audiobooks
      # Seconds between refresh passes (30 minutes).
      - FEED_FETCH_INTERVAL=1800
      - TZ=${TZ:-America/Chicago}
    networks:
      - default
      - brain
networks:
  # Pre-existing external networks — not created by this compose file.
  pangolin:
    external: true
  brain:
    name: brain_default
    external: true

View File

@@ -0,0 +1,12 @@
# Web framework + ASGI server
fastapi==0.115.0
uvicorn[standard]==0.32.0
# Database — async driver for the API, sync driver for the worker
sqlalchemy[asyncio]==2.0.35
asyncpg==0.30.0
psycopg2-binary==2.9.10
pydantic==2.10.0
# HTTP client + RSS feed parsing
httpx==0.28.0
feedparser==6.0.11
# Redis / queueing
redis==5.2.0
rq==2.1.0
python-dateutil==2.9.0
# Audio metadata extraction for local files
mutagen==1.47.0