feat: implement pngx-controller with Gitea CI/CD deployment
All checks were successful
Deploy / deploy (push) Successful in 30s

- Full FastAPI sync engine: master→replica document sync via paperless REST API
- Web UI: dashboard, replicas, logs, settings (Jinja2 + HTMX + Pico CSS)
- APScheduler background sync, SSE live log stream, Prometheus metrics
- Fernet encryption for API tokens at rest
- pngx.env credential file: written on save, pre-fills forms on load
- Dockerfile with layer-cached uv build, Python healthcheck
- docker-compose with host networking for Tailscale access
- Gitea Actions workflow: version bump, secret injection, docker compose deploy

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-22 17:59:25 +01:00
parent 942482daab
commit b99dbf694d
40 changed files with 4184 additions and 0 deletions

0
app/api/__init__.py Normal file
View File

109
app/api/logs.py Normal file
View File

@@ -0,0 +1,109 @@
import asyncio
import json
from typing import Optional
from fastapi import APIRouter, Depends, Query
from fastapi.responses import StreamingResponse
from sqlalchemy import text
from sqlmodel import Session, select
from ..database import get_session
from ..logger import subscribe_sse, unsubscribe_sse
from ..models import Log
router = APIRouter(prefix="/api/logs", tags=["logs"])
@router.get("")
def list_logs(
    replica_id: Optional[int] = Query(None),
    level: Optional[str] = Query(None),
    from_dt: Optional[str] = Query(None, alias="from"),
    to_dt: Optional[str] = Query(None, alias="to"),
    q: Optional[str] = Query(None),
    page: int = Query(1, ge=1),
    page_size: int = Query(50, ge=1, le=200),
    session: Session = Depends(get_session),
):
    """List log entries, newest first, with optional filters and pagination.

    When ``q`` is given, a SQLite FTS5 full-text search is run against the
    ``logs_fts`` shadow table.  The replica/level/date filters now apply on
    that branch too (previously they were silently ignored when searching).
    """
    offset = (page - 1) * page_size
    if q:
        # FTS5 search — use SQLAlchemy execute() for raw SQL.  Build the
        # WHERE clause dynamically so the structured filters combine with
        # the MATCH term; all values go through bound parameters.
        conditions = ["logs_fts MATCH :q"]
        params: dict = {"q": q, "lim": page_size, "off": offset}
        if replica_id is not None:
            conditions.append("l.replica_id = :replica_id")
            params["replica_id"] = replica_id
        if level:
            conditions.append("l.level = :level")
            params["level"] = level
        if from_dt:
            conditions.append("l.created_at >= :from_dt")
            params["from_dt"] = from_dt
        if to_dt:
            conditions.append("l.created_at <= :to_dt")
            params["to_dt"] = to_dt
        fts_sql = text(
            "SELECT l.id, l.run_id, l.replica_id, l.level, l.message, l.doc_id, l.created_at "
            "FROM logs l JOIN logs_fts f ON l.id = f.rowid "
            "WHERE " + " AND ".join(conditions) + " "
            "ORDER BY l.created_at DESC LIMIT :lim OFFSET :off"
        )
        rows = session.execute(fts_sql, params).all()
        return [dict(r._mapping) for r in rows]
    # Non-search path: compose an ORM query from the optional filters.
    stmt = select(Log)
    if replica_id is not None:
        stmt = stmt.where(Log.replica_id == replica_id)
    if level:
        stmt = stmt.where(Log.level == level)
    if from_dt:
        stmt = stmt.where(Log.created_at >= from_dt)
    if to_dt:
        stmt = stmt.where(Log.created_at <= to_dt)
    stmt = stmt.order_by(Log.created_at.desc())  # type: ignore[attr-defined]
    stmt = stmt.offset(offset).limit(page_size)
    logs = session.exec(stmt).all()
    return [
        {
            "id": l.id,
            "run_id": l.run_id,
            "replica_id": l.replica_id,
            "level": l.level,
            "message": l.message,
            "doc_id": l.doc_id,
            "created_at": l.created_at.isoformat() if l.created_at else None,
        }
        for l in logs
    ]
@router.delete("")
def clear_logs(
    older_than_days: int = Query(90, ge=1),
    session: Session = Depends(get_session),
):
    """Delete log rows older than ``older_than_days`` days; report the count."""
    from datetime import datetime, timedelta, timezone

    cutoff = datetime.now(timezone.utc) - timedelta(days=older_than_days)
    stale_rows = session.exec(select(Log).where(Log.created_at < cutoff)).all()
    deleted = 0
    for row in stale_rows:
        session.delete(row)
        deleted += 1
    session.commit()
    return {"deleted": deleted}
@router.get("/stream")
async def log_stream():
    """SSE endpoint for live log tail.

    Subscribes an in-process queue to the log broadcaster and streams each
    entry as an SSE ``data:`` frame.  A comment keepalive is emitted every
    30s of silence so proxies don't drop the idle connection.
    """
    async def generator():
        q = subscribe_sse()
        try:
            # Ask clients to wait 3s before reconnecting after a drop.
            yield "retry: 3000\n\n"
            while True:
                try:
                    data = await asyncio.wait_for(q.get(), timeout=30.0)
                    yield f"data: {data}\n\n"
                except asyncio.TimeoutError:
                    # Send keepalive comment
                    yield ": keepalive\n\n"
        except asyncio.CancelledError:
            # Fix: re-raise instead of swallowing — CancelledError must
            # propagate so task cancellation (e.g. on shutdown or client
            # disconnect) completes promptly; cleanup happens in finally.
            raise
        finally:
            unsubscribe_sse(q)
    return StreamingResponse(
        generator(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "X-Accel-Buffering": "no",
        },
    )

196
app/api/replicas.py Normal file
View File

@@ -0,0 +1,196 @@
import asyncio
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from sqlmodel import Session, select
from ..config import get_config
from ..crypto import decrypt, encrypt
from ..database import get_session
from ..models import Replica, SyncMap
from ..sync.paperless import PaperlessClient
router = APIRouter(prefix="/api/replicas", tags=["replicas"])
class ReplicaCreate(BaseModel):
    """Request body for POST /api/replicas — all fields required except flags."""
    name: str
    url: str
    # Plaintext paperless API token; encrypted before it is stored.
    api_token: str
    enabled: bool = True
    # None means "use the global sync interval setting".
    sync_interval_seconds: Optional[int] = None
class ReplicaUpdate(BaseModel):
    """Request body for PUT /api/replicas/{id} — every field optional (partial update)."""
    name: Optional[str] = None
    url: Optional[str] = None
    # When provided, replaces (and re-encrypts) the stored token.
    api_token: Optional[str] = None
    enabled: Optional[bool] = None
    sync_interval_seconds: Optional[int] = None
def _serialize(r: Replica) -> dict:
    """Build a JSON-safe view of a replica; the encrypted API token is omitted."""

    def _iso(dt):
        # Datetimes become ISO-8601 strings; absent values stay None.
        return dt.isoformat() if dt else None

    return {
        "id": r.id,
        "name": r.name,
        "url": r.url,
        "enabled": r.enabled,
        "sync_interval_seconds": r.sync_interval_seconds,
        "last_sync_ts": _iso(r.last_sync_ts),
        "consecutive_failures": r.consecutive_failures,
        "suspended_at": _iso(r.suspended_at),
        "created_at": _iso(r.created_at),
    }
async def _test_conn(url: str, token: str) -> dict:
    """Open a throwaway PaperlessClient and run its connection test.

    Returns the client's result dict (it includes at least ``ok`` and,
    on success, ``doc_count`` — see callers below).
    """
    # Single-permit semaphore: this one-off client needs no concurrency.
    sem = asyncio.Semaphore(1)
    async with PaperlessClient(url, token, sem) as client:
        return await client.test_connection()
@router.get("")
def list_replicas(session: Session = Depends(get_session)):
    """Return every configured replica (encrypted tokens are never exposed)."""
    rows = session.exec(select(Replica)).all()
    return [_serialize(row) for row in rows]
@router.post("", status_code=201)
async def create_replica(
    body: ReplicaCreate, session: Session = Depends(get_session)
):
    """Create a replica: verify connectivity, store it, mirror creds to pngx.env.

    Order matters: the connection is tested *before* anything is persisted,
    the token is encrypted at rest, and the env file is written only after
    a successful commit.
    """
    result = await _test_conn(body.url, body.api_token)
    if not result["ok"]:
        raise HTTPException(422, detail=f"Connection test failed: {result['error']}")
    config = get_config()
    # Token is stored encrypted; the plaintext lives only in pngx.env below.
    encrypted_token = encrypt(body.api_token, config.secret_key)
    replica = Replica(
        name=body.name,
        url=body.url,
        api_token=encrypted_token,
        enabled=body.enabled,
        sync_interval_seconds=body.sync_interval_seconds,
    )
    session.add(replica)
    session.commit()
    session.refresh(replica)
    # Mirror URL + plaintext token into the credential env file keyed by name.
    from .. import envfile
    url_key, token_key = envfile.replica_keys(replica.name)
    envfile.write({url_key: replica.url, token_key: body.api_token})
    response = _serialize(replica)
    # Surface the doc count observed during the connection test.
    response["doc_count"] = result["doc_count"]
    return response
@router.put("/{replica_id}")
async def update_replica(
    replica_id: int,
    body: ReplicaUpdate,
    session: Session = Depends(get_session),
):
    """Partially update a replica; re-test the connection if URL/token change.

    Raises 404 for an unknown id and 422 if the new credentials fail the
    connection test (nothing is persisted in that case).
    """
    replica = session.get(Replica, replica_id)
    if not replica:
        raise HTTPException(404)
    config = get_config()
    url_changed = body.url is not None and body.url != replica.url
    token_changed = body.api_token is not None
    if url_changed or token_changed:
        # Test the effective (new-or-current) pair before saving anything.
        new_url = body.url or replica.url
        new_token = body.api_token or decrypt(replica.api_token, config.secret_key)
        result = await _test_conn(new_url, new_token)
        if not result["ok"]:
            raise HTTPException(422, detail=f"Connection test failed: {result['error']}")
    if body.name is not None:
        replica.name = body.name
    if body.url is not None:
        replica.url = body.url
    if body.api_token is not None:
        # Re-encrypt on token replacement.
        replica.api_token = encrypt(body.api_token, config.secret_key)
    if body.enabled is not None:
        replica.enabled = body.enabled
    if body.sync_interval_seconds is not None:
        replica.sync_interval_seconds = body.sync_interval_seconds
    session.add(replica)
    session.commit()
    session.refresh(replica)
    # Keep pngx.env in sync; keys derive from the (possibly new) name.
    # NOTE(review): renaming a replica appears to leave the old-name env
    # keys behind in pngx.env — confirm against the envfile module.
    from .. import envfile
    url_key, token_key = envfile.replica_keys(replica.name)
    env_write: dict[str, str] = {url_key: replica.url}
    if body.api_token:
        # Only write the token when a new plaintext one was supplied.
        env_write[token_key] = body.api_token
    envfile.write(env_write)
    return _serialize(replica)
@router.delete("/{replica_id}", status_code=204)
def delete_replica(replica_id: int, session: Session = Depends(get_session)):
    """Remove a replica and its sync-map rows (404 if the id is unknown)."""
    target = session.get(Replica, replica_id)
    if target is None:
        raise HTTPException(404)
    # SQLite FK cascades aren't relied on here — delete children explicitly.
    mappings = session.exec(select(SyncMap).where(SyncMap.replica_id == replica_id)).all()
    for mapping in mappings:
        session.delete(mapping)
    session.delete(target)
    session.commit()
@router.post("/{replica_id}/test")
async def test_replica(replica_id: int, session: Session = Depends(get_session)):
    """Run a live connection test using the replica's stored credentials."""
    target = session.get(Replica, replica_id)
    if target is None:
        raise HTTPException(404)
    plain_token = decrypt(target.api_token, get_config().secret_key)
    return await _test_conn(target.url, plain_token)
@router.post("/{replica_id}/reconcile")
async def reconcile_replica(replica_id: int, session: Session = Depends(get_session)):
    """Kick off a reconcile pass for one replica (404 if unknown)."""
    if session.get(Replica, replica_id) is None:
        raise HTTPException(404)
    # Imported lazily to avoid a module-level cycle with the sync package.
    from ..sync.reconcile import run_reconcile

    return await run_reconcile(replica_id)
@router.post("/{replica_id}/unsuspend")
def unsuspend_replica(replica_id: int, session: Session = Depends(get_session)):
    """Clear suspension state and the failure counter on a replica."""
    target = session.get(Replica, replica_id)
    if target is None:
        raise HTTPException(404)
    target.consecutive_failures = 0
    target.suspended_at = None
    session.add(target)
    session.commit()
    return _serialize(target)
@router.post("/{replica_id}/resync")
async def resync_replica(replica_id: int, session: Session = Depends(get_session)):
    """Phase 3: wipe sync_map and trigger full resync."""
    target = session.get(Replica, replica_id)
    if target is None:
        raise HTTPException(404)
    # Forget every mapping so the next cycle re-copies all documents.
    stale_maps = session.exec(
        select(SyncMap).where(SyncMap.replica_id == replica_id)
    ).all()
    for mapping in stale_maps:
        session.delete(mapping)
    session.commit()
    # Start a sync run for just this replica right away.
    from ..sync.engine import run_sync_cycle

    started = await run_sync_cycle(triggered_by="manual", replica_id=replica_id)
    return {"started": started}

180
app/api/settings.py Normal file
View File

@@ -0,0 +1,180 @@
import asyncio
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from sqlmodel import Session, select
from ..config import get_config
from ..crypto import decrypt, encrypt
from ..database import get_session
from ..models import Setting
from ..scheduler import SETTINGS_DEFAULTS
router = APIRouter(prefix="/api/settings", tags=["settings"])
ENCRYPTED_KEYS = {"master_token", "alert_target_token"}
def _get_all_settings(session: Session) -> dict:
    """Merge stored Setting rows over the scheduler defaults."""
    merged = dict(SETTINGS_DEFAULTS)
    # Only non-null stored values override a default.
    overrides = {
        row.key: row.value
        for row in session.exec(select(Setting)).all()
        if row.value is not None
    }
    merged.update(overrides)
    return merged
def _safe_settings(settings: dict) -> dict:
    """Return settings with encrypted values masked."""
    masked = dict(settings)
    for secret in ENCRYPTED_KEYS:
        # Only mask keys that actually hold a non-empty value.
        if masked.get(secret):
            masked[secret] = "••••••••"
    return masked
@router.get("")
def get_settings(session: Session = Depends(get_session)):
    """Return the merged settings with secret values masked for display."""
    return _safe_settings(_get_all_settings(session))
class SettingsUpdate(BaseModel):
    """Request body for PUT /api/settings — every field optional (partial update)."""
    # Master paperless instance connection (token is plaintext on input,
    # encrypted before persistence).
    master_url: str | None = None
    master_token: str | None = None
    # Scheduler / sync-engine tuning knobs.
    sync_interval_seconds: int | None = None
    log_retention_days: int | None = None
    sync_cycle_timeout_seconds: int | None = None
    task_poll_timeout_seconds: int | None = None
    replica_suspend_threshold: int | None = None
    max_concurrent_requests: int | None = None
    # Alerting target + thresholds.
    alert_target_type: str | None = None
    alert_target_url: str | None = None
    alert_target_token: str | None = None
    alert_error_threshold: int | None = None
    alert_cooldown_seconds: int | None = None
@router.put("")
async def update_settings(
    body: SettingsUpdate,
    session: Session = Depends(get_session),
):
    """Apply a partial settings update.

    Steps, in order: (1) if the master URL/token changed, live-test the new
    combination and reject with 422 on failure; (2) capture plaintext values
    destined for pngx.env before they get encrypted; (3) upsert each key into
    the settings table (secrets encrypted); (4) write the env file; (5)
    reschedule the sync job if the interval changed.  Returns the masked,
    merged settings.
    """
    config = get_config()
    # exclude_none: only fields the caller actually sent are applied.
    updates = body.model_dump(exclude_none=True)
    # Validate master connection if URL or token changed
    current = _get_all_settings(session)
    if "master_url" in updates or "master_token" in updates:
        new_url = updates.get("master_url") or current.get("master_url", "")
        new_token = updates.get("master_token")
        if not new_token:
            # Fall back to the saved (encrypted) token for the test.
            enc = current.get("master_token", "")
            new_token = decrypt(enc, config.secret_key) if enc else ""
        if new_url and new_token:
            import httpx as _httpx
            try:
                async with _httpx.AsyncClient(
                    headers={"Authorization": f"Token {new_token}"},
                    timeout=10.0,
                ) as _client:
                    # Cheapest authenticated call: a one-item documents page.
                    _r = await _client.get(
                        new_url.rstrip("/") + "/api/documents/",
                        params={"page_size": 1},
                    )
                    _r.raise_for_status()
            except Exception as _e:
                raise HTTPException(
                    422,
                    detail=f"Master connection test failed: {_e}",
                )
    # Capture plaintext values for envfile before encryption
    env_updates: dict[str, str] = {}
    if "master_url" in updates:
        env_updates["MASTER_URL"] = str(updates["master_url"])
    if "master_token" in updates and updates["master_token"]:
        env_updates["MASTER_TOKEN"] = str(updates["master_token"])
    # Persist updates
    for key, value in updates.items():
        if key in ENCRYPTED_KEYS and value:
            # Secrets are encrypted at rest.
            value = encrypt(str(value), config.secret_key)
        setting = session.get(Setting, key)
        if setting:
            setting.value = str(value)
        else:
            setting = Setting(key=key, value=str(value))
        session.add(setting)
    session.commit()
    if env_updates:
        from .. import envfile
        envfile.write(env_updates)
    # Reschedule if interval changed
    if "sync_interval_seconds" in updates:
        from ..scheduler import reschedule
        reschedule(int(updates["sync_interval_seconds"]))
    return _safe_settings(_get_all_settings(session))
class ConnectionTestRequest(BaseModel):
    """Request body for POST /api/settings/test."""
    url: str
    token: str = ""  # blank = use saved master token
@router.post("/test")
async def test_connection(
    body: ConnectionTestRequest,
    session: Session = Depends(get_session),
):
    """Test a connection using the provided URL and token (does not save).

    If token is blank, falls back to the saved master_token.  Always returns
    a dict with ``ok``, ``error``, ``latency_ms`` and ``doc_count`` — network
    failures are reported in-band rather than raised.
    """
    import httpx
    import time
    config = get_config()
    token = body.token.strip()
    if not token:
        # Fall back to the stored (encrypted) master token, if any.
        settings = _get_all_settings(session)
        enc = settings.get("master_token", "")
        token = decrypt(enc, config.secret_key) if enc else ""
    if not token:
        return {"ok": False, "error": "No token provided and no saved token found", "latency_ms": 0, "doc_count": 0}
    t0 = time.monotonic()
    try:
        async with httpx.AsyncClient(
            headers={"Authorization": f"Token {token}"},
            timeout=10.0,
        ) as client:
            # Minimal authenticated request: first documents page, 1 item.
            r = await client.get(
                body.url.rstrip("/") + "/api/documents/",
                params={"page_size": 1},
            )
            r.raise_for_status()
            elapsed = int((time.monotonic() - t0) * 1000)
            data = r.json()
            return {"ok": True, "error": None, "latency_ms": elapsed, "doc_count": data.get("count", 0)}
    except Exception as e:
        # Broad catch is deliberate: any failure becomes a structured result.
        return {"ok": False, "error": str(e), "latency_ms": 0, "doc_count": 0}
@router.get("/status")
async def master_status(session: Session = Depends(get_session)):
    """Test the currently saved master connection."""
    from ..sync.paperless import PaperlessClient

    cfg = get_config()
    stored = _get_all_settings(session)
    url = stored.get("master_url", "")
    token_enc = stored.get("master_token", "")
    # Without both a URL and a saved token there is nothing to test.
    if not (url and token_enc):
        return {"ok": False, "error": "Not configured", "latency_ms": 0, "doc_count": 0}
    token = decrypt(token_enc, cfg.secret_key)
    sem = asyncio.Semaphore(1)
    async with PaperlessClient(url, token, sem) as client:
        return await client.test_connection()

88
app/api/status.py Normal file
View File

@@ -0,0 +1,88 @@
"""Dashboard status endpoint."""
from datetime import datetime, timezone
from fastapi import APIRouter, Depends
from sqlmodel import Session, select
from ..database import get_session
from ..models import Replica, SyncRun
from ..sync.engine import get_progress
router = APIRouter(prefix="/api", tags=["status"])
@router.get("/status")
def get_status(session: Session = Depends(get_session)):
    """Aggregate dashboard status: per-replica state, sync progress, last run.

    Per-replica ``status`` is derived in priority order: suspended >
    syncing > error > synced > pending.
    """
    replicas = session.exec(select(Replica)).all()
    progress = get_progress()
    now = datetime.now(timezone.utc)
    replica_data = []
    for r in replicas:
        # Seconds since the last successful sync (None if never synced).
        lag = None
        if r.last_sync_ts:
            ts = r.last_sync_ts
            if ts.tzinfo is None:
                # Stored timestamps may be naive; treat them as UTC.
                ts = ts.replace(tzinfo=timezone.utc)
            lag = int((now - ts).total_seconds())
        if r.suspended_at:
            status = "suspended"
        elif progress.running and progress.phase and r.name in progress.phase:
            # NOTE(review): substring match — a replica whose name is a
            # prefix of another's could show "syncing" spuriously; confirm
            # how the engine formats ``phase``.
            status = "syncing"
        elif r.consecutive_failures > 0:
            status = "error"
        elif r.last_sync_ts:
            status = "synced"
        else:
            status = "pending"
        # Last run stats for this replica
        last_run = session.exec(
            select(SyncRun)
            .where(SyncRun.replica_id == r.id)
            .order_by(SyncRun.started_at.desc())  # type: ignore[attr-defined]
            .limit(1)
        ).first()
        replica_data.append(
            {
                "id": r.id,
                "name": r.name,
                "url": r.url,
                "enabled": r.enabled,
                "status": status,
                "lag_seconds": lag,
                "last_sync_ts": r.last_sync_ts.isoformat() if r.last_sync_ts else None,
                "consecutive_failures": r.consecutive_failures,
                "suspended": r.suspended_at is not None,
                "docs_synced_last_run": last_run.docs_synced if last_run else 0,
                "docs_failed_last_run": last_run.docs_failed if last_run else 0,
            }
        )
    # Most recent run across all replicas (drives the dashboard summary).
    last_run = session.exec(
        select(SyncRun)
        .order_by(SyncRun.started_at.desc())  # type: ignore[attr-defined]
        .limit(1)
    ).first()
    return {
        "replicas": replica_data,
        "sync_progress": {
            "running": progress.running,
            "phase": progress.phase,
            "docs_done": progress.docs_done,
            "docs_total": progress.docs_total,
        },
        "last_sync_run": {
            "id": last_run.id,
            "started_at": last_run.started_at.isoformat() if last_run and last_run.started_at else None,
            "finished_at": last_run.finished_at.isoformat() if last_run and last_run.finished_at else None,
            "docs_synced": last_run.docs_synced if last_run else 0,
            "docs_failed": last_run.docs_failed if last_run else 0,
            "timed_out": last_run.timed_out if last_run else False,
        }
        if last_run
        else None,
    }

29
app/api/sync.py Normal file
View File

@@ -0,0 +1,29 @@
from fastapi import APIRouter
from fastapi.responses import JSONResponse
from ..sync.engine import get_progress, run_sync_cycle
router = APIRouter(prefix="/api/sync", tags=["sync"])
@router.post("")
async def trigger_sync(replica_id: int | None = None):
    """Manually start a sync cycle; responds 202 either way."""
    started = await run_sync_cycle(triggered_by="manual", replica_id=replica_id)
    message = "Sync triggered" if started else "Already running"
    return JSONResponse(
        status_code=202,
        content={"started": started, "message": message},
    )
@router.get("/running")
def sync_running():
    """Report the current sync-cycle progress snapshot."""
    progress = get_progress()
    return {
        "running": progress.running,
        "phase": progress.phase,
        "docs_done": progress.docs_done,
        "docs_total": progress.docs_total,
    }