fix: resync clears last_sync_ts; add live doc counts to dashboard
All checks were successful
Deploy / deploy (push) Successful in 14s
All checks were successful
Deploy / deploy (push) Successful in 14s
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -178,10 +178,13 @@ def unsuspend_replica(replica_id: int, session: Session = Depends(get_session)):
|
||||
|
||||
@router.post("/{replica_id}/resync")
|
||||
async def resync_replica(replica_id: int, session: Session = Depends(get_session)):
|
||||
"""Phase 3: wipe sync_map and trigger full resync."""
|
||||
"""Wipe sync_map, reset last_sync_ts, and trigger a full resync."""
|
||||
replica = session.get(Replica, replica_id)
|
||||
if not replica:
|
||||
raise HTTPException(404)
|
||||
# Reset last_sync_ts so the sync fetches ALL master documents
|
||||
replica.last_sync_ts = None
|
||||
session.add(replica)
|
||||
# Delete all sync_map entries for this replica
|
||||
entries = session.exec(
|
||||
select(SyncMap).where(SyncMap.replica_id == replica_id)
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
"""Dashboard status endpoint."""
|
||||
import asyncio
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlmodel import Session, select
|
||||
|
||||
from ..database import get_session
|
||||
from ..models import Replica, SyncRun
|
||||
from ..models import Replica, Setting, SyncRun
|
||||
from ..sync.engine import get_progress
|
||||
|
||||
router = APIRouter(prefix="/api", tags=["status"])
|
||||
@@ -86,3 +87,52 @@ def get_status(session: Session = Depends(get_session)):
|
||||
if last_run
|
||||
else None,
|
||||
}
|
||||
|
||||
|
||||
async def _fetch_count(url: str, token: str) -> int | None:
    """Return the total document count reported by *url*'s documents API.

    Queries ``<url>/api/documents/`` with token auth and a tiny page size,
    reading only the ``count`` field. Any failure (network, auth, bad JSON)
    yields ``None`` — callers treat that as "unknown".
    """
    import httpx

    endpoint = url.rstrip("/") + "/api/documents/"
    auth_headers = {"Authorization": f"Token {token}"}
    try:
        async with httpx.AsyncClient(headers=auth_headers, timeout=8.0) as client:
            resp = await client.get(endpoint, params={"page_size": 1})
            resp.raise_for_status()
            return resp.json().get("count")
    except Exception:
        # Best-effort probe: never let a dead target break the caller.
        return None
|
||||
|
||||
|
||||
@router.get("/doc-counts")
async def doc_counts(session: Session = Depends(get_session)):
    """Live document counts from master and all replicas (parallel fetch).

    Returns a mapping of label -> count (or None when a target is
    unreachable); the master is keyed ``"__master__"``, replicas by their
    stringified id.
    """
    from ..config import get_config
    from ..crypto import decrypt
    from ..scheduler import SETTINGS_DEFAULTS

    config = get_config()
    # Overlay persisted settings rows onto the defaults.
    settings = dict(SETTINGS_DEFAULTS)
    settings.update(
        (row.key, row.value)
        for row in session.exec(select(Setting)).all()
        if row.value is not None
    )

    master_url = settings.get("master_url", "")
    token_enc = settings.get("master_token", "")
    master_token = decrypt(token_enc, config.secret_key) if token_enc else ""

    # (label, url, token) triples — master first, then every replica.
    targets = []
    if master_url and master_token:
        targets.append(("__master__", master_url, master_token))
    for rep in session.exec(select(Replica)).all():
        targets.append((str(rep.id), rep.url, decrypt(rep.api_token, config.secret_key)))

    counts = await asyncio.gather(*(_fetch_count(u, t) for _, u, t in targets))

    return {label: count for (label, _, _), count in zip(targets, counts)}
|
||||
|
||||
@@ -53,6 +53,14 @@
|
||||
</details>
|
||||
{% endif %}
|
||||
|
||||
<!-- Live doc counts -->
|
||||
<div id="doc-counts"
|
||||
hx-get="/ui/doc-counts"
|
||||
hx-trigger="load, every 120s"
|
||||
hx-swap="innerHTML">
|
||||
<small class="muted">Loading document counts…</small>
|
||||
</div>
|
||||
|
||||
<!-- Replica table -->
|
||||
<h3>Replicas</h3>
|
||||
{% if replica_rows %}
|
||||
|
||||
@@ -40,6 +40,51 @@ def _lag_str(ts: datetime | None) -> str:
|
||||
return f"{seconds // 86400}d ago"
|
||||
|
||||
|
||||
@router.get("/ui/doc-counts", response_class=HTMLResponse)
async def doc_counts_fragment(request: Request, session: Session = Depends(get_session)):
    """HTMX fragment: live document counts for master and every replica.

    Counts are fetched in parallel via the status API's ``_fetch_count``;
    an unreachable target renders as ``?``. Returns an empty body when
    neither a master nor any replica is configured.
    """
    # Fix: dropped unused imports (`envfile`, `doc_counts as _doc_counts`).
    import asyncio
    from html import escape

    from ..api.status import _fetch_count
    from ..config import get_config
    from ..crypto import decrypt
    from ..models import Setting
    from ..scheduler import SETTINGS_DEFAULTS

    config = get_config()
    # Overlay persisted settings rows onto the defaults.
    settings = dict(SETTINGS_DEFAULTS)
    for row in session.exec(select(Setting)).all():
        if row.value is not None:
            settings[row.key] = row.value

    master_url = settings.get("master_url", "")
    master_token_enc = settings.get("master_token", "")
    master_token = decrypt(master_token_enc, config.secret_key) if master_token_enc else ""

    replicas = session.exec(select(Replica)).all()

    # (label, url, token) — master first, then replicas in DB order.
    tasks = []
    if master_url and master_token:
        tasks.append(("Master", master_url, master_token))
    for r in replicas:
        tasks.append((r.name, r.url, decrypt(r.api_token, config.secret_key)))

    counts_raw = await asyncio.gather(*[_fetch_count(url, tok) for _, url, tok in tasks])

    parts = []
    for (label, _, _), count in zip(tasks, counts_raw):
        val = str(count) if count is not None else "?"
        # Fix: escape labels — replica names are user-supplied and were
        # previously interpolated into the HTML response unescaped.
        parts.append(f"<span><strong>{escape(str(label))}:</strong> {val} docs</span>")

    if not parts:
        return HTMLResponse("")
    html = (
        '<div style="display:flex; gap:1.5rem; flex-wrap:wrap; margin-bottom:1rem; font-size:0.95em;">'
        + " · ".join(parts)
        + "</div>"
    )
    return HTMLResponse(html)
|
||||
|
||||
|
||||
@router.get("/", response_class=HTMLResponse)
|
||||
def dashboard(request: Request, session: Session = Depends(get_session)):
|
||||
replicas = session.exec(select(Replica)).all()
|
||||
|
||||
Reference in New Issue
Block a user