Files
pngx-sync/app/database.py
domverse b99dbf694d
All checks were successful
Deploy / deploy (push) Successful in 30s
feat: implement pngx-controller with Gitea CI/CD deployment
- Full FastAPI sync engine: master→replica document sync via paperless REST API
- Web UI: dashboard, replicas, logs, settings (Jinja2 + HTMX + Pico CSS)
- APScheduler background sync, SSE live log stream, Prometheus metrics
- Fernet encryption for API tokens at rest
- pngx.env credential file: written on save, pre-fills forms on load
- Dockerfile with layer-cached uv build, Python healthcheck
- docker-compose with host networking for Tailscale access
- Gitea Actions workflow: version bump, secret injection, docker compose deploy

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-22 17:59:25 +01:00

60 lines
1.9 KiB
Python

from sqlalchemy import event
from sqlmodel import Session, SQLModel, create_engine
from .config import get_config
# Lazily-created process-wide engine; populated by get_engine() on first use.
_engine = None


def get_engine():
    """Return the shared SQLAlchemy engine, creating it on first call.

    The engine is built from the configured ``database_url`` with
    ``check_same_thread=False`` so the SQLite connection pool can be shared
    across threads (required for FastAPI's threadpool-executed sync routes).
    """
    global _engine
    if _engine is None:
        cfg = get_config()
        _engine = create_engine(
            cfg.database_url,
            connect_args={"check_same_thread": False},
        )

        # SQLite PRAGMAs are scoped to a single DBAPI connection, so they
        # must be re-applied every time the pool opens a fresh connection
        # (foreign_keys in particular resets per connection).
        @event.listens_for(_engine, "connect")
        def _apply_pragmas(raw_conn, _connection_record):
            cur = raw_conn.cursor()
            cur.execute("PRAGMA journal_mode=WAL")
            cur.execute("PRAGMA foreign_keys=ON")
            cur.close()

    return _engine
def get_session():
    """Dependency generator: yield a Session bound to the shared engine.

    Intended for FastAPI's ``Depends``; the session is closed when the
    request (and therefore the generator) finishes.
    """
    session = Session(get_engine())
    try:
        yield session
    finally:
        session.close()
def create_db_and_tables() -> None:
    """Create all tables plus SQLite-specific indexes, FTS table, and triggers.

    Idempotent: every DDL statement uses ``IF NOT EXISTS``, so calling this
    on every application startup is safe.
    """
    from . import models  # noqa: F401  # side-effect import: registers model classes

    # Reuse a single engine reference instead of calling get_engine() twice.
    engine = get_engine()
    SQLModel.metadata.create_all(engine)

    # Raw DDL that SQLModel cannot express: composite indexes, an FTS5
    # external-content mirror of the logs table, and triggers keeping it in
    # sync. engine.begin() commits on success and rolls back on error,
    # replacing the manual connect()/commit() pattern which would leave the
    # transaction dangling if a statement raised.
    with engine.begin() as conn:
        conn.exec_driver_sql(
            "CREATE INDEX IF NOT EXISTS idx_sync_map_replica ON sync_map(replica_id)"
        )
        conn.exec_driver_sql(
            "CREATE INDEX IF NOT EXISTS idx_sync_map_status ON sync_map(replica_id, status)"
        )
        conn.exec_driver_sql(
            "CREATE VIRTUAL TABLE IF NOT EXISTS logs_fts "
            "USING fts5(message, content=logs, content_rowid=id)"
        )
        conn.exec_driver_sql(
            "CREATE TRIGGER IF NOT EXISTS logs_ai AFTER INSERT ON logs BEGIN "
            "INSERT INTO logs_fts(rowid, message) VALUES (new.id, new.message); END"
        )
        conn.exec_driver_sql(
            "CREATE TRIGGER IF NOT EXISTS logs_ad AFTER DELETE ON logs BEGIN "
            "INSERT INTO logs_fts(logs_fts, rowid, message) "
            "VALUES('delete', old.id, old.message); END"
        )
        # NOTE(review): no AFTER UPDATE trigger exists, so an UPDATE to
        # logs.message would leave logs_fts stale — fine if log rows are
        # append-only; confirm against the models/writers.