"""Admin/settings API routes: read effective settings, override them, test the LLM."""

from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.config import settings
from app.core.dependencies import get_current_admin, get_current_user
from app.database import get_db
from app.models import AppSetting
from app.models.user import User
from app.schemas.schemas import SettingUpdate, SettingsResponse

router = APIRouter()

# Keys that may be overridden at runtime via PUT ""; module-level so it is
# built once, not on every request.
ALLOWED_KEYS = {"llm_provider", "llm_model", "congress_poll_interval_minutes"}


def _current_model(provider: str) -> str:
    """Return the env-configured default model name for *provider*.

    Unknown providers yield the sentinel string "unknown".
    """
    models = {
        "openai": settings.OPENAI_MODEL,
        "anthropic": settings.ANTHROPIC_MODEL,
        "gemini": settings.GEMINI_MODEL,
        "ollama": settings.OLLAMA_MODEL,
    }
    return models.get(provider, "unknown")


@router.get("", response_model=SettingsResponse)
async def get_settings(
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Return current effective settings (env + DB overrides).

    DB rows in ``app_settings`` take precedence over environment defaults.
    ``current_user`` is unused but required: it enforces authentication.
    """
    result = await db.execute(select(AppSetting))
    overrides: dict[str, str] = {row.key: row.value for row in result.scalars()}

    # Resolve the provider once — the model fallback depends on it, and the
    # original computed this lookup twice.
    provider = overrides.get("llm_provider", settings.LLM_PROVIDER)
    return SettingsResponse(
        llm_provider=provider,
        llm_model=overrides.get("llm_model", _current_model(provider)),
        congress_poll_interval_minutes=int(
            overrides.get(
                "congress_poll_interval_minutes",
                settings.CONGRESS_POLL_INTERVAL_MINUTES,
            )
        ),
        newsapi_enabled=bool(settings.NEWSAPI_KEY),
        pytrends_enabled=settings.PYTRENDS_ENABLED,
    )


@router.put("")
async def update_setting(
    body: SettingUpdate,
    db: AsyncSession = Depends(get_db),
    current_user: User = Depends(get_current_admin),
):
    """Upsert a runtime setting override (admin only).

    Raises:
        HTTPException: 400 when ``body.key`` is not an allowed override key.
    """
    if body.key not in ALLOWED_KEYS:
        # sorted() so the error text is deterministic; interpolating the raw
        # set exposed hash-randomized ordering to clients.
        raise HTTPException(
            status_code=400,
            detail=f"Allowed setting keys: {sorted(ALLOWED_KEYS)}",
        )
    existing = await db.get(AppSetting, body.key)
    if existing:
        existing.value = body.value
    else:
        db.add(AppSetting(key=body.key, value=body.value))
    await db.commit()
    return {"key": body.key, "value": body.value}


@router.post("/test-llm")
async def test_llm_connection(current_user: User = Depends(get_current_admin)):
    """Test that the configured LLM provider responds correctly (admin only)."""
    # Imported locally so provider SDKs are not loaded at module import time.
    from app.services.llm_service import get_llm_provider

    try:
        provider = get_llm_provider()
        brief = provider.generate_brief(
            doc_text="This is a test bill for connection verification purposes.",
            bill_metadata={
                "title": "Test Connection Bill",
                "sponsor_name": "Test Sponsor",
                "party": "Test",
                "state": "DC",
                "chamber": "House",
                "introduced_date": "2025-01-01",
                "latest_action_text": "Test action",
                "latest_action_date": "2025-01-01",
            },
        )
        summary = brief.summary
        return {
            "status": "ok",
            "provider": brief.llm_provider,
            "model": brief.llm_model,
            "summary_preview": summary[:100] + "..." if len(summary) > 100 else summary,
        }
    except Exception as e:
        # Deliberately broad: any provider failure is reported as a status
        # payload rather than a 500, so admins can see the root cause.
        return {"status": "error", "detail": str(e)}