feat(email_gen): draft constituent letter generator + bill text indicators
- Add DraftLetterPanel: collapsible UI below BriefPanel for bills with a
brief; lets users select up to 3 cited points, pick stance/tone, and
generate a plain-text letter via the configured LLM provider
- Stance pre-fills from follow mode (pocket_boost → YES, pocket_veto → NO)
and clears when the user unfollows; recipient derived from bill chamber
- Add POST /api/bills/{bill_id}/draft-letter endpoint with proper LLM
provider/model resolution from AppSetting (respects Settings page choice)
- Add generate_text() to LLMProvider ABC and all four providers
- Expose has_document on BillSchema (list endpoint) via a single batch
query; BillCard shows Brief / Pending / No text indicator per bill
Authored-By: Jack Levy
This commit is contained in:
@@ -183,6 +183,10 @@ class LLMProvider(ABC):
|
||||
def generate_amendment_brief(self, new_text: str, previous_text: str, bill_metadata: dict) -> ReverseBrief:
|
||||
pass
|
||||
|
||||
@abstractmethod
def generate_text(self, prompt: str) -> str:
    """Return the model's free-form text completion for *prompt*.

    Unlike the brief-generating methods, implementations return raw text
    and do no JSON parsing (added for the draft-letter feature, per the
    commit description).
    """
    pass
|
||||
|
||||
|
||||
class OpenAIProvider(LLMProvider):
|
||||
def __init__(self, model: str | None = None):
|
||||
@@ -218,6 +222,14 @@ class OpenAIProvider(LLMProvider):
|
||||
raw = response.choices[0].message.content
|
||||
return parse_brief_json(raw, "openai", self.model)
|
||||
|
||||
def generate_text(self, prompt: str) -> str:
    """Run a single-turn chat completion and return the reply text.

    Returns "" when the API reports a null message content.
    """
    completion = self.client.chat.completions.create(
        model=self.model,
        temperature=0.3,
        messages=[{"role": "user", "content": prompt}],
    )
    reply = completion.choices[0].message.content
    # None (no content) coerces to the empty string for callers.
    return reply if reply else ""
|
||||
|
||||
|
||||
class AnthropicProvider(LLMProvider):
|
||||
def __init__(self, model: str | None = None):
|
||||
@@ -247,6 +259,14 @@ class AnthropicProvider(LLMProvider):
|
||||
raw = response.content[0].text
|
||||
return parse_brief_json(raw, "anthropic", self.model)
|
||||
|
||||
def generate_text(self, prompt: str) -> str:
    """Send *prompt* as one user turn and return the first content block's text."""
    result = self.client.messages.create(
        max_tokens=1024,
        model=self.model,
        messages=[{"role": "user", "content": prompt}],
    )
    first_block = result.content[0]
    return first_block.text
|
||||
|
||||
|
||||
class GeminiProvider(LLMProvider):
|
||||
def __init__(self, model: str | None = None):
|
||||
@@ -272,6 +292,14 @@ class GeminiProvider(LLMProvider):
|
||||
response = self._make_model(AMENDMENT_SYSTEM_PROMPT).generate_content(prompt)
|
||||
return parse_brief_json(response.text, "gemini", self.model_name)
|
||||
|
||||
def generate_text(self, prompt: str) -> str:
    """Generate free-form text from a fresh model instance (temperature 0.3).

    NOTE(review): built directly via self._genai rather than self._make_model,
    presumably because no system prompt applies here — confirm.
    """
    text_model = self._genai.GenerativeModel(
        generation_config={"temperature": 0.3},
        model_name=self.model_name,
    )
    result = text_model.generate_content(prompt)
    return result.text
|
||||
|
||||
|
||||
class OllamaProvider(LLMProvider):
|
||||
def __init__(self, model: str | None = None):
|
||||
@@ -326,6 +354,16 @@ class OllamaProvider(LLMProvider):
|
||||
)
|
||||
return parse_brief_json(raw2, "ollama", self.model)
|
||||
|
||||
def generate_text(self, prompt: str) -> str:
    """POST a non-streaming /api/generate request and return the completion text."""
    # Deferred import, as in the original — NOTE(review): presumably to keep
    # `requests` optional at module import time; confirm against file header.
    import requests as req

    payload = {"model": self.model, "prompt": prompt, "stream": False}
    resp = req.post(
        f"{self.base_url}/api/generate",
        json=payload,
        timeout=120,
    )
    resp.raise_for_status()
    body = resp.json()
    # Missing "response" key degrades to the empty string.
    return body.get("response", "")
|
||||
|
||||
|
||||
def get_llm_provider(provider: str | None = None, model: str | None = None) -> LLMProvider:
|
||||
"""Factory — returns the configured LLM provider.
|
||||
@@ -344,3 +382,72 @@ def get_llm_provider(provider: str | None = None, model: str | None = None) -> L
|
||||
elif provider == "ollama":
|
||||
return OllamaProvider(model=model)
|
||||
raise ValueError(f"Unknown LLM_PROVIDER: '{provider}'. Must be one of: openai, anthropic, gemini, ollama")
|
||||
|
||||
|
||||
# Citation label for each bill-type slug (e.g. "hjres" -> "H.J.Res.").
# NOTE(review): not referenced within this chunk — presumably used elsewhere in
# the module to render human-readable bill labels; confirm before removing.
_BILL_TYPE_LABELS: dict[str, str] = {
    "hr": "H.R.",
    "s": "S.",
    "hjres": "H.J.Res.",
    "sjres": "S.J.Res.",
    "hconres": "H.Con.Res.",
    "sconres": "S.Con.Res.",
    "hres": "H.Res.",
    "sres": "S.Res.",
}
|
||||
|
||||
# Per-tone instruction sentence injected into the letter prompt's RULES section;
# generate_draft_letter() falls back to the "polite" entry for unknown tone keys.
_TONE_INSTRUCTIONS: dict[str, str] = {
    "short": "Keep the letter brief — 6 to 8 sentences total.",
    "polite": "Use a respectful, formal, and courteous tone throughout the letter.",
    "firm": "Use a direct, firm tone that makes clear the constituent's strong conviction.",
}
|
||||
|
||||
|
||||
def generate_draft_letter(
    bill_label: str,
    bill_title: str,
    stance: str,
    recipient: str,
    tone: str,
    selected_points: list[str],
    include_citations: bool,
    zip_code: str | None,
    llm_provider: str | None = None,
    llm_model: str | None = None,
) -> str:
    """Generate a plain-text constituent letter draft using the configured LLM provider.

    Args:
        bill_label: Human-readable bill identifier placed in the ask and BILL line.
        bill_title: Bill title shown alongside the label in the prompt.
        stance: "yes" asks for a YES vote; any other value asks for NO.
        recipient: "house" addresses a House member; any other value, a Senator.
        tone: Key into _TONE_INSTRUCTIONS; unknown keys fall back to "polite".
        selected_points: Points the letter body is allowed to reference.
        include_citations: Whether the letter may mention citation labels.
        zip_code: Optional constituent ZIP code woven into the prompt.
        llm_provider: Optional provider override passed to get_llm_provider().
        llm_model: Optional model override passed to get_llm_provider().

    Returns:
        The provider's raw plain-text letter.
    """
    vote_word = "NO" if stance != "yes" else "YES"
    chamber_word = "Senate" if recipient != "house" else "House"
    try:
        tone_instruction = _TONE_INSTRUCTIONS[tone]
    except KeyError:
        # Unknown tone keys degrade to the formal default.
        tone_instruction = _TONE_INSTRUCTIONS["polite"]

    bullet_lines = [f"- {point}" for point in selected_points]
    points_block = "\n".join(bullet_lines)

    if include_citations:
        citation_instruction = "You may reference the citation label for each point (e.g. 'as noted in Section 3') if it adds clarity."
    else:
        citation_instruction = "Do not include any citation references."

    location_line = ""
    if zip_code:
        location_line = f"The constituent is writing from ZIP code {zip_code}."

    prompt = f"""Write a short constituent letter to a {chamber_word} member of Congress.

RULES:
- {tone_instruction}
- 6 to 12 sentences total.
- First sentence must be a clear, direct ask: "Please vote {vote_word} on {bill_label}."
- The body must reference ONLY the points listed below — do not invent any other claims or facts.
- {citation_instruction}
- Close with a brief sign-off and the placeholder "[Your Name]".
- Plain text only. No markdown, no bullet points, no headers, no partisan framing.
- Do not mention any political party.

BILL: {bill_label} — {bill_title}
STANCE: Vote {vote_word}
{location_line}

SELECTED POINTS TO REFERENCE:
{points_block}

Write the letter now:"""

    provider = get_llm_provider(provider=llm_provider, model=llm_model)
    return provider.generate_text(prompt)
|
||||
|
||||
Reference in New Issue
Block a user