epimetheus: compressed conversation context + decouple archive from lock
1. Conversation history now shows compressed context summary first (tickers, key figures, exchange count) before full log. "Discussing: $FUTARDIO | Key figures: $0.004, $39.5K | Exchanges: 3" — 20 tokens, unmissable. Plus prompt instruction: "NEVER ask a question your history already answers." (Ganymede: Option C+A) 2. Archive file writes decoupled from worktree lock. File written unlocked (additive, no coordination needed). Git commit attempted with lock — deferred on timeout, file persists on disk for next cycle. Fixes "Read-only file system" archive failures. (Ganymede review) Pentagon-Agent: Epimetheus <3D35839A-7722-4740-B93D-51157F7D5E70>
This commit is contained in:
parent
8d10c8ee28
commit
a11eca90e3
1 changed file with 45 additions and 16 deletions
|
|
@ -269,16 +269,40 @@ def _save_learning(correction: str, category: str = "factual"):
|
|||
logger.warning("Learning save failed: %s", e)
|
||||
|
||||
|
||||
def _compress_history(history: list[dict]) -> str:
|
||||
"""Extract key context from conversation history — 20 tokens, unmissable (Ganymede)."""
|
||||
if not history:
|
||||
return ""
|
||||
# Combine all text for entity/number extraction
|
||||
all_text = " ".join(h.get("user", "") + " " + h.get("bot", "") for h in history)
|
||||
tickers = sorted(set(re.findall(r"\$[A-Z]{2,10}", all_text)))
|
||||
numbers = re.findall(r"\$[\d,.]+[KMB]?|\d+\.?\d*%", all_text)
|
||||
parts = []
|
||||
if tickers:
|
||||
parts.append(f"Discussing: {', '.join(tickers)}")
|
||||
if numbers:
|
||||
parts.append(f"Key figures: {', '.join(numbers[:5])}")
|
||||
parts.append(f"Exchanges: {len(history)}")
|
||||
return " | ".join(parts)
|
||||
|
||||
|
||||
def _format_conversation_history(chat_id: int, user_id: int) -> str:
|
||||
"""Format conversation history for injection into the Opus prompt."""
|
||||
"""Format conversation history with compressed context summary (Ganymede: Option C+A)."""
|
||||
key = (chat_id, user_id)
|
||||
history = conversation_history.get(key, [])
|
||||
if not history:
|
||||
return "(No prior conversation)"
|
||||
lines = []
|
||||
|
||||
# Compressed context first — hard for the model to miss
|
||||
summary = _compress_history(history)
|
||||
lines = [summary, ""]
|
||||
|
||||
# Full exchange log for reference
|
||||
for exchange in history:
|
||||
lines.append(f"User: {exchange['user']}")
|
||||
lines.append(f"Rio: {exchange['bot']}")
|
||||
if exchange.get("user"):
|
||||
lines.append(f"User: {exchange['user']}")
|
||||
if exchange.get("bot"):
|
||||
lines.append(f"Rio: {exchange['bot']}")
|
||||
lines.append("")
|
||||
return "\n".join(lines)
|
||||
|
||||
|
|
@ -496,7 +520,7 @@ async def handle_tagged(update: Update, context: ContextTypes.DEFAULT_TYPE):
|
|||
pass # Market data is supplementary — never block on failure
|
||||
|
||||
# Build Opus prompt — Rio's voice
|
||||
prompt = f"""You are Rio, an internet finance analyst responding in a Telegram group. You have deep knowledge about futarchy, prediction markets, token governance, and the MetaDAO ecosystem.
|
||||
prompt = f"""You are Rio, the Teleo internet finance agent. Your Telegram handle is @FutAIrdBot — that IS you. Users tag @FutAIrdBot to reach you. Never say "I'm not FutAIrdBot." You are also @futaRdIO on X. You have deep knowledge about futarchy, prediction markets, token governance, and the MetaDAO ecosystem.
|
||||
|
||||
## How to sound
|
||||
Write like a sharp analyst talking to peers, not like an AI. Specifically:
|
||||
|
|
@ -515,7 +539,7 @@ Write like a sharp analyst talking to peers, not like an AI. Specifically:
|
|||
|
||||
{f"## Live Market Data{chr(10)}{market_context}" if market_context else ""}
|
||||
|
||||
## Conversation History
|
||||
## Conversation History (NEVER ask a question your history already answers)
|
||||
{_format_conversation_history(msg.chat_id, user.id if user else 0)}
|
||||
|
||||
## The message you're responding to
|
||||
|
|
@ -646,13 +670,15 @@ tags: [telegram, ownership-community]
|
|||
**Intake tier:** {intake_tier} — {'fast-tracked, contributor provided reasoning' if intake_tier == 'directed' else 'standard processing'}
|
||||
**Triage:** Conversation may contain [CLAIM], [ENTITY], or [EVIDENCE] for extraction.
|
||||
"""
|
||||
with main_worktree_lock(timeout=10):
|
||||
archive_path.write_text(content)
|
||||
logger.info("Archived exchange to %s (tier: %s, urls: %d)",
|
||||
filename, intake_tier, len(urls or []))
|
||||
# Write file unlocked — additive, no coordination needed (Ganymede: decouple)
|
||||
archive_path.write_text(content)
|
||||
logger.info("Archived exchange to %s (tier: %s, urls: %d)",
|
||||
filename, intake_tier, len(urls or []))
|
||||
# Commit with lock — deferred on timeout, file persists on disk
|
||||
try:
|
||||
_git_commit_archive(archive_path, filename)
|
||||
except TimeoutError:
|
||||
logger.warning("Failed to archive exchange: worktree lock timeout")
|
||||
except Exception:
|
||||
logger.warning("Commit deferred for %s — file on disk, next cycle picks it up", filename)
|
||||
except Exception as e:
|
||||
logger.error("Failed to archive exchange: %s", e)
|
||||
|
||||
|
|
@ -796,11 +822,14 @@ tags: [telegram, ownership-community]
|
|||
**Triage:** [{tag}] — classified by batch triage
|
||||
**Participants:** {', '.join(f'@{u}' for u in contributors)}
|
||||
"""
|
||||
with main_worktree_lock(timeout=10):
|
||||
archive_path.write_text(content)
|
||||
logger.info("Archived window [%s]: %s (%d msgs, %d participants)",
|
||||
tag, filename, len(window), len(contributors))
|
||||
# Write file unlocked (Ganymede: decouple write from lock)
|
||||
archive_path.write_text(content)
|
||||
logger.info("Archived window [%s]: %s (%d msgs, %d participants)",
|
||||
tag, filename, len(window), len(contributors))
|
||||
try:
|
||||
_git_commit_archive(archive_path, filename)
|
||||
except Exception:
|
||||
logger.warning("Commit deferred for %s — file on disk", filename)
|
||||
except TimeoutError:
|
||||
logger.warning("Failed to archive window: worktree lock timeout")
|
||||
except Exception as e:
|
||||
|
|
|
|||
Loading…
Reference in a new issue