feat: atomic extract-and-connect + stale PR monitor + response audit #4

Merged
m3taversal merged 70 commits from epimetheus/atomic-connect-and-stale-monitor into main 2026-03-30 11:03:35 +00:00
Showing only changes of commit ed46c0674b - Show all commits

View file

@ -288,7 +288,7 @@ async def handle_research(msg, query: str, user):
await msg.chat.send_action("typing")
logger.info("Research: searching X for '%s'", query)
- tweets = await search_x(query, max_results=15, min_engagement=3)
+ tweets = await search_x(query, max_results=15, min_engagement=0)
logger.info("Research: got %d tweets for '%s'", len(tweets), query)
if not tweets:
await msg.reply_text(f"No recent tweets found for '{query}'.")
@ -490,10 +490,12 @@ async def handle_tagged(update: Update, context: ContextTypes.DEFAULT_TYPE):
haiku_prompt = (
f"Does this Telegram message need a live X/Twitter search to answer well? "
f"Only say YES if the user is asking about recent sentiment, community takes, "
f"what people are saying, or emerging discussions that wouldn't be in a knowledge base.\n\n"
f"what people are saying, or emerging discussions.\n\n"
f"Message: {text}\n\n"
f"If YES, provide a SHORT search query (2-3 words max, like 'P2P.me' or 'MetaDAO buyback'). "
f"Twitter search works best with simple queries — too many words returns nothing.\n\n"
f"Respond with ONLY one of:\n"
- f"YES: [search query]\n"
+ f"YES: [2-3 word query]\n"
f"NO"
)
haiku_result = await call_openrouter("anthropic/claude-haiku-4.5", haiku_prompt, max_tokens=50)
@ -616,12 +618,14 @@ IMPORTANT: Two special tags you can append at the end of your response (after yo
logger.info("Auto-learned [%s]: %s", category, correction[:80])
# Auto-research (Ganymede: LLM-driven research trigger)
# Skip if Haiku pre-pass already searched (prevents double-fire + duplicate "No tweets found" messages)
research_lines = re.findall(r'^RESEARCH:\s+(.+)$', response, re.MULTILINE)
if research_lines:
display_response = re.sub(r'\nRESEARCH:\s+.+$', '', display_response, flags=re.MULTILINE).rstrip()
- for query in research_lines:
- asyncio.get_event_loop().create_task(handle_research(msg, query.strip(), user))
- logger.info("Auto-research triggered: %s", query[:80])
+ if not research_context: # Only fire if Haiku didn't already search
+ for query in research_lines:
+ asyncio.get_event_loop().create_task(handle_research(msg, query.strip(), user))
+ logger.info("Auto-research triggered: %s", query[:80])
# Post response (without LEARNING lines)
await msg.reply_text(display_response)