epimetheus: X link fetching + Haiku pre-pass + systemd fix + query tuning
Major changes this session:

- fetch_tweet_by_url: extracts username + tweet ID from X URLs, tries the article endpoint first, then falls back to a from:username search. Fetched tweets are injected into the Opus prompt.
- Haiku pre-pass: decides whether an X search is needed before Opus responds; produces 2-3 word queries (an illustrative sketch follows the first hunk below).
- systemd ProtectSystem paths fixed (ROOT CAUSE of all write failures since day 1).
- Research regex now handles the Telegram @botname suffix in groups (see the sketch after the file summary below).
- Double research message prevented (the RESEARCH: tag is skipped when the Haiku pre-pass already ran).
- Engagement filter dropped to 0 for niche crypto tweets.
- Heuristic brevity guidance in the prompt (not a hard cap).
- DM auto-respond gating (groups: reply-to only; DMs: auto-respond).
- All code is now edited in the pipeline-v2 repo, not /tmp.

Pentagon-Agent: Epimetheus <3D35839A-7722-4740-B93D-51157F7D5E70>
parent ed46c0674b
commit 76d5644272
2 changed files with 92 additions and 0 deletions
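The research-command regex fix from the message above is not visible in the hunks below. As a minimal sketch only (the /research command name, bot username, and helper below are illustrative, not the repo's actual code): Telegram appends the bot's @username to commands sent in groups, so the pattern has to tolerate an optional @botname suffix.

import re

# Hypothetical sketch: "/research@epimetheus_bot query" must parse the same as "/research query".
RESEARCH_RE = re.compile(r"^/research(?:@\w+)?\s+(?P<query>.+)$", re.IGNORECASE | re.DOTALL)

def parse_research(text: str) -> str | None:
    """Return the research query, tolerating an optional @botname suffix."""
    m = RESEARCH_RE.match(text.strip())
    return m.group("query").strip() if m else None

assert parse_research("/research solana validator economics") == "solana validator economics"
assert parse_research("/research@epimetheus_bot solana validator economics") == "solana validator economics"
assert parse_research("hello there") is None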
@@ -484,6 +484,24 @@ async def handle_tagged(update: Update, context: ContextTypes.DEFAULT_TYPE):
    # Send typing indicator
    await msg.chat.send_action("typing")

    # Fetch any X/Twitter links in the message (tweet or article)
    x_link_context = ""
    x_urls = re.findall(r'https?://(?:twitter\.com|x\.com)/\w+/status/\d+', text)
    if x_urls:
        from x_search import fetch_tweet_by_url
        for url in x_urls[:3]:  # Cap at 3 links
            try:
                tweet_data = await fetch_tweet_by_url(url)
                if tweet_data:
                    x_link_context += f"\n## Linked Tweet by @{tweet_data['author']}\n"
                    if tweet_data.get("title"):
                        x_link_context += f"Title: {tweet_data['title']}\n"
                    x_link_context += f"{tweet_data['text'][:500]}\n"
                    x_link_context += f"Engagement: {tweet_data.get('engagement', 0)} | URL: {url}\n"
                    logger.info("Fetched X link: @%s — %s", tweet_data['author'], tweet_data['text'][:60])
            except Exception as e:
                logger.warning("Failed to fetch X link %s: %s", url, e)

    # Haiku pre-pass: does this message need an X search? (Option A: two-pass)
    if not research_context:  # Skip if /research already ran
        try:
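The body of the Haiku pre-pass sits outside this hunk. As an illustration only of the two-pass idea described in the commit message (not the committed code; the official anthropic SDK, the model id, and the prompt wording are all assumptions):

from anthropic import AsyncAnthropic

_haiku = AsyncAnthropic()  # assumes ANTHROPIC_API_KEY is set in the environment

async def needs_x_search(message: str) -> str | None:
    """Ask a small, fast model whether answering requires an X search.
    Returns a short (2-3 word) query, or None when no search is needed."""
    resp = await _haiku.messages.create(
        model="claude-3-5-haiku-latest",  # assumed model id
        max_tokens=20,
        messages=[{
            "role": "user",
            "content": (
                "Does answering this Telegram message require searching X/Twitter "
                "for current posts? Reply with NO, or with a 2-3 word search query.\n\n"
                + message
            ),
        }],
    )
    answer = resp.content[0].text.strip()
    return None if answer.upper().startswith("NO") else answer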
@@ -580,6 +598,8 @@ Write like a sharp analyst talking to peers, not like an AI. Specifically:

{research_context}

{x_link_context}

## Conversation History (NEVER ask a question your history already answers)
{_format_conversation_history(msg.chat_id, user.id if user else 0)}

@@ -157,3 +157,75 @@ tags: [x-research, telegram-research]
Engagement: {tweet.get('likes', 0)} likes, {tweet.get('retweets', 0)} retweets, {tweet.get('replies', 0)} replies
Author followers: {tweet.get('author_followers', 0)}
"""


async def fetch_tweet_by_url(url: str) -> dict | None:
    """Fetch a specific tweet/article by X URL. Extracts username and tweet ID,
    searches via advanced_search (tweet/detail doesn't work with this API provider).
    """
    import re as _re

    # Extract username and tweet ID from URL
    match = _re.search(r'(?:twitter\.com|x\.com)/(\w+)/status/(\d+)', url)
    if not match:
        return None

    username = match.group(1)
    tweet_id = match.group(2)

    key = _load_api_key()
    if not key:
        return None

    try:
        async with aiohttp.ClientSession() as session:
            # First try article endpoint
            async with session.get(
                "https://api.twitterapi.io/twitter/article",
                params={"tweet_id": tweet_id},
                headers={"X-API-Key": key},
                timeout=aiohttp.ClientTimeout(total=10),
            ) as resp:
                if resp.status == 200:
                    data = await resp.json()
                    article = data.get("article")
                    if article:
                        return {
                            "text": article.get("text", article.get("content", "")),
                            "url": url,
                            "author": username,
                            "author_name": article.get("author", {}).get("name", ""),
                            "author_followers": article.get("author", {}).get("followers", 0),
                            "engagement": 0,
                            "tweet_date": article.get("createdAt", ""),
                            "is_article": True,
                            "title": article.get("title", ""),
                        }

            # Fallback: search from:username and match by ID
            async with session.get(
                API_URL,
                params={"query": f"from:{username}", "queryType": "Latest"},
                headers={"X-API-Key": key},
                timeout=aiohttp.ClientTimeout(total=10),
            ) as resp:
                if resp.status >= 400:
                    return None
                data = await resp.json()
                for tweet in data.get("tweets", []):
                    if str(tweet.get("id")) == tweet_id:
                        author = tweet.get("author", {})
                        return {
                            "text": tweet.get("text", ""),
                            "url": url,
                            "author": author.get("userName", username),
                            "author_name": author.get("name", ""),
                            "author_followers": author.get("followers", 0),
                            "engagement": (tweet.get("likeCount", 0) or 0) + (tweet.get("retweetCount", 0) or 0),
                            "tweet_date": tweet.get("createdAt", ""),
                            "is_article": False,
                        }
    except Exception as e:
        logger.warning("Tweet fetch error for %s: %s", url, e)

    return None
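For quick manual testing outside the bot, a minimal standalone driver could look like the sketch below (the URL is hypothetical; it assumes x_search.py is importable and an API key is configured):

import asyncio

from x_search import fetch_tweet_by_url

async def main() -> None:
    # Hypothetical URL purely for illustration
    tweet = await fetch_tweet_by_url("https://x.com/someuser/status/1234567890123456789")
    if tweet is None:
        print("no tweet returned (bad URL, missing API key, or API error)")
    else:
        print(f"@{tweet['author']}: {tweet['text'][:120]}")

asyncio.run(main())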