epimetheus: Ganymede review fixes + tweet fetch pagination
- fetch_tweet_by_url: paginate up to 3 pages to find older tweets
- Return placeholder on fetch failure (Ganymede: surface failure to user)
- Don't burn user rate limit on Haiku autonomous searches (Ganymede)
- 7-day limitation documented in comment

Pentagon-Agent: Epimetheus <3D35839A-7722-4740-B93D-51157F7D5E70>
This commit is contained in:
parent
76d5644272
commit
8f4e583c76
2 changed files with 45 additions and 23 deletions
|
|
@ -528,7 +528,7 @@ async def handle_tagged(update: Update, context: ContextTypes.DEFAULT_TYPE):
|
||||||
research_context = f"\n## Fresh X Research Results for '{search_query}'\n"
|
research_context = f"\n## Fresh X Research Results for '{search_query}'\n"
|
||||||
for t in tweets[:7]:
|
for t in tweets[:7]:
|
||||||
research_context += f"- @{t['author']}: {t['text'][:150]}\n"
|
research_context += f"- @{t['author']}: {t['text'][:150]}\n"
|
||||||
record_research_usage(user.id if user else 0)
|
# Don't burn user's rate limit on autonomous searches (Ganymede)
|
||||||
# Archive as source
|
# Archive as source
|
||||||
try:
|
try:
|
||||||
slug = re.sub(r"[^a-z0-9]+", "-", search_query[:60].lower()).strip("-")
|
slug = re.sub(r"[^a-z0-9]+", "-", search_query[:60].lower()).strip("-")
|
||||||
|
|
|
||||||
|
|
@ -203,28 +203,50 @@ async def fetch_tweet_by_url(url: str) -> dict | None:
|
||||||
}
|
}
|
||||||
|
|
||||||
# Fallback: search from:username and match by ID
|
# Fallback: search from:username and match by ID
|
||||||
async with session.get(
|
# NOTE: fallback only finds recent tweets (~7 days). Older tweets fail. (Ganymede)
|
||||||
API_URL,
|
# Try with cursor pagination to search deeper
|
||||||
params={"query": f"from:{username}", "queryType": "Latest"},
|
cursor = ""
|
||||||
headers={"X-API-Key": key},
|
for page in range(3): # Up to 3 pages
|
||||||
timeout=aiohttp.ClientTimeout(total=10),
|
params = {"query": f"from:{username}", "queryType": "Latest"}
|
||||||
) as resp:
|
if cursor:
|
||||||
if resp.status >= 400:
|
params["cursor"] = cursor
|
||||||
return None
|
async with session.get(
|
||||||
data = await resp.json()
|
API_URL,
|
||||||
for tweet in data.get("tweets", []):
|
params=params,
|
||||||
if str(tweet.get("id")) == tweet_id:
|
headers={"X-API-Key": key},
|
||||||
author = tweet.get("author", {})
|
timeout=aiohttp.ClientTimeout(total=10),
|
||||||
return {
|
) as resp:
|
||||||
"text": tweet.get("text", ""),
|
if resp.status >= 400:
|
||||||
"url": url,
|
break
|
||||||
"author": author.get("userName", username),
|
data = await resp.json()
|
||||||
"author_name": author.get("name", ""),
|
for tweet in data.get("tweets", []):
|
||||||
"author_followers": author.get("followers", 0),
|
if str(tweet.get("id")) == tweet_id:
|
||||||
"engagement": (tweet.get("likeCount", 0) or 0) + (tweet.get("retweetCount", 0) or 0),
|
author = tweet.get("author", {})
|
||||||
"tweet_date": tweet.get("createdAt", ""),
|
return {
|
||||||
"is_article": False,
|
"text": tweet.get("text", ""),
|
||||||
}
|
"url": url,
|
||||||
|
"author": author.get("userName", username),
|
||||||
|
"author_name": author.get("name", ""),
|
||||||
|
"author_followers": author.get("followers", 0),
|
||||||
|
"engagement": (tweet.get("likeCount", 0) or 0) + (tweet.get("retweetCount", 0) or 0),
|
||||||
|
"tweet_date": tweet.get("createdAt", ""),
|
||||||
|
"is_article": False,
|
||||||
|
}
|
||||||
|
cursor = data.get("next_cursor", "")
|
||||||
|
if not cursor:
|
||||||
|
break
|
||||||
|
|
||||||
|
# If still not found, return placeholder (Ganymede: surface failure)
|
||||||
|
return {
|
||||||
|
"text": f"[Could not fetch tweet content from @{username}]",
|
||||||
|
"url": url,
|
||||||
|
"author": username,
|
||||||
|
"author_name": "",
|
||||||
|
"author_followers": 0,
|
||||||
|
"engagement": 0,
|
||||||
|
"tweet_date": "",
|
||||||
|
"is_article": False,
|
||||||
|
}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warning("Tweet fetch error for %s: %s", url, e)
|
logger.warning("Tweet fetch error for %s: %s", url, e)
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Reference in a new issue