Add GitHub PR feedback module and fix attribution for mirrored PRs
Some checks failed
CI / lint-and-test (push) Has been cancelled

github_feedback.py posts pipeline status to GitHub PRs at three touchpoints:
discovery ack, eval review result, and merge/close outcome. Only fires for
PRs with a github_pr link (set by sync-mirror.sh). All calls non-fatal.

contributor.py: expanded git author fallback to scan all non-merge commits
(was only checking last commit), added teleo-bot and github-actions[bot]
to bot filter list.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
m3taversal 2026-04-16 18:16:28 +01:00
parent 13f21f7732
commit 0f868aefab
5 changed files with 235 additions and 9 deletions

View file

@ -153,19 +153,22 @@ async def record_contributor_attribution(conn, pr_number: int, branch: str, git_
agent_id = agent_id_match.group(1).strip() if agent_id_match else None
upsert_contributor(conn, handle, agent_id, current_role, today)
# Fallback: if no attribution block found, try git commit author then branch agent
# Fallback: if no Pentagon-Agent trailer found, try git commit authors
_BOT_AUTHORS = frozenset({
"m3taversal", "teleo", "teleo-bot", "pipeline",
"github-actions[bot]", "forgejo-actions",
})
if not agents_found:
# For external contributors: parse git commit author as attribution source
rc_author, author_output = await git_fn(
"log", f"origin/main..origin/{branch}", "--format=%an", "-1",
timeout=10,
"log", f"origin/main..origin/{branch}", "--no-merges",
"--format=%an", timeout=10,
)
if rc_author == 0 and author_output.strip():
author_name = author_output.strip().lower()
# Skip generic/bot authors — fall through to branch agent
if author_name not in ("m3taversal", "teleo", "pipeline", ""):
upsert_contributor(conn, author_name, None, "extractor", today)
agents_found.add(author_name)
for author_line in author_output.strip().split("\n"):
author_name = author_line.strip().lower()
if author_name and author_name not in _BOT_AUTHORS:
upsert_contributor(conn, author_name, None, "extractor", today)
agents_found.add(author_name)
if not agents_found:
row = conn.execute("SELECT agent FROM prs WHERE number = ?", (pr_number,)).fetchone()

View file

@ -18,6 +18,7 @@ from . import config, db
from .eval_parse import classify_issues
from .feedback import format_rejection_comment
from .forgejo import api as forgejo_api, get_agent_token, repo_path
from .github_feedback import on_closed, on_eval_complete
from .pr_state import close_pr
logger = logging.getLogger("pipeline.eval_actions")
@ -81,6 +82,11 @@ async def terminate_pr(conn, pr_number: int, reason: str):
logger.warning("PR #%d: Forgejo close failed — skipping source requeue, will retry next cycle", pr_number)
return
try:
await on_closed(conn, pr_number, reason=reason)
except Exception:
logger.exception("PR #%d: GitHub close feedback failed (non-fatal)", pr_number)
# Tag source for re-extraction with feedback
cursor = conn.execute(
"""UPDATE sources SET status = 'needs_reextraction',

View file

@ -41,6 +41,7 @@ from .forgejo import get_agent_token, get_pr_diff, repo_path
from .merge import PIPELINE_OWNED_PREFIXES
from .llm import run_batch_domain_review, run_domain_review, run_leo_review, triage_pr
from .eval_actions import dispose_rejected_pr, post_formal_approvals, terminate_pr
from .github_feedback import on_eval_complete
from .pr_state import approve_pr, close_pr, reopen_pr, start_review
from .validate import load_existing_claims
@ -267,6 +268,12 @@ async def evaluate_pr(conn, pr_number: int, tier: str = None) -> dict:
# Disposition: check if this PR should be terminated or kept open
await dispose_rejected_pr(conn, pr_number, eval_attempts, domain_issues)
try:
await on_eval_complete(conn, pr_number, outcome="rejected",
review_text=domain_review, issues=domain_issues)
except Exception:
logger.exception("PR #%d: GitHub eval feedback failed (non-fatal)", pr_number)
if domain_verdict != "skipped":
pr_cost += costs.record_usage(
conn, config.EVAL_DOMAIN_MODEL, "eval_domain",
@ -358,6 +365,12 @@ async def evaluate_pr(conn, pr_number: int, tier: str = None) -> dict:
logger.info("PR #%d: APPROVED + auto_merge (agent branch %s)", pr_number, branch_name)
else:
logger.info("PR #%d: APPROVED (tier=%s, leo=%s, domain=%s)", pr_number, tier, leo_verdict, domain_verdict)
try:
await on_eval_complete(conn, pr_number, outcome="approved",
review_text=leo_review or domain_review)
except Exception:
logger.exception("PR #%d: GitHub eval feedback failed (non-fatal)", pr_number)
else:
# Collect all issue tags from both reviews
all_issues = []
@ -398,6 +411,12 @@ async def evaluate_pr(conn, pr_number: int, tier: str = None) -> dict:
# Disposition: check if this PR should be terminated or kept open
await dispose_rejected_pr(conn, pr_number, eval_attempts, all_issues)
try:
await on_eval_complete(conn, pr_number, outcome="rejected",
review_text=leo_review or domain_review, issues=all_issues)
except Exception:
logger.exception("PR #%d: GitHub eval feedback failed (non-fatal)", pr_number)
# Record cost (only for reviews that actually ran)
if domain_verdict != "skipped":
pr_cost += costs.record_usage(

185
lib/github_feedback.py Normal file
View file

@ -0,0 +1,185 @@
"""GitHub PR feedback — posts pipeline status to GitHub PRs for external contributors.
Three touchpoints:
1. Discovery ack: when pipeline discovers a mirrored PR
2. Eval review: when evaluation completes (approved or rejected with reasoning)
3. Merge/close outcome: when PR is merged or permanently closed
Only fires for PRs with a github_pr link (set by sync-mirror.sh).
All calls are non-fatal — GitHub feedback never blocks the pipeline.
"""
import logging
import os
import aiohttp
from . import config
logger = logging.getLogger("pipeline.github_feedback")
GITHUB_API = "https://api.github.com"
GITHUB_REPO = "living-ip/teleo-codex"
_BOT_ACCOUNTS = frozenset({"m3taversal", "teleo-bot", "teleo", "github-actions[bot]"})
def _github_pat() -> str | None:
    """Return the GitHub personal access token, or None if unavailable.

    Prefers the on-disk secret (config.SECRETS_DIR / "github-pat"); falls
    back to the GITHUB_PAT environment variable.
    """
    token_path = config.SECRETS_DIR / "github-pat"
    if token_path.exists():
        return token_path.read_text().strip()
    return os.environ.get("GITHUB_PAT")
async def _post_comment(github_pr: int, body: str) -> bool:
    """Post *body* as an issue comment on GitHub PR #github_pr.

    Returns True on success, False on any failure. Never raises — all
    errors are logged so feedback can stay non-fatal to the pipeline.
    """
    token = _github_pat()
    if not token:
        logger.warning("No GitHub PAT — skipping feedback for GH PR #%d", github_pr)
        return False
    endpoint = f"{GITHUB_API}/repos/{GITHUB_REPO}/issues/{github_pr}/comments"
    request_headers = {
        "Authorization": f"Bearer {token}",
        "Accept": "application/vnd.github+json",
        "X-GitHub-Api-Version": "2022-11-28",
    }
    try:
        async with aiohttp.ClientSession() as http:
            async with http.post(
                endpoint,
                headers=request_headers,
                json={"body": body},
                timeout=aiohttp.ClientTimeout(total=30),
            ) as response:
                if response.status < 400:
                    logger.info("GitHub comment posted on PR #%d", github_pr)
                    return True
                detail = await response.text()
                logger.error("GitHub comment on PR #%d failed: %d %s", github_pr, response.status, detail[:200])
                return False
    except Exception:
        logger.exception("GitHub comment on PR #%d failed", github_pr)
        return False
async def _close_github_pr(github_pr: int) -> bool:
    """Close GitHub PR #github_pr via the pulls API.

    Returns True on success, False on any failure (missing PAT, HTTP
    error, or network exception). Never raises.
    """
    token = _github_pat()
    if not token:
        return False
    endpoint = f"{GITHUB_API}/repos/{GITHUB_REPO}/pulls/{github_pr}"
    request_headers = {
        "Authorization": f"Bearer {token}",
        "Accept": "application/vnd.github+json",
        "X-GitHub-Api-Version": "2022-11-28",
    }
    try:
        async with aiohttp.ClientSession() as http:
            async with http.patch(
                endpoint,
                headers=request_headers,
                json={"state": "closed"},
                timeout=aiohttp.ClientTimeout(total=30),
            ) as response:
                if response.status < 400:
                    logger.info("GitHub PR #%d closed", github_pr)
                    return True
                detail = await response.text()
                logger.error("GitHub close PR #%d failed: %d %s", github_pr, response.status, detail[:200])
                return False
    except Exception:
        logger.exception("GitHub close PR #%d failed", github_pr)
        return False
def _get_github_pr(conn, forgejo_pr: int) -> int | None:
row = conn.execute(
"SELECT github_pr FROM prs WHERE number = ? AND github_pr IS NOT NULL",
(forgejo_pr,),
).fetchone()
return row["github_pr"] if row else None
async def on_discovery(conn, forgejo_pr: int):
    """Acknowledge a newly discovered mirrored PR on its GitHub counterpart.

    No-op when the Forgejo PR has no github_pr link.
    """
    gh_pr = _get_github_pr(conn, forgejo_pr)
    if not gh_pr:
        return
    message = (
        "Your contribution has been received by the Teleo evaluation pipeline. "
        "It's queued for automated review (priority: high).\n\n"
        "You'll receive updates here as it progresses through evaluation.\n\n"
        "_Automated message from the [LivingIP](https://livingip.xyz) pipeline._"
    )
    await _post_comment(gh_pr, message)
async def on_eval_complete(conn, forgejo_pr: int, *, outcome: str, review_text: str | None = None, issues: list[str] | None = None):
    """Post the evaluation result to the linked GitHub PR.

    Args:
        conn: database connection used to resolve the github_pr link.
        forgejo_pr: Forgejo PR number being evaluated.
        outcome: 'approved', 'rejected', or any other label (rendered verbatim).
        review_text: optional review body, truncated to 3000 chars.
        issues: optional list of issue tags shown on rejection.

    No-op when the PR has no github_pr link. Fix: annotations were
    implicit-Optional (`str = None`, `list[str] = None`); now explicit
    `| None` per PEP 484, matching the file's other signatures.
    """
    gh_pr = _get_github_pr(conn, forgejo_pr)
    if not gh_pr:
        return
    if outcome == "approved":
        body = "**Evaluation: Approved**\n\nYour contribution passed automated review and is queued for merge."
        if review_text:
            # Collapse long reviews behind a <details> fold to keep the PR readable.
            body += f"\n\n<details>\n<summary>Review details</summary>\n\n{review_text[:3000]}\n\n</details>"
    elif outcome == "rejected":
        body = "**Evaluation: Changes Requested**\n\n"
        if issues:
            body += "Issues found:\n"
            for issue in issues:
                body += f"- {issue}\n"
        if review_text:
            body += f"\n<details>\n<summary>Full review</summary>\n\n{review_text[:3000]}\n\n</details>"
        body += (
            "\n\nThe pipeline will attempt automated fixes where possible. "
            "If fixes fail, the PR will be closed — you're welcome to resubmit."
        )
    else:
        # Unknown outcome label: render it as-is with the raw review text.
        body = f"**Evaluation: {outcome}**\n\n"
        if review_text:
            body += review_text[:3000]
    body += "\n\n_Automated message from the [LivingIP](https://livingip.xyz) pipeline._"
    await _post_comment(gh_pr, body)
async def on_merged(conn, forgejo_pr: int, *, claims_count: int | None = None):
    """Post a merge confirmation to the linked GitHub PR, then close it.

    Args:
        conn: database connection used to resolve the github_pr link.
        forgejo_pr: Forgejo PR number that was merged.
        claims_count: optional number of claims added; mentioned when > 0.

    No-op when the PR has no github_pr link. Fix: `claims_count: int = None`
    was an implicit-Optional annotation; now `int | None` per PEP 484.
    """
    gh_pr = _get_github_pr(conn, forgejo_pr)
    if not gh_pr:
        return
    body = "**Merged!** Your contribution has been merged into the knowledge base."
    if claims_count and claims_count > 0:
        body += f" ({claims_count} claim{'s' if claims_count != 1 else ''} added)"
    body += (
        "\n\nThank you for contributing to LivingIP. "
        "Your attribution has been recorded.\n\n"
        "_Automated message from the [LivingIP](https://livingip.xyz) pipeline._"
    )
    await _post_comment(gh_pr, body)
    # Close the mirror PR on GitHub — the change now lives in the main repo.
    await _close_github_pr(gh_pr)
async def on_closed(conn, forgejo_pr: int, *, reason: str | None = None):
    """Post a closure notification to the linked GitHub PR, then close it.

    Args:
        conn: database connection used to resolve the github_pr link.
        forgejo_pr: Forgejo PR number that was closed.
        reason: optional human-readable closure reason; a generic line is
            used when absent.

    No-op when the PR has no github_pr link. Fix: `reason: str = None`
    was an implicit-Optional annotation; now `str | None` per PEP 484.
    """
    gh_pr = _get_github_pr(conn, forgejo_pr)
    if not gh_pr:
        return
    body = "**Closed.** "
    if reason:
        body += reason
    else:
        body += "This PR was closed after evaluation."
    body += (
        "\n\nYou're welcome to resubmit with changes. "
        "See the evaluation feedback above for guidance.\n\n"
        "_Automated message from the [LivingIP](https://livingip.xyz) pipeline._"
    )
    await _post_comment(gh_pr, body)
    # Mirror the closure back to GitHub so the contributor's PR doesn't dangle.
    await _close_github_pr(gh_pr)

View file

@ -46,6 +46,7 @@ for _prefix in PIPELINE_OWNED_PREFIXES:
from .cascade import cascade_after_merge
from .cross_domain import cross_domain_after_merge
from .forgejo import get_agent_token, get_pr_diff, repo_path
from .github_feedback import on_discovery, on_merged, on_closed
logger = logging.getLogger("pipeline.merge")
@ -146,6 +147,12 @@ async def discover_external_prs(conn) -> int:
if origin == "human":
await _post_ack_comment(pr["number"])
# GitHub PR feedback for mirrored PRs
try:
await on_discovery(conn, pr["number"])
except Exception:
logger.exception("PR #%d: GitHub discovery feedback failed (non-fatal)", pr["number"])
discovered += 1
if len(prs) < 50:
@ -860,6 +867,12 @@ async def _merge_domain_queue(conn, domain: str) -> tuple[int, int]:
except Exception:
logger.exception("PR #%d: cross_domain failed (non-fatal)", pr_num)
# GitHub PR feedback: notify contributor of merge
try:
await on_merged(conn, pr_num)
except Exception:
logger.exception("PR #%d: GitHub merge feedback failed (non-fatal)", pr_num)
conn.commit() # Commit DB writes before slow branch deletion
# Delete remote branch immediately (Ganymede Q4)