Imports 67 files from VPS (/opt/teleo-eval/) into repo as the single source of truth. Previously only 8 of 67 files existed in repo — the rest were deployed directly to VPS via SCP, causing massive drift. Includes: - pipeline/lib/: 33 Python modules (daemon core, extraction, evaluation, merge, cascade, cross-domain, costs, attribution, etc.) - pipeline/: main daemon (teleo-pipeline.py), reweave.py, batch-extract-50.sh - diagnostics/: 19 files (4-page dashboard, alerting, daily digest, review queue, tier1 metrics) - agent-state/: bootstrap, lib-state, cascade inbox processor, schema - systemd/: service unit files for reference - deploy.sh: rsync-based deploy with --dry-run, syntax checks, dirty-tree gate - research-session.sh: updated with Step 8.5 digest + cascade inbox processing No new code written — all files are exact copies from VPS as of 2026-04-06. From this point forward: edit in repo, commit, then deploy.sh. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
57 lines
1.9 KiB
Python
57 lines
1.9 KiB
Python
"""Tier 1 Metrics — API routes for Argus dashboard.

Four endpoints:

GET /api/yield — extraction yield per agent per day
GET /api/cost-per-claim — cost per merged claim per day + stage breakdown
GET /api/fix-rates — fix success rate by rejection tag
GET /api/compute-profile — full compute telemetry (cache, latency, cost estimates)

All accept ?days=N (default 30) to control lookback window.

Owner: Argus <69AF7290-758F-464B-B472-04AFCA4AB340>
"""

from aiohttp import web

from tier1_metrics import cost_per_merged_claim, compute_profile, extraction_yield, fix_success_by_tag


def _parse_days(request, default=30):
|
|
"""Parse and clamp ?days= parameter. Returns 1..365."""
|
|
try:
|
|
days = int(request.query.get("days", str(default)))
|
|
except (ValueError, TypeError):
|
|
days = default
|
|
return max(1, min(days, 365))
|
|
|
|
|
|
async def handle_yield(request):
    """GET /api/yield — extraction yield per agent per day (?days=N)."""
    window = _parse_days(request)
    db = request.app["_get_conn"]()
    return web.json_response(extraction_yield(db, window))


async def handle_cost_per_claim(request):
    """GET /api/cost-per-claim — cost per merged claim per day + stage breakdown."""
    window = _parse_days(request)
    db = request.app["_get_conn"]()
    return web.json_response(cost_per_merged_claim(db, window))


async def handle_fix_rates(request):
    """GET /api/fix-rates — fix success rate by rejection tag (?days=N)."""
    window = _parse_days(request)
    db = request.app["_get_conn"]()
    return web.json_response(fix_success_by_tag(db, window))


async def handle_compute_profile(request):
    """GET /api/compute-profile — full compute telemetry (?days=N)."""
    window = _parse_days(request)
    db = request.app["_get_conn"]()
    return web.json_response(compute_profile(db, window))


def register_tier1_routes(app: web.Application, get_conn):
    """Attach the four Tier-1 metric endpoints to *app*.

    *get_conn* is a zero-argument callable returning a DB connection; it
    is stashed on the app so each handler can fetch one per request.
    """
    app["_get_conn"] = get_conn
    # Table-driven registration keeps path/handler pairs in one place.
    endpoints = (
        ("/api/yield", handle_yield),
        ("/api/cost-per-claim", handle_cost_per_claim),
        ("/api/fix-rates", handle_fix_rates),
        ("/api/compute-profile", handle_compute_profile),
    )
    for path, handler in endpoints:
        app.router.add_get(path, handler)