Phase 1 — Audit logging infrastructure: - review_records table (migration v12) capturing every eval verdict with outcome, rejection reason, disagreement type - Cascade automation: auto-flag dependent beliefs/positions when merged claims change - Merge frontmatter stamps: last_review metadata on merged claim files Phase 2 — Cross-domain and state tracking: - Cross-domain citation index: entity overlap detection across domains on every merge - Agent-state schema v1: file-backed state for VPS agents (memory, tasks, inbox, metrics) - Cascade completion tracking: process-cascade-inbox.py logs review outcomes - research-session.sh: state hooks + cascade processing integration All changes are live on VPS. This commit brings the code under version control for review. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
258 lines
6.4 KiB
Bash
Executable file
258 lines
6.4 KiB
Bash
Executable file
#!/bin/bash
# lib-state.sh — Bash helpers for reading/writing agent state files.
# Source this in pipeline scripts: source ops/agent-state/lib-state.sh
#
# All writes use atomic rename (write to .tmp, then mv) to prevent corruption.
# All reads return valid JSON or empty string on missing/corrupt files.

# Root of all per-agent state directories; override with TELEO_STATE_ROOT.
STATE_ROOT="${TELEO_STATE_ROOT:-/opt/teleo-eval/agent-state}"
# --- Internal helpers ---
|
|
|
|
# Print the state directory path for the named agent.
_state_dir() {
  local agent_name="$1"
  printf '%s/%s\n' "$STATE_ROOT" "$agent_name"
}
|
|
|
|
# Atomic write: write to a same-directory tmp file, then rename over the
# target. Prevents readers from ever seeing a partially written file.
#
# Arguments:
#   $1 - destination file path
#   $2 - content to write (a trailing newline is appended)
_atomic_write() {
  local filepath="$1"
  local content="$2"
  # PID suffix keeps concurrent writers from clobbering each other's tmp file.
  local tmpfile="${filepath}.tmp.$$"
  # printf, not echo: content beginning with "-n"/"-e" must be written
  # literally, not swallowed as echo options.
  printf '%s\n' "$content" > "$tmpfile"
  mv -f "$tmpfile" "$filepath"
}
|
|
|
|
# --- Report (current status) ---
|
|
|
|
# Print the agent's current report.json; fall back to an empty JSON
# object when the file is missing (or cannot be read).
state_read_report() {
  local report_file
  report_file="$(_state_dir "$1")/report.json"
  { [ -f "$report_file" ] && cat "$report_file"; } || echo "{}"
}
|
|
|
|
# Update status/summary in an agent's report.json, preserving other fields.
#
# Arguments:
#   $1 - agent name
#   $2 - new status string
#   $3 - new summary string
state_update_report() {
  local agent="$1"
  local status="$2"
  local summary="$3"
  local file
  file="$(_state_dir "$agent")/report.json"

  # Read existing report and merge updates using python (available on VPS).
  # Values are passed via the environment rather than interpolated into the
  # python source: a quote, backslash or newline in $summary would otherwise
  # break the script (or inject arbitrary python).
  AGENT="$agent" STATUS="$status" SUMMARY="$summary" REPORT_FILE="$file" \
  python3 -c "
import json, os

env = os.environ
try:
    with open(env['REPORT_FILE']) as f:
        data = json.load(f)
except (OSError, ValueError):
    # Missing or corrupt report: start a fresh one for this agent.
    data = {'agent': env['AGENT']}
data['status'] = env['STATUS']
data['summary'] = env['SUMMARY']
data['updated_at'] = '$(date -u +%Y-%m-%dT%H:%M:%SZ)'
print(json.dumps(data, indent=2))
" | _atomic_write_stdin "$file"
}
|
|
|
|
# Variant of _atomic_write that takes the full content from stdin.
_atomic_write_stdin() {
  local target="$1"
  local staging="${target}.tmp.$$"
  cat > "$staging"
  mv -f "$staging" "$target"
}
|
|
|
|
# Full report update with session info (called at session end).
#
# Arguments:
#   $1  - agent name             $2  - status
#   $3  - summary                $4  - session id
#   $5  - started_at (ISO 8601)  $6  - ended_at (ISO 8601)
#   $7  - outcome                $8  - sources archived (integer)
#   $9  - branch                 $10 - PR number ("null" or integer)
#   $11 - next priority (optional; default "null" -> JSON null)
state_finalize_report() {
  local agent="$1"
  local status="$2"
  local summary="$3"
  local session_id="$4"
  local started_at="$5"
  local ended_at="$6"
  local outcome="$7"
  local sources="$8"
  local branch="$9"
  local pr_number="${10:-null}"
  local next_priority="${11:-null}"
  local file
  file="$(_state_dir "$agent")/report.json"

  # Values are passed via the environment, never interpolated into the
  # python source: quotes/newlines in $summary would otherwise break the
  # script, and raw numeric interpolation of $sources/$pr_number produced
  # invalid python ('null' is a NameError) when a value was empty.
  AGENT="$agent" STATUS="$status" SUMMARY="$summary" \
  SESSION_ID="$session_id" STARTED_AT="$started_at" ENDED_AT="$ended_at" \
  OUTCOME="$outcome" SOURCES="$sources" BRANCH="$branch" \
  PR_NUMBER="$pr_number" NEXT_PRIORITY="$next_priority" \
  python3 -c "
import json, os

env = os.environ

def as_int(value):
    # '', 'null' or garbage all collapse to JSON null.
    try:
        return int(value)
    except (TypeError, ValueError):
        return None

next_priority = env['NEXT_PRIORITY']
data = {
    'agent': env['AGENT'],
    'updated_at': env['ENDED_AT'],
    'status': env['STATUS'],
    'summary': env['SUMMARY'],
    'current_task': None,
    'last_session': {
        'id': env['SESSION_ID'],
        'started_at': env['STARTED_AT'],
        'ended_at': env['ENDED_AT'],
        'outcome': env['OUTCOME'],
        'sources_archived': as_int(env['SOURCES']),
        'branch': env['BRANCH'],
        'pr_number': as_int(env['PR_NUMBER']),
    },
    'blocked_by': None,
    'next_priority': None if next_priority == 'null' else next_priority,
}
print(json.dumps(data, indent=2))
" | _atomic_write_stdin "$file"
}
|
|
|
|
# --- Session ---
|
|
|
|
# Create session.json marking a new running session; echoes started_at so
# the caller can record it.
#
# Arguments:
#   $1 - agent name   $2 - session id   $3 - session type
#   $4 - domain       $5 - branch
#   $6 - model (default "sonnet")
#   $7 - timeout in seconds (default 5400)
state_start_session() {
  local agent="$1"
  local session_id="$2"
  local type="$3"
  local domain="$4"
  local branch="$5"
  local model="${6:-sonnet}"
  local timeout="${7:-5400}"
  local started_at
  started_at="$(date -u +%Y-%m-%dT%H:%M:%SZ)"
  local file
  file="$(_state_dir "$agent")/session.json"

  # Values go in via the environment, not source interpolation, so quotes
  # or newlines in any argument cannot break/inject the python. A
  # non-numeric timeout falls back to the default instead of producing
  # invalid python.
  AGENT="$agent" SESSION_ID="$session_id" STARTED_AT="$started_at" \
  TYPE="$type" DOMAIN="$domain" BRANCH="$branch" MODEL="$model" \
  TIMEOUT="$timeout" \
  python3 -c "
import json, os

env = os.environ
try:
    timeout_seconds = int(env['TIMEOUT'])
except ValueError:
    timeout_seconds = 5400

data = {
    'agent': env['AGENT'],
    'session_id': env['SESSION_ID'],
    'started_at': env['STARTED_AT'],
    'ended_at': None,
    'type': env['TYPE'],
    'domain': env['DOMAIN'],
    'branch': env['BRANCH'],
    'status': 'running',
    'model': env['MODEL'],
    'timeout_seconds': timeout_seconds,
    'research_question': None,
    'belief_targeted': None,
    'disconfirmation_target': None,
    'sources_archived': 0,
    'sources_expected': 0,
    'tokens_used': None,
    'cost_usd': None,
    'errors': [],
    'handoff_notes': None,
}
print(json.dumps(data, indent=2))
" | _atomic_write_stdin "$file"

  echo "$started_at"
}
|
|
|
|
# Mark the current session finished in session.json.
#
# Arguments:
#   $1 - agent name
#   $2 - outcome (written to 'status')
#   $3 - sources archived (integer, default 0)
#   $4 - PR number (accepted for interface compatibility; currently unused)
state_end_session() {
  local agent="$1"
  local outcome="$2"
  local sources="${3:-0}"
  local pr_number="${4:-null}"
  local file
  file="$(_state_dir "$agent")/session.json"

  # env-passing instead of source interpolation (quote-safety / injection).
  AGENT="$agent" OUTCOME="$outcome" SOURCES="$sources" SESSION_FILE="$file" \
  python3 -c "
import json, os

env = os.environ
try:
    with open(env['SESSION_FILE']) as f:
        data = json.load(f)
except (OSError, ValueError):
    # Missing/corrupt session file: still record the ending. Previously a
    # python traceback here piped empty output into the atomic writer and
    # truncated the file.
    data = {'agent': env['AGENT']}

try:
    sources_archived = int(env['SOURCES'])
except ValueError:
    sources_archived = 0

data['ended_at'] = '$(date -u +%Y-%m-%dT%H:%M:%SZ)'
data['status'] = env['OUTCOME']
data['sources_archived'] = sources_archived
print(json.dumps(data, indent=2))
" | _atomic_write_stdin "$file"
}
|
|
|
|
# --- Journal (append-only JSONL) ---
|
|
|
|
# Append one event record to the agent's append-only journal (JSONL).
#
# Arguments:
#   $1   - agent name
#   $2   - event name
#   $3.. - extra key=value pairs merged into the record
state_journal_append() {
  local agent="$1"
  local event="$2"
  shift 2
  # Remaining args are key=value pairs for extra fields.
  local file
  file="$(_state_dir "$agent")/journal.jsonl"
  # Build the line with json.dumps so quotes/backslashes in the event name
  # or values cannot produce an invalid JSONL record (the previous
  # string-concatenation approach did no escaping at all). Extras are
  # forwarded as argv; the key is everything before the first '='.
  JOURNAL_EVENT="$event" \
  python3 -c "
import json, os, sys

record = {
    'ts': '$(date -u +%Y-%m-%dT%H:%M:%SZ)',
    'event': os.environ['JOURNAL_EVENT'],
}
for pair in sys.argv[1:]:
    key, _, value = pair.partition('=')
    record[key] = value
print(json.dumps(record))
" "$@" >> "$file"
}
|
|
|
|
# --- Metrics ---
|
|
|
|
# Increment lifetime session/source counters in metrics.json.
#
# Arguments:
#   $1 - agent name
#   $2 - outcome: "completed" | "timeout" | "error" (others only bump total)
#   $3 - sources archived this session (integer, default 0)
state_update_metrics() {
  local agent="$1"
  local outcome="$2"
  local sources="${3:-0}"
  local file
  file="$(_state_dir "$agent")/metrics.json"

  # env-passing instead of source interpolation (quote-safety / injection);
  # a non-numeric $sources no longer produces invalid python.
  AGENT="$agent" OUTCOME="$outcome" SOURCES="$sources" METRICS_FILE="$file" \
  python3 -c "
import json, os

env = os.environ
try:
    with open(env['METRICS_FILE']) as f:
        data = json.load(f)
except (OSError, ValueError):
    # First session for this agent (or corrupt file): start fresh.
    data = {'agent': env['AGENT'], 'lifetime': {}, 'rolling_30d': {}}

try:
    sources = int(env['SOURCES'])
except ValueError:
    sources = 0

outcome = env['OUTCOME']
lt = data.setdefault('lifetime', {})
lt['sessions_total'] = lt.get('sessions_total', 0) + 1
# Per-outcome counters; unknown outcomes only increment the total.
if outcome == 'completed':
    lt['sessions_completed'] = lt.get('sessions_completed', 0) + 1
elif outcome == 'timeout':
    lt['sessions_timeout'] = lt.get('sessions_timeout', 0) + 1
elif outcome == 'error':
    lt['sessions_error'] = lt.get('sessions_error', 0) + 1
lt['sources_archived'] = lt.get('sources_archived', 0) + sources

data['updated_at'] = '$(date -u +%Y-%m-%dT%H:%M:%SZ)'
print(json.dumps(data, indent=2))
" | _atomic_write_stdin "$file"
}
|
|
|
|
# --- Inbox ---
|
|
|
|
# List pending inbox message files for an agent (full paths, one per line).
# Prints nothing when the inbox is absent or empty; always returns 0.
state_check_inbox() {
  local agent="$1"
  local inbox
  inbox="$(_state_dir "$agent")/inbox"
  [ -d "$inbox" ] || return 0
  # Glob instead of parsing ls output: safe for unusual filenames and no
  # stderr noise when no messages match.
  local msg
  for msg in "$inbox"/*.json; do
    [ -e "$msg" ] || continue  # unmatched glob leaves the literal pattern
    printf '%s\n' "$msg"
  done
}
|
|
|
|
# Deliver a message file into another agent's inbox; echoes the message id.
#
# Arguments:
#   $1 - sender agent   $2 - recipient agent   $3 - message type
#   $4 - subject        $5 - body
state_send_message() {
  local from="$1"
  local to="$2"
  local type="$3"
  local subject="$4"
  local body="$5"
  local inbox
  inbox="$(_state_dir "$to")/inbox"
  # Timestamp + PID keeps ids unique across concurrent senders.
  local msg_id="msg-$(date +%s)-$$"
  local file="$inbox/${msg_id}.json"

  mkdir -p "$inbox"
  # env-passing instead of triple-quoted interpolation: quotes, backslashes
  # or ''' sequences in subject/body previously broke (or injected code
  # into) the python.
  MSG_ID="$msg_id" MSG_FROM="$from" MSG_TO="$to" MSG_TYPE="$type" \
  MSG_SUBJECT="$subject" MSG_BODY="$body" \
  python3 -c "
import json, os

env = os.environ
data = {
    'id': env['MSG_ID'],
    'from': env['MSG_FROM'],
    'to': env['MSG_TO'],
    'created_at': '$(date -u +%Y-%m-%dT%H:%M:%SZ)',
    'type': env['MSG_TYPE'],
    'priority': 'normal',
    'subject': env['MSG_SUBJECT'],
    'body': env['MSG_BODY'],
    'source_ref': None,
    'expires_at': None,
}
print(json.dumps(data, indent=2))
" | _atomic_write_stdin "$file"
  echo "$msg_id"
}
|
|
|
|
# --- State directory check ---
|
|
|
|
# Verify that an agent's state directory exists.
# Returns 0 when present; prints an error to stderr and returns 1 otherwise.
state_ensure_dir() {
  local agent="$1"
  local dir
  dir="$(_state_dir "$agent")"
  [ -d "$dir" ] && return 0
  echo "ERROR: Agent state not initialized for $agent. Run bootstrap.sh first." >&2
  return 1
}
|