sync VPS state: portfolio dashboard + fetch_coins.py
Some checks are pending
CI / lint-and-test (push) Waiting to run
Some checks are pending
CI / lint-and-test (push) Waiting to run
Pull live app.py from VPS to close 243-line drift. Add portfolio dashboard (renamed from v2), portfolio nav link, and fetch_coins.py (daily cron script for ownership coin data). Delete stale lib/ copy. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
cde92d3db1
commit
ddf3c25e88
4 changed files with 1238 additions and 0 deletions
|
|
@ -2277,6 +2277,9 @@ def create_app() -> web.Application:
|
|||
register_dashboard_routes(app, lambda: _conn_from_app(app))
|
||||
register_review_queue_routes(app)
|
||||
register_daily_digest_routes(app, db_path=str(DB_PATH))
|
||||
# Portfolio
|
||||
from dashboard_portfolio import register_portfolio_routes
|
||||
register_portfolio_routes(app, lambda: _conn_from_app(app))
|
||||
# Response audit - cost tracking + reasoning traces
|
||||
app["db_path"] = str(DB_PATH)
|
||||
register_response_audit_routes(app)
|
||||
|
|
|
|||
402
diagnostics/dashboard_portfolio.py
Normal file
402
diagnostics/dashboard_portfolio.py
Normal file
|
|
@ -0,0 +1,402 @@
|
|||
"""Portfolio dashboard — fixes empty chart by:
|
||||
1. Computing NAV server-side in the history API (not client-side from nulls)
|
||||
2. Only returning dates with valid NAV data
|
||||
3. Showing data points when sparse
|
||||
"""
|
||||
|
||||
import json
|
||||
import sqlite3
|
||||
import logging
|
||||
from html import escape as esc
|
||||
from datetime import datetime
|
||||
|
||||
from aiohttp import web
|
||||
from shared_ui import render_page
|
||||
|
||||
logger = logging.getLogger("argus.portfolio")
|
||||
|
||||
CSS = """
|
||||
.hero-chart { background: #161b22; border: 1px solid #30363d; border-radius: 8px; padding: 20px; margin-bottom: 20px; }
|
||||
.hero-chart h2 { color: #c9d1d9; font-size: 18px; margin-bottom: 12px; }
|
||||
.range-btns { display: flex; gap: 4px; margin-bottom: 12px; }
|
||||
.range-btn { background: #21262d; border: 1px solid #30363d; color: #8b949e; padding: 5px 14px;
|
||||
border-radius: 4px; cursor: pointer; font-size: 12px; }
|
||||
.range-btn.active { background: #1f6feb33; border-color: #58a6ff; color: #58a6ff; }
|
||||
.ptable-wrap { overflow-x: auto; margin-top: 20px; }
|
||||
.ptable { width: 100%; border-collapse: collapse; font-size: 13px; }
|
||||
.ptable th { background: #161b22; color: #8b949e; font-size: 11px; text-transform: uppercase;
|
||||
letter-spacing: 0.5px; padding: 10px 12px; text-align: right; border-bottom: 1px solid #30363d;
|
||||
cursor: pointer; user-select: none; white-space: nowrap; }
|
||||
.ptable th:first-child { text-align: left; position: sticky; left: 0; background: #161b22; z-index: 1; }
|
||||
.ptable th:hover { color: #c9d1d9; }
|
||||
.ptable th.sorted-asc::after { content: ' \\25B2'; font-size: 9px; }
|
||||
.ptable th.sorted-desc::after { content: ' \\25BC'; font-size: 9px; }
|
||||
.ptable td { padding: 10px 12px; text-align: right; border-bottom: 1px solid #21262d; color: #c9d1d9; }
|
||||
.ptable td:first-child { text-align: left; position: sticky; left: 0; background: #0d1117; z-index: 1; font-weight: 600; }
|
||||
.ptable tr:hover td { background: #161b22; }
|
||||
.ptable tr:hover td:first-child { background: #161b22; }
|
||||
.summary-row td { font-weight: 700; border-top: 2px solid #30363d; background: #161b22 !important; }
|
||||
.premium { color: #f85149; }
|
||||
.discount { color: #3fb950; }
|
||||
.near-nav { color: #d29922; }
|
||||
"""
|
||||
|
||||
|
||||
def _fmt_usd(v):
|
||||
if v is None:
|
||||
return '\u2014'
|
||||
if abs(v) >= 1_000_000:
|
||||
return f'${v / 1_000_000:.1f}M'
|
||||
if abs(v) >= 1_000:
|
||||
return f'${v / 1_000:.0f}K'
|
||||
return f'${v:,.0f}'
|
||||
|
||||
|
||||
def _fmt_price(v):
|
||||
if v is None:
|
||||
return '\u2014'
|
||||
if v >= 100:
|
||||
return f'${v:,.0f}'
|
||||
if v >= 1:
|
||||
return f'${v:.2f}'
|
||||
if v >= 0.01:
|
||||
return f'${v:.4f}'
|
||||
return f'${v:.6f}'
|
||||
|
||||
|
||||
def _fmt_ratio(v):
|
||||
if v is None or v == 0:
|
||||
return '\u2014'
|
||||
return f'{v:.2f}x'
|
||||
|
||||
|
||||
def _ratio_class(v):
|
||||
if v is None or v == 0:
|
||||
return ''
|
||||
if v > 1.5:
|
||||
return 'premium'
|
||||
if v < 0.9:
|
||||
return 'discount'
|
||||
if v <= 1.1:
|
||||
return 'near-nav'
|
||||
return ''
|
||||
|
||||
|
||||
def render_portfolio_page(coins: list[dict], now: datetime) -> str:
    """Render the full portfolio HTML page.

    Args:
        coins: snapshot row dicts already enriched with treasury_usd,
               nav_per_token and price_nav_ratio (see handle_portfolio_page).
        now: timestamp displayed via render_page (formatted as UTC).

    Returns:
        Complete HTML document produced by shared_ui.render_page.
    """
    # Empty state: no snapshots ingested yet.
    if not coins:
        body = '<div style="padding:40px;text-align:center;color:#8b949e;">No coin data yet.</div>'
        return render_page("Portfolio", "Ownership coin portfolio", "/portfolio", body,
                           extra_css=CSS, timestamp=now.strftime("%Y-%m-%d %H:%M UTC"))

    # Portfolio-wide totals for the summary row (None values count as 0).
    total_mcap = sum(c.get('market_cap_usd') or 0 for c in coins)
    total_treasury = sum(c.get('treasury_usd') or 0 for c in coins)

    # Chart container + range selector; the canvas is populated client-side
    # by loadChart() in the scripts block below (90d is the default range).
    hero_chart = """
<div class="hero-chart">
<h2>Price / NAV per Token</h2>
<div class="range-btns">
<button class="range-btn" onclick="setRange(this, 30)">30d</button>
<button class="range-btn active" onclick="setRange(this, 90)">90d</button>
<button class="range-btn" onclick="setRange(this, 180)">180d</button>
<button class="range-btn" onclick="setRange(this, 365)">All</button>
</div>
<canvas id="ratio-chart" height="320" style="max-height:320px"></canvas>
</div>
"""

    # Sortable table header; data-col attributes are informational — the JS
    # sorter keys off column index, not these names.
    header = """<div class="ptable-wrap"><table class="ptable" id="coin-table">
<thead><tr>
<th data-col="name">Coin</th>
<th data-col="price">Price</th>
<th data-col="nav">NAV / Token</th>
<th data-col="ratio">Price / NAV</th>
<th data-col="treasury">Treasury</th>
<th data-col="mcap">Market Cap</th>
</tr></thead><tbody>"""

    rows = ''
    for c in coins:
        name = c.get('name', '?')
        ticker = c.get('ticker', '')
        price = c.get('price_usd')
        nav = c.get('nav_per_token')
        ratio = c.get('price_nav_ratio')
        treasury = c.get('treasury_usd')
        mcap = c.get('market_cap_usd')

        # HTML-escape user-facing text; ticker rendered as a muted suffix.
        label = esc(name)
        if ticker:
            label += f' <span style="color:#8b949e;font-size:11px;">{esc(ticker)}</span>'

        rows += f"""<tr>
<td>{label}</td>
<td>{_fmt_price(price)}</td>
<td>{_fmt_price(nav)}</td>
<td class="{_ratio_class(ratio)}">{_fmt_ratio(ratio)}</td>
<td>{_fmt_usd(treasury)}</td>
<td>{_fmt_usd(mcap)}</td>
</tr>"""

    # Summary row is appended last; the JS sorter re-appends it after sorting
    # so it always stays pinned at the bottom.
    rows += f"""<tr class="summary-row">
<td>Total ({len(coins)})</td>
<td></td><td></td><td></td>
<td>{_fmt_usd(total_treasury)}</td>
<td>{_fmt_usd(total_mcap)}</td>
</tr>"""

    table = header + rows + '</tbody></table></div>'

    # Client-side behavior: ratio chart loading (Chart.js with the annotation
    # plugin — both presumably provided by render_page's base template; TODO
    # confirm) and column-sort for the table.
    scripts = """<script>
const COLORS = ['#58a6ff','#3fb950','#f0883e','#d29922','#f85149','#bc8cff','#39d353','#79c0ff','#ff7b72','#a5d6ff'];
let chart = null;

function setRange(btn, days) {
  document.querySelectorAll('.range-btn').forEach(b => b.classList.remove('active'));
  btn.classList.add('active');
  loadChart(days);
}

function loadChart(days) {
  fetch('/api/portfolio/nav-ratios?days=' + days)
    .then(r => r.json())
    .then(data => {
      const dates = data.dates || [];
      const series = data.series || {};

      if (dates.length === 0) {
        if (chart) chart.destroy();
        chart = null;
        const ctx = document.getElementById('ratio-chart').getContext('2d');
        ctx.fillStyle = '#8b949e';
        ctx.font = '14px sans-serif';
        ctx.textAlign = 'center';
        ctx.fillText('No NAV data yet — accumulating daily snapshots', ctx.canvas.width / 2, 160);
        return;
      }

      const sparse = dates.length <= 10;
      const datasets = [];
      let i = 0;
      for (const [name, ratios] of Object.entries(series)) {
        const hasData = ratios.some(v => v !== null);
        if (!hasData) { i++; continue; }
        datasets.push({
          label: name,
          data: ratios,
          borderColor: COLORS[i % COLORS.length],
          backgroundColor: COLORS[i % COLORS.length] + '33',
          borderWidth: 2,
          tension: 0.3,
          spanGaps: true,
          pointRadius: sparse ? 4 : 0,
          pointHoverRadius: 6,
          fill: false,
        });
        i++;
      }

      if (chart) chart.destroy();
      const ctx = document.getElementById('ratio-chart').getContext('2d');
      chart = new Chart(ctx, {
        type: 'line',
        data: { labels: dates, datasets },
        options: {
          responsive: true,
          maintainAspectRatio: false,
          interaction: { mode: 'index', intersect: false },
          plugins: {
            legend: { labels: { color: '#8b949e', font: { size: 11 }, usePointStyle: true, boxWidth: 8 }, position: 'top' },
            tooltip: { mode: 'index', intersect: false,
              callbacks: { label: ctx => ctx.dataset.label + ': ' + (ctx.parsed.y != null ? ctx.parsed.y.toFixed(2) + 'x' : 'n/a') }
            },
            annotation: {
              annotations: {
                navLine: {
                  type: 'line',
                  yMin: 1, yMax: 1,
                  borderColor: '#3fb95088',
                  borderWidth: 2,
                  borderDash: [6, 4],
                  label: {
                    display: true,
                    content: '1.0x = NAV',
                    position: 'end',
                    backgroundColor: '#3fb95033',
                    color: '#3fb950',
                    font: { size: 10 },
                  }
                }
              }
            }
          },
          scales: {
            x: { ticks: { color: '#8b949e', maxTicksLimit: 12 }, grid: { display: false } },
            y: { ticks: { color: '#8b949e', callback: v => v.toFixed(1) + 'x' }, grid: { color: '#21262d' },
              suggestedMin: 0 }
          }
        }
      });
    });
}

// Table sorting
function sortTable(col) {
  const table = document.getElementById('coin-table');
  const tbody = table.querySelector('tbody');
  const rows = Array.from(tbody.querySelectorAll('tr:not(.summary-row)'));
  const summaryRow = tbody.querySelector('.summary-row');
  const th = table.querySelectorAll('th')[col];
  const asc = th.classList.contains('sorted-asc');
  table.querySelectorAll('th').forEach(h => h.classList.remove('sorted-asc','sorted-desc'));
  th.classList.add(asc ? 'sorted-desc' : 'sorted-asc');
  rows.sort((a, b) => {
    let va = a.cells[col].textContent.replace(/[$,+%x\\u2014]/g,'').trim();
    let vb = b.cells[col].textContent.replace(/[$,+%x\\u2014]/g,'').trim();
    const na = parseFloat(va) || 0, nb = parseFloat(vb) || 0;
    if (col === 0) return asc ? vb.localeCompare(va) : va.localeCompare(vb);
    return asc ? na - nb : nb - na;
  });
  rows.forEach(r => tbody.appendChild(r));
  if (summaryRow) tbody.appendChild(summaryRow);
}
document.querySelectorAll('#coin-table th').forEach((th, i) => {
  th.addEventListener('click', () => sortTable(i));
});

loadChart(90);
</script>"""

    body = hero_chart + table
    return render_page("Portfolio", "Ownership coin portfolio", "/portfolio", body,
                       scripts=scripts, extra_css=CSS,
                       timestamp=now.strftime("%Y-%m-%d %H:%M UTC"))
|
||||
|
||||
|
||||
# ── API handlers ────────────────────────────────────────────────────────────
|
||||
|
||||
def _get_db(request):
|
||||
return request.app["_portfolio_conn"]()
|
||||
|
||||
|
||||
def _compute_nav(row):
|
||||
"""Compute NAV per token and Price/NAV ratio from a snapshot row dict."""
|
||||
treas = (row.get('treasury_multisig_usd') or 0) + (row.get('lp_usdc_total') or 0)
|
||||
adj = row.get('adjusted_circulating_supply') or 0
|
||||
price = row.get('price_usd') or 0
|
||||
nav = treas / adj if adj > 0 else 0
|
||||
ratio = price / nav if nav > 0 else 0
|
||||
return treas, nav, ratio
|
||||
|
||||
|
||||
async def handle_portfolio_page(request):
    """GET /portfolio — render the HTML page from the latest snapshot per coin."""
    conn = _get_db(request)
    try:
        # Latest snapshot date only, largest market caps first.
        rows = conn.execute("""
            SELECT * FROM coin_snapshots
            WHERE snapshot_date = (SELECT MAX(snapshot_date) FROM coin_snapshots)
            ORDER BY market_cap_usd DESC
        """).fetchall()
        coins = []
        for row in rows:
            coin = dict(row)
            # Enrich each row with the derived NAV fields the template reads.
            coin['treasury_usd'], coin['nav_per_token'], coin['price_nav_ratio'] = _compute_nav(coin)
            coins.append(coin)
        html = render_portfolio_page(coins, datetime.utcnow())
        return web.Response(text=html, content_type='text/html')
    finally:
        conn.close()
|
||||
|
||||
|
||||
async def handle_nav_ratios(request):
    """Server-side computed NAV ratios — only returns dates with valid data.

    Query params:
        days: lookback window in days (default 90). A non-numeric value now
              falls back to the default instead of raising ValueError and
              surfacing as an unhandled 500.

    Returns JSON {"dates": [...], "series": {name: [ratio-or-null, ...]}}
    with every series aligned to the sorted date axis.
    """
    conn = _get_db(request)
    try:
        # Fix: malformed ?days= used to escape as ValueError -> 500.
        try:
            days = int(request.query.get('days', '90'))
        except ValueError:
            days = 90
        rows = conn.execute("""
            SELECT name, snapshot_date, price_usd, treasury_multisig_usd,
                   lp_usdc_total, adjusted_circulating_supply
            FROM coin_snapshots
            WHERE snapshot_date >= date('now', ? || ' days')
              AND adjusted_circulating_supply IS NOT NULL
              AND adjusted_circulating_supply > 0
            ORDER BY name, snapshot_date
        """, (f'-{days}',)).fetchall()

        coin_ratios = {}
        all_dates = set()
        for r in rows:
            d = dict(r)
            _, nav, ratio = _compute_nav(d)
            # Drop rows where NAV/ratio could not be computed so the chart
            # never receives meaningless zero points.
            if nav > 0 and ratio > 0:
                coin_ratios.setdefault(d['name'], {})[d['snapshot_date']] = round(ratio, 3)
                all_dates.add(d['snapshot_date'])

        sorted_dates = sorted(all_dates)
        # Align every coin's series to the shared axis; gaps become None
        # (JSON null), which the chart bridges via spanGaps.
        series = {
            name: [date_map.get(day) for day in sorted_dates]
            for name, date_map in coin_ratios.items()
        }

        return web.json_response({
            'dates': sorted_dates,
            'series': series,
        })
    finally:
        conn.close()
|
||||
|
||||
|
||||
async def handle_portfolio_history(request):
    """GET /api/portfolio/history — raw snapshot rows grouped by coin name.

    Query params:
        days: lookback window in days (default 90). Non-numeric input now
              falls back to the default instead of raising an unhandled 500.
    """
    conn = _get_db(request)
    try:
        # Fix: malformed ?days= used to escape as ValueError -> 500.
        try:
            days = int(request.query.get('days', '90'))
        except ValueError:
            days = 90
        rows = conn.execute("""
            SELECT * FROM coin_snapshots
            WHERE snapshot_date >= date('now', ? || ' days')
            ORDER BY name, snapshot_date
        """, (f'-{days}',)).fetchall()
        history = {}
        for r in rows:
            d = dict(r)
            history.setdefault(d['name'], []).append(d)
        return web.json_response({'history': history})
    finally:
        conn.close()
|
||||
|
||||
|
||||
async def handle_portfolio_latest(request):
    """GET /api/portfolio/latest — latest snapshot rows + derived NAV fields as JSON."""
    conn = _get_db(request)
    try:
        rows = conn.execute("""
            SELECT * FROM coin_snapshots
            WHERE snapshot_date = (SELECT MAX(snapshot_date) FROM coin_snapshots)
            ORDER BY market_cap_usd DESC
        """).fetchall()
        coins = []
        for row in rows:
            coin = dict(row)
            coin['treasury_usd'], coin['nav_per_token'], coin['price_nav_ratio'] = _compute_nav(coin)
            coins.append(coin)
        # All rows share the same (max) snapshot date, so the first one is it.
        snapshot_date = coins[0]['snapshot_date'] if coins else None
        return web.json_response({'coins': coins, 'date': snapshot_date})
    finally:
        conn.close()
|
||||
|
||||
|
||||
def register_portfolio_routes(app, get_conn):
    """Attach the portfolio page + API routes and stash the connection factory."""
    app["_portfolio_conn"] = get_conn
    for path, handler in (
        ("/portfolio", handle_portfolio_page),
        ("/api/portfolio/nav-ratios", handle_nav_ratios),
        ("/api/portfolio/history", handle_portfolio_history),
        ("/api/portfolio/latest", handle_portfolio_latest),
    ):
        app.router.add_get(path, handler)
|
||||
|
|
@ -11,6 +11,7 @@ PAGES = [
|
|||
{"path": "/health", "label": "Knowledge Health", "icon": "♥"},
|
||||
{"path": "/agents", "label": "Agents", "icon": "★"},
|
||||
{"path": "/epistemic", "label": "Epistemic", "icon": "⚖"},
|
||||
{"path": "/portfolio", "label": "Portfolio", "icon": "★"},
|
||||
]
|
||||
|
||||
|
||||
|
|
|
|||
832
fetch_coins.py
Normal file
832
fetch_coins.py
Normal file
|
|
@ -0,0 +1,832 @@
|
|||
#!/usr/bin/env python3
"""
Ownership Coin Portfolio Data Fetcher

Reads entity files for token addresses, fetches current and historical
price data from DexScreener and CoinGecko, stores daily snapshots in
pipeline.db coin_snapshots table.

Usage:
    python3 fetch_coins.py --daily            # Today's snapshot (current prices + on-chain)
    python3 fetch_coins.py --backfill         # Historical daily prices from CoinGecko
    python3 fetch_coins.py --backfill-days 90 # Last N days only
"""

import argparse
import datetime
import json
import logging
import os
import sqlite3
import sys
import time
from pathlib import Path

import urllib.request
import base58
import yaml

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(message)s",
)
logger = logging.getLogger("fetch_coins")

# Paths are overridable via env vars for local runs; defaults match the VPS layout.
MAIN_WORKTREE = Path(os.environ.get("MAIN_WORKTREE", "/opt/teleo-eval/workspaces/main"))
DB_PATH = Path(os.environ.get("DB_PATH", "/opt/teleo-eval/pipeline/pipeline.db"))
# Entity markdown files whose YAML frontmatter describes each ownership coin.
ENTITY_DIR = MAIN_WORKTREE / "entities" / "internet-finance"

# External data sources (both keyed by Solana mint address).
DEXSCREENER_TOKEN_URL = "https://api.dexscreener.com/tokens/v1/solana/{mint}"
COINGECKO_HISTORY_URL = (
    "https://api.coingecko.com/api/v3/coins/solana/contract/{mint}"
    "/market_chart?vs_currency=usd&days={days}"
)
COINGECKO_RATE_LIMIT = 6.0  # seconds between requests (free tier — 10-15 req/min)

USDC_MINT = "EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v"
SOLANA_RPC = "https://api.mainnet-beta.solana.com"
||||
|
||||
def _http_get_json(url, retries=3, timeout=15):
    """GET *url* and return the parsed JSON body, or None on failure.

    Retry policy (up to *retries* extra attempts):
      - HTTP 429: linear waits of 15s, 30s, ... between attempts.
      - Any other exception (network error, bad JSON): exponential backoff
        1s, 2s, 4s, ...
      - Any other HTTP error status: no retry, returns None immediately.
    """
    for attempt in range(retries + 1):
        try:
            req = urllib.request.Request(url, headers={
                "Accept": "application/json",
                "User-Agent": "teleo-portfolio/1.0",
            })
            with urllib.request.urlopen(req, timeout=timeout) as resp:
                return json.loads(resp.read())
        except urllib.error.HTTPError as e:
            # 429 gets a long linear backoff dictated by the APIs' free tiers.
            if e.code == 429 and attempt < retries:
                wait = 15 * (attempt + 1)
                logger.info("Rate limited, waiting %ds...", wait)
                time.sleep(wait)
                continue
            logger.warning("HTTP %d for %s", e.code, url[:80])
            return None
        except Exception as e:
            # Transient failures: exponential backoff, give up on last attempt.
            if attempt < retries:
                time.sleep(2 ** attempt)
                continue
            logger.warning("HTTP GET failed after %d attempts: %s — %s", retries + 1, url[:80], e)
            return None
|
||||
|
||||
|
||||
def load_ownership_coins():
    """Read entity files and return list of coin dicts with chain data."""
    coins = []
    for path in sorted(ENTITY_DIR.glob("*.md")):
        fm = _entity_frontmatter(path)
        if fm is None or fm.get("subtype") != "ownership-coin":
            continue

        chain = fm.get("chain") or {}
        raise_data = fm.get("raise") or {}
        ops = fm.get("operations") or {}
        liq = fm.get("liquidation") or {}

        coins.append({
            "name": fm.get("name", path.stem),
            "ticker": fm.get("ticker"),
            "status": fm.get("status", "unknown"),
            "token_mint": chain.get("token_mint"),
            "treasury_multisig": chain.get("treasury_multisig"),
            "lp_pools": chain.get("lp_pools") or [],
            "vesting_wallets": chain.get("vesting_wallets") or [],
            "investor_locked_tokens": chain.get("investor_locked_tokens") or 0,
            "meteora_seed_tokens": chain.get("meteora_seed_tokens") or 0,
            "initial_price": raise_data.get("initial_token_price_usd"),
            "amount_raised": raise_data.get("amount_raised_usd"),
            "monthly_allowance": ops.get("monthly_allowance_usd"),
            "liquidation_date": liq.get("date"),
            "liquidation_return": liq.get("return_per_dollar"),
            "file": path.name,
        })

    return coins


def _entity_frontmatter(path):
    """Parse the YAML frontmatter of one entity file; None if absent/invalid."""
    text = path.read_text()
    if "---" not in text:
        return None
    pieces = text.split("---", 2)
    if len(pieces) < 3:
        return None
    try:
        parsed = yaml.safe_load(pieces[1])
    except Exception:
        return None
    return parsed if isinstance(parsed, dict) else None
|
||||
|
||||
|
||||
def ensure_schema(conn):
    """Create the coin_snapshots table/indexes if missing and migrate columns.

    Safe to run on every invocation: CREATE uses IF NOT EXISTS, and the
    ALTER loop treats "column already exists" as a no-op.
    """
    conn.execute("""
        CREATE TABLE IF NOT EXISTS coin_snapshots (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            snapshot_date TEXT NOT NULL,
            name TEXT NOT NULL,
            ticker TEXT,
            token_mint TEXT,
            status TEXT,
            price_usd REAL,
            market_cap_usd REAL,
            fdv_usd REAL,
            circulating_supply REAL,
            total_supply REAL,
            volume_24h_usd REAL,
            liquidity_usd REAL,
            treasury_multisig_usd REAL,
            lp_usdc_total REAL,
            lp_pools_detail TEXT,
            equity_value_usd REAL,
            initial_price_usd REAL,
            amount_raised_usd REAL,
            monthly_allowance_usd REAL,
            effective_liq_price REAL,
            delta_pct REAL,
            months_runway REAL,
            protocol_owned_tokens REAL,
            adjusted_circulating_supply REAL,
            data_source TEXT,
            fetched_at TEXT NOT NULL,
            UNIQUE(snapshot_date, name)
        )
    """)

    # In-place migration for DBs created before these columns existed;
    # sqlite raises OperationalError when the column is already present.
    migrated_cols = ("protocol_owned_tokens", "adjusted_circulating_supply",
                     "treasury_protocol_tokens", "vesting_tokens")
    for col in migrated_cols:
        try:
            conn.execute(f"ALTER TABLE coin_snapshots ADD COLUMN {col} REAL")
        except sqlite3.OperationalError:
            pass

    for ddl in (
        """
        CREATE INDEX IF NOT EXISTS idx_coin_snapshots_date
        ON coin_snapshots(snapshot_date)
        """,
        """
        CREATE INDEX IF NOT EXISTS idx_coin_snapshots_name
        ON coin_snapshots(name)
        """,
    ):
        conn.execute(ddl)
    conn.commit()
|
||||
|
||||
|
||||
def fetch_dexscreener(mint):
    """Get current price, mcap, fdv, volume, liquidity from DexScreener.

    Returns a dict of market fields, or None when the API call fails or the
    token has no trading pairs.
    """
    data = _http_get_json(DEXSCREENER_TOKEN_URL.format(mint=mint))
    if not data:
        return None

    # The v1 endpoint returns a bare list; older shapes wrap it in "pairs".
    pairs = data if isinstance(data, list) else data.get("pairs", [])
    if not pairs:
        return None

    # Quote off the deepest pool — highest USD liquidity.
    def _liquidity_usd(pair):
        return (pair.get("liquidity") or {}).get("usd", 0)

    best = max(pairs, key=_liquidity_usd)
    price_raw = best.get("priceUsd")

    return {
        "price_usd": float(price_raw) if price_raw else None,
        "market_cap_usd": best.get("marketCap"),
        "fdv_usd": best.get("fdv"),
        "volume_24h_usd": (best.get("volume") or {}).get("h24"),
        "liquidity_usd": (best.get("liquidity") or {}).get("usd"),
        "circulating_supply": None,  # DexScreener doesn't provide this directly
        "total_supply": None,
    }
|
||||
|
||||
|
||||
def fetch_coingecko_history(mint, days=365):
    """Get daily price history from CoinGecko.

    Returns a date-sorted list of {date, price_usd, market_cap_usd,
    volume_24h_usd} dicts; empty list when the request fails or the
    response carries no prices. Dates without a market-cap/volume sample
    get None for those fields.
    """
    url = COINGECKO_HISTORY_URL.format(mint=mint, days=days)
    data = _http_get_json(url)
    if not data or "prices" not in data:
        return []

    # The same (timestamp_ms, value) bucketing was previously written out
    # three times; factored into _collapse_to_daily.
    daily = _collapse_to_daily(data["prices"])
    market_caps = _collapse_to_daily(data.get("market_caps", []))
    volumes = _collapse_to_daily(data.get("total_volumes", []))

    return [
        {
            "date": date_str,
            "price_usd": daily[date_str],
            "market_cap_usd": market_caps.get(date_str),
            "volume_24h_usd": volumes.get(date_str),
        }
        for date_str in sorted(daily)
    ]


def _collapse_to_daily(points):
    """Map [(ts_ms, value), ...] to {YYYY-MM-DD: value}; last value per day wins
    (CoinGecko returns multiple samples per day)."""
    out = {}
    for ts_ms, value in points:
        dt = datetime.datetime.fromtimestamp(ts_ms / 1000, tz=datetime.timezone.utc)
        out[dt.strftime("%Y-%m-%d")] = value
    return out
|
||||
|
||||
|
||||
def fetch_solana_token_supply(mint):
    """Get token supply from Solana RPC.

    Returns {"total_supply": uiAmount} on success, or {} on any RPC failure.
    """
    body = json.dumps({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "getTokenSupply",
        "params": [mint],
    }).encode()
    req = urllib.request.Request(
        SOLANA_RPC,
        data=body,
        headers={"Content-Type": "application/json"},
    )
    try:
        with urllib.request.urlopen(req, timeout=10) as resp:
            data = json.loads(resp.read())
        value = data.get("result", {}).get("value", {})
        return {"total_supply": value.get("uiAmount")}
    except Exception as e:
        logger.warning("Solana RPC getTokenSupply failed for %s: %s", mint[:12], e)
        return {}
|
||||
|
||||
|
||||
def fetch_solana_usdc_balance(wallet_address):
    """Get USDC balance for a wallet from Solana RPC.

    Sums uiAmount across all of the wallet's USDC token accounts.
    Returns the total as a float, or None when wallet_address is empty
    or the RPC call fails.
    """
    if not wallet_address:
        return None
    payload = {
        "jsonrpc": "2.0",
        "id": 1,
        "method": "getTokenAccountsByOwner",
        "params": [
            wallet_address,
            {"mint": USDC_MINT},
            {"encoding": "jsonParsed"},
        ],
    }
    req = urllib.request.Request(
        SOLANA_RPC,
        data=json.dumps(payload).encode(),
        headers={"Content-Type": "application/json"},
    )
    try:
        with urllib.request.urlopen(req, timeout=10) as resp:
            data = json.loads(resp.read())
        accounts = data.get("result", {}).get("value", [])
        total = 0.0
        for acct in accounts:
            info = acct.get("account", {}).get("data", {}).get("parsed", {}).get("info", {})
            token_amount = info.get("tokenAmount", {})
            # Fix: "uiAmount" can be present-but-null in RPC responses, and
            # float(None) would abort the whole sum; coerce null to 0.
            total += float(token_amount.get("uiAmount") or 0)
        return total
    except Exception as e:
        logger.warning("Solana RPC USDC balance failed for %s: %s", wallet_address[:12], e)
        return None
|
||||
|
||||
|
||||
def fetch_solana_token_balance(wallet_address, token_mint):
    """Get balance of a specific SPL token for a wallet from Solana RPC.

    Retries up to 3 attempts when rate-limited (RPC error code 429 or HTTP
    429), waiting 10s then 20s. Returns the summed uiAmount across token
    accounts, or None on missing arguments or failure.
    """
    if not wallet_address or not token_mint:
        return None
    payload = {
        "jsonrpc": "2.0",
        "id": 1,
        "method": "getTokenAccountsByOwner",
        "params": [
            wallet_address,
            {"mint": token_mint},
            {"encoding": "jsonParsed"},
        ],
    }
    for attempt in range(3):
        req = urllib.request.Request(
            SOLANA_RPC,
            data=json.dumps(payload).encode(),
            headers={"Content-Type": "application/json"},
        )
        try:
            with urllib.request.urlopen(req, timeout=10) as resp:
                data = json.loads(resp.read())
            # JSON-RPC-level errors arrive with HTTP 200; handle them here.
            if "error" in data:
                code = data["error"].get("code", 0)
                if code == 429 and attempt < 2:
                    wait = 10 * (attempt + 1)
                    logger.info("RPC rate limited for %s, retrying in %ds...", wallet_address[:12], wait)
                    time.sleep(wait)
                    continue
                logger.warning("RPC error for %s: %s", wallet_address[:12], data["error"])
                return None
            accounts = data.get("result", {}).get("value", [])
            total = 0.0
            for acct in accounts:
                info = acct.get("account", {}).get("data", {}).get("parsed", {}).get("info", {})
                token_amount = info.get("tokenAmount", {})
                # Fix: "uiAmount" can be present-but-null; float(None) would
                # abort the whole sum, so coerce null to 0.
                total += float(token_amount.get("uiAmount") or 0)
            return total
        except urllib.error.HTTPError as e:
            if e.code == 429 and attempt < 2:
                wait = 10 * (attempt + 1)
                logger.info("RPC 429 for %s, retrying in %ds...", wallet_address[:12], wait)
                time.sleep(wait)
                continue
            logger.warning("Solana RPC token balance failed for %s (mint %s): %s",
                           wallet_address[:12], token_mint[:12], e)
            return None
        except Exception as e:
            logger.warning("Solana RPC token balance failed for %s (mint %s): %s",
                           wallet_address[:12], token_mint[:12], e)
            return None
    return None
|
||||
|
||||
|
||||
|
||||
# Meteora program IDs (on-chain owners of pool accounts; used to tell which
# account layout applies when decoding pool data).
METEORA_CPAMM = "cpamdpZCGKUy5JxQXB4dcpGPiikHawvSWAd6mEn1sGG"
METEORA_DLMM = "LBUZKhRxPF3XUpBCjp4YzTKgLccjZhTSDM9YuVaPwxo"
# Raw account-data offsets of the 32-byte vault pubkeys:
# CPAMM: vault_a at byte 232, vault_b at byte 264
# DLMM: reserve_x at byte 152, reserve_y at byte 184
||||
|
||||
def _resolve_meteora_vaults(pool_address):
    """For Meteora pools, read account data to find actual token vaults.

    Fetches the raw pool account via getAccountInfo (base64), then slices the
    two 32-byte vault pubkeys at the layout offsets noted above (which program
    applies is decided by the account's owner). Retries up to 3 attempts on
    rate limits (RPC error code 429 or HTTP 429) with 10s/20s waits.

    Returns (vault_a_addr, vault_b_addr, program_type) where program_type is
    "cpamm" or "dlmm", or (None, None, None) on any failure or unknown owner.
    """
    import base64
    payload = {
        "jsonrpc": "2.0", "id": 1,
        "method": "getAccountInfo",
        "params": [pool_address, {"encoding": "base64"}],
    }
    for attempt in range(3):
        try:
            req = urllib.request.Request(
                SOLANA_RPC,
                data=json.dumps(payload).encode(),
                headers={"Content-Type": "application/json"},
            )
            with urllib.request.urlopen(req, timeout=15) as resp:
                data = json.loads(resp.read())
            # JSON-RPC-level errors arrive with HTTP 200.
            if "error" in data:
                code = data["error"].get("code", 0)
                if code == 429 and attempt < 2:
                    time.sleep(10 * (attempt + 1))
                    continue
                return None, None, None
            val = data.get("result", {}).get("value")
            if not val:
                # Account does not exist (or RPC returned null value).
                return None, None, None
            owner = val.get("owner", "")
            # value["data"] is [base64_payload, "base64"].
            raw = base64.b64decode(val["data"][0])

            # Length checks guard the fixed-offset slices below.
            if owner == METEORA_CPAMM and len(raw) >= 296:
                va = base58.b58encode(raw[232:264]).decode()
                vb = base58.b58encode(raw[264:296]).decode()
                return va, vb, "cpamm"
            elif owner == METEORA_DLMM and len(raw) >= 216:
                va = base58.b58encode(raw[152:184]).decode()
                vb = base58.b58encode(raw[184:216]).decode()
                return va, vb, "dlmm"
            # Unknown program — not a Meteora pool we can decode.
            return None, None, None
        except urllib.error.HTTPError as e:
            if e.code == 429 and attempt < 2:
                time.sleep(10 * (attempt + 1))
                continue
            return None, None, None
        except Exception:
            # Best-effort helper: any other failure maps to "could not resolve".
            return None, None, None
    return None, None, None
|
||||
|
||||
|
||||
def _fetch_vault_balance(vault_address):
    """Get token balance from a vault/reserve account. Returns (mint, amount) or (None, 0).

    Uses getAccountInfo with jsonParsed encoding so the SPL token account's
    mint and uiAmountString come pre-decoded. Retries up to 3 attempts on
    rate limits (RPC error code 429 or HTTP 429) with 10s/20s waits; any
    other failure returns (None, 0.0).
    """
    payload = {
        "jsonrpc": "2.0", "id": 1,
        "method": "getAccountInfo",
        "params": [vault_address, {"encoding": "jsonParsed"}],
    }
    for attempt in range(3):
        try:
            req = urllib.request.Request(
                SOLANA_RPC,
                data=json.dumps(payload).encode(),
                headers={"Content-Type": "application/json"},
            )
            with urllib.request.urlopen(req, timeout=15) as resp:
                data = json.loads(resp.read())
            # JSON-RPC-level errors arrive with HTTP 200.
            if "error" in data:
                code = data["error"].get("code", 0)
                if code == 429 and attempt < 2:
                    time.sleep(10 * (attempt + 1))
                    continue
                return None, 0.0
            val = data.get("result", {}).get("value")
            # When the account can't be parsed as a token account, "data" is
            # the raw [b64, "base64"] list rather than a parsed dict.
            if not val or not isinstance(val.get("data"), dict):
                return None, 0.0
            info = val["data"]["parsed"]["info"]
            mint = info["mint"]
            # uiAmountString is the decimal-adjusted balance as a string.
            amt = float(info["tokenAmount"]["uiAmountString"])
            return mint, amt
        except urllib.error.HTTPError as e:
            if e.code == 429 and attempt < 2:
                time.sleep(10 * (attempt + 1))
                continue
            return None, 0.0
        except Exception:
            # Best-effort helper: treat any other failure as "no balance".
            return None, 0.0
    return None, 0.0
|
||||
|
||||
|
||||
def fetch_lp_wallet_balances(lp_pools, token_mint):
    """Query LP wallets for USDC balance and protocol-owned tokens.

    For each pool dict (``{"address": ..., "dex": ...}``) this resolves the
    pool's token holdings — Meteora pools go through vault resolution first,
    everything else (and Meteora fallback) uses ``getTokenAccountsByOwner``
    — and accumulates USDC plus tokens matching *token_mint*.

    Returns (lp_usdc_total, protocol_owned_tokens, lp_details_list) where
    lp_details_list carries per-pool rounded figures for persistence.
    """
    if not lp_pools:
        return 0.0, 0.0, []

    total_usdc = 0.0
    total_protocol_tokens = 0.0
    details = []

    for pool in lp_pools:
        address = pool.get("address")
        dex = pool.get("dex", "unknown")
        if not address:
            continue

        pool_usdc = 0.0
        pool_tokens = 0.0

        # Try Meteora vault resolution first (CPAMM + DLMM)
        if dex == "meteora":
            vault_a, vault_b, prog_type = _resolve_meteora_vaults(address)
            if vault_a and vault_b:
                logger.info("Meteora %s pool %s: vaults %s, %s", prog_type, address[:12], vault_a[:12], vault_b[:12])
                time.sleep(2)
                for vault_addr in [vault_a, vault_b]:
                    mint, amt = _fetch_vault_balance(vault_addr)
                    if mint and amt > 0:
                        if mint == USDC_MINT:
                            pool_usdc += amt
                        elif token_mint and mint == token_mint:
                            pool_tokens += amt
                    time.sleep(2)  # pace RPC calls to stay under rate limits
            else:
                logger.warning("Meteora vault resolution failed for %s, falling back to getTokenAccountsByOwner", address[:12])

        # Fallback: getTokenAccountsByOwner (works for futarchy-amm and non-Meteora pools)
        if pool_usdc == 0 and pool_tokens == 0:
            payload = {
                "jsonrpc": "2.0",
                "id": 1,
                "method": "getTokenAccountsByOwner",
                "params": [
                    address,
                    {"programId": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA"},
                    {"encoding": "jsonParsed"},
                ],
            }
            for attempt in range(3):
                try:
                    req = urllib.request.Request(
                        SOLANA_RPC,
                        data=json.dumps(payload).encode(),
                        headers={"Content-Type": "application/json"},
                    )
                    with urllib.request.urlopen(req, timeout=15) as resp:
                        data = json.loads(resp.read())
                    if "error" in data:
                        code = data["error"].get("code", 0)
                        if code == 429 and attempt < 2:
                            # BUGFIX: log the back-off we actually sleep
                            # (previously logged 5*(attempt+1) while
                            # sleeping 10*(attempt+1)).
                            wait = 10 * (attempt + 1)
                            logger.info("RPC rate limited for %s, retrying in %ds...", address[:12], wait)
                            time.sleep(wait)
                            continue
                        logger.warning("RPC error for LP %s: %s", address[:12], data["error"])
                        break
                    for acct in data.get("result", {}).get("value", []):
                        info = acct["account"]["data"]["parsed"]["info"]
                        mint = info["mint"]
                        amt = float(info["tokenAmount"]["uiAmountString"])
                        if amt == 0:
                            continue
                        if mint == USDC_MINT:
                            pool_usdc += amt
                        elif token_mint and mint == token_mint:
                            pool_tokens += amt
                    break
                except urllib.error.HTTPError as e:
                    if e.code == 429 and attempt < 2:
                        # BUGFIX: previously logged `wait` but slept
                        # `wait * 2`; keep the same 10s/20s back-off and
                        # report it truthfully.
                        wait = 10 * (attempt + 1)
                        logger.info("RPC 429 for %s, retrying in %ds...", address[:12], wait)
                        time.sleep(wait)
                        continue
                    logger.warning("LP wallet query failed for %s (%s): %s", dex, address[:12], e)
                    break
                except Exception as e:
                    logger.warning("LP wallet query failed for %s (%s): %s", dex, address[:12], e)
                    break

        total_usdc += pool_usdc
        total_protocol_tokens += pool_tokens
        details.append({
            "dex": dex,
            "address": address,
            "usdc": round(pool_usdc, 2),
            "protocol_tokens": round(pool_tokens, 2),
        })
        time.sleep(5)  # generous pacing between pools (public RPC)

    return total_usdc, total_protocol_tokens, details
||||
def compute_derived(row, coin):
    """Derive valuation metrics on *row* in place and return it.

    Fills in effective liquidation price (treasury + LP cash per
    circulating token), adjusted market cap, delta vs. spot price,
    equity value, and months of treasury runway, when the required
    inputs are present and positive.
    """
    spot = row.get("price_usd")
    allowance = coin.get("monthly_allowance")
    treasury_cash = row.get("treasury_multisig_usd") or 0
    cash = treasury_cash + (row.get("lp_usdc_total") or 0)
    market_cap = row.get("market_cap_usd") or 0

    # Derive adjusted circulating supply from total supply when missing.
    circulating = row.get("adjusted_circulating_supply")
    if not circulating:
        supply = row.get("total_supply")
        if supply and supply > 0:
            circulating = supply - (row.get("protocol_owned_tokens") or 0)
            row["adjusted_circulating_supply"] = circulating

    has_price = bool(spot) and spot > 0
    if circulating and circulating > 0:
        row["effective_liq_price"] = cash / circulating
        if has_price:
            # Recompute market cap from the adjusted supply when possible.
            market_cap = spot * circulating
            row["market_cap_usd"] = market_cap
    if has_price and row.get("effective_liq_price"):
        row["delta_pct"] = ((row["effective_liq_price"] / spot) - 1) * 100

    row["equity_value_usd"] = (market_cap - cash) if market_cap else None

    if allowance and allowance > 0 and treasury_cash:
        row["months_runway"] = treasury_cash / allowance

    return row
||||
def upsert_snapshot(conn, row):
    """Insert or replace a daily snapshot.

    Writes one row into ``coin_snapshots`` via INSERT OR REPLACE, so
    re-running a fetch for the same coin/day overwrites that day's data.
    *row* must be a mapping supplying every named placeholder below.
    The caller is responsible for ``conn.commit()``.
    """
    # NOTE(review): INSERT OR REPLACE only dedupes if coin_snapshots has a
    # uniqueness constraint (presumably snapshot_date+name) — the schema is
    # created elsewhere (ensure_schema); confirm there.
    conn.execute("""
        INSERT OR REPLACE INTO coin_snapshots (
            snapshot_date, name, ticker, token_mint, status,
            price_usd, market_cap_usd, fdv_usd,
            circulating_supply, total_supply,
            volume_24h_usd, liquidity_usd,
            treasury_multisig_usd, lp_usdc_total, lp_pools_detail,
            equity_value_usd, initial_price_usd, amount_raised_usd,
            monthly_allowance_usd, effective_liq_price, delta_pct,
            months_runway, protocol_owned_tokens, adjusted_circulating_supply,
            treasury_protocol_tokens, vesting_tokens,
            data_source, fetched_at
        ) VALUES (
            :snapshot_date, :name, :ticker, :token_mint, :status,
            :price_usd, :market_cap_usd, :fdv_usd,
            :circulating_supply, :total_supply,
            :volume_24h_usd, :liquidity_usd,
            :treasury_multisig_usd, :lp_usdc_total, :lp_pools_detail,
            :equity_value_usd, :initial_price_usd, :amount_raised_usd,
            :monthly_allowance_usd, :effective_liq_price, :delta_pct,
            :months_runway, :protocol_owned_tokens, :adjusted_circulating_supply,
            :treasury_protocol_tokens, :vesting_tokens,
            :data_source, :fetched_at
        )
    """, row)
||||
def cmd_daily(coins, conn):
    """Fetch current data for all coins and store today's snapshot.

    For each coin with a token mint: pull the live price from DexScreener
    (falling back to the last stored price), read supply and wallet
    balances from the Solana RPC, query LP pools, compute derived
    metrics, and upsert one row per coin into ``coin_snapshots``.
    Commits once at the end.  The scattered ``time.sleep`` calls pace
    requests to stay under public RPC rate limits.
    """
    today = datetime.date.today().isoformat()
    now = datetime.datetime.now(datetime.timezone.utc).isoformat()

    for coin in coins:
        mint = coin["token_mint"]
        if not mint:
            logger.info("Skipping %s — no token mint", coin["name"])
            continue

        logger.info("Fetching %s (%s)...", coin["name"], coin["ticker"])

        # Current price from DexScreener
        dex = fetch_dexscreener(mint)
        if not dex:
            # Fall back to the most recent stored price so a transient
            # DexScreener outage doesn't leave a gap in the series.
            logger.warning("DexScreener returned nothing for %s — trying last known price", coin["name"])
            last_row = conn.execute(
                "SELECT price_usd FROM coin_snapshots WHERE name=? AND price_usd IS NOT NULL ORDER BY snapshot_date DESC LIMIT 1",
                (coin["name"],)
            ).fetchone()
            if last_row and last_row[0]:
                dex = {"price_usd": last_row[0], "market_cap_usd": None, "fdv_usd": None, "volume_24h_usd": None, "liquidity_usd": None, "circulating_supply": None, "total_supply": None}
                logger.info(" Using last known price: $%.4f", last_row[0])
            else:
                logger.warning(" No historical price either — skipping %s", coin["name"])
                continue

        # Token supply from Solana RPC
        # NOTE(review): assumes fetch_solana_token_supply always returns a
        # dict — if it can return None, supply.get below would raise; confirm.
        supply = fetch_solana_token_supply(mint)
        time.sleep(4)

        # Treasury USDC balance + protocol token balance
        treasury_usd = None
        treasury_tokens = 0.0
        if coin.get("treasury_multisig"):
            treasury_usd = fetch_solana_usdc_balance(coin["treasury_multisig"])
            time.sleep(2)
            treas_tok = fetch_solana_token_balance(coin["treasury_multisig"], mint)
            if treas_tok and treas_tok > 0:
                treasury_tokens = treas_tok
                logger.info(" %s treasury holds %.0f protocol tokens", coin["name"], treasury_tokens)
            time.sleep(2)

        time.sleep(4)

        # Vesting wallet scanning — tokens locked in vesting contracts
        vesting_tokens = 0.0
        if coin.get("vesting_wallets"):
            for vw in coin["vesting_wallets"]:
                # Entries may be bare address strings or {"address","label"} dicts.
                vw_addr = vw.get("address") if isinstance(vw, dict) else vw
                if not vw_addr:
                    continue
                vt = fetch_solana_token_balance(vw_addr, mint)
                if vt and vt > 0:
                    vesting_tokens += vt
                    label = vw.get("label", vw_addr[:12]) if isinstance(vw, dict) else vw_addr[:12]
                    logger.info(" %s vesting wallet (%s) holds %.0f tokens", coin["name"], label, vt)
                time.sleep(2)

        # LP pool balances — query each wallet for USDC + protocol-owned tokens
        lp_total = 0.0
        protocol_tokens = 0.0
        lp_detail = None
        if coin.get("lp_pools"):
            lp_total, protocol_tokens, lp_details_list = fetch_lp_wallet_balances(
                coin["lp_pools"], mint
            )
            lp_detail = json.dumps(lp_details_list) if lp_details_list else None

        total_supply = supply.get("total_supply")

        # Adjusted circulating supply: total - LP tokens - treasury tokens
        # (plus vesting, investor-locked, and Meteora seed tokens below).
        investor_locked = float(coin.get("investor_locked_tokens") or 0)
        meteora_seed = float(coin.get("meteora_seed_tokens") or 0)
        all_protocol_tokens = protocol_tokens + treasury_tokens + vesting_tokens + investor_locked + meteora_seed
        if investor_locked > 0:
            logger.info(" %s investor locked tokens: %.0f", coin["name"], investor_locked)
        if meteora_seed > 0:
            logger.info(" %s meteora seed tokens: %.0f", coin["name"], meteora_seed)
        adj_circ = None
        if total_supply and total_supply > 0:
            adj_circ = total_supply - all_protocol_tokens

        # If we have adj_circ and price but no mcap, compute from adjusted supply
        if adj_circ and dex.get("price_usd"):
            dex["market_cap_usd"] = adj_circ * dex["price_usd"]
        elif total_supply and dex.get("price_usd") and not dex.get("market_cap_usd"):
            dex["market_cap_usd"] = total_supply * dex["price_usd"]

        # Assemble the snapshot row; derived fields (effective_liq_price,
        # delta_pct, equity, runway) are filled in by compute_derived below.
        row = {
            "snapshot_date": today,
            "name": coin["name"],
            "ticker": coin["ticker"],
            "token_mint": mint,
            "status": coin["status"],
            "price_usd": dex.get("price_usd"),
            "market_cap_usd": dex.get("market_cap_usd"),
            "fdv_usd": dex.get("fdv_usd"),
            "circulating_supply": dex.get("circulating_supply"),
            "total_supply": total_supply,
            "volume_24h_usd": dex.get("volume_24h_usd"),
            "liquidity_usd": dex.get("liquidity_usd"),
            "treasury_multisig_usd": treasury_usd,
            "lp_usdc_total": lp_total if lp_total else None,
            "lp_pools_detail": lp_detail,
            "equity_value_usd": None,
            "initial_price_usd": coin.get("initial_price"),
            "amount_raised_usd": coin.get("amount_raised"),
            "monthly_allowance_usd": coin.get("monthly_allowance"),
            "effective_liq_price": None,
            "delta_pct": None,
            "months_runway": None,
            "protocol_owned_tokens": all_protocol_tokens if all_protocol_tokens else None,
            "treasury_protocol_tokens": treasury_tokens if treasury_tokens else None,
            "vesting_tokens": vesting_tokens if vesting_tokens else None,
            "adjusted_circulating_supply": adj_circ,
            "data_source": "dexscreener+solana_rpc",
            "fetched_at": now,
        }

        row = compute_derived(row, coin)
        upsert_snapshot(conn, row)
        lp_msg = f" lp_usdc=${row.get('lp_usdc_total') or 0:,.0f} lp_tokens={protocol_tokens:,.0f} treas_tokens={treasury_tokens:,.0f}" if row.get("lp_usdc_total") or treasury_tokens else ""
        logger.info(" %s: $%.4f mcap=$%s adj_circ=%s%s",
                    coin["name"], row["price_usd"] or 0,
                    f'{row["market_cap_usd"]:,.0f}' if row["market_cap_usd"] else "N/A",
                    f'{row["adjusted_circulating_supply"]:,.0f}' if row.get("adjusted_circulating_supply") else "N/A",
                    lp_msg)
        time.sleep(1)

    conn.commit()
    logger.info("Daily snapshot complete for %s", today)
||||
def cmd_backfill(coins, conn, days=365):
    """Backfill historical daily prices from CoinGecko.

    For each coin with a token mint, fetches up to *days* of daily
    price/mcap/volume history and upserts one ``coin_snapshots`` row per
    day.  Rows carry only market data — on-chain balance columns are left
    NULL — and are tagged data_source="coingecko_history".  Commits after
    each coin; sleeps COINGECKO_RATE_LIMIT between coins to respect the
    API rate limit.
    """
    now = datetime.datetime.now(datetime.timezone.utc).isoformat()

    for coin in coins:
        mint = coin["token_mint"]
        if not mint:
            logger.info("Skipping %s — no token mint", coin["name"])
            continue

        logger.info("Backfilling %s (%s) — %d days...", coin["name"], coin["ticker"], days)
        history = fetch_coingecko_history(mint, days=days)

        if not history:
            logger.warning("No CoinGecko history for %s", coin["name"])
            time.sleep(COINGECKO_RATE_LIMIT)
            continue

        inserted = 0
        for point in history:
            # Static per-coin config (initial price, raise, allowance) is
            # copied onto every historical row; derived fields stay NULL.
            row = {
                "snapshot_date": point["date"],
                "name": coin["name"],
                "ticker": coin["ticker"],
                "token_mint": mint,
                "status": coin["status"],
                "price_usd": point["price_usd"],
                "market_cap_usd": point.get("market_cap_usd"),
                "fdv_usd": None,
                "circulating_supply": None,
                "total_supply": None,
                "volume_24h_usd": point.get("volume_24h_usd"),
                "liquidity_usd": None,
                "treasury_multisig_usd": None,
                "lp_usdc_total": None,
                "lp_pools_detail": None,
                "equity_value_usd": None,
                "initial_price_usd": coin.get("initial_price"),
                "amount_raised_usd": coin.get("amount_raised"),
                "monthly_allowance_usd": coin.get("monthly_allowance"),
                "effective_liq_price": None,
                "delta_pct": None,
                "months_runway": None,
                "protocol_owned_tokens": None,
                "adjusted_circulating_supply": None,
                "treasury_protocol_tokens": None,
                "vesting_tokens": None,
                "data_source": "coingecko_history",
                "fetched_at": now,
            }
            upsert_snapshot(conn, row)
            inserted += 1

        conn.commit()
        logger.info(" %s: %d daily snapshots inserted", coin["name"], inserted)
        time.sleep(COINGECKO_RATE_LIMIT)

    logger.info("Backfill complete")
||||
def main():
    """CLI entry point: parse flags, open the snapshots DB, run jobs."""
    cli = argparse.ArgumentParser(description="Ownership coin portfolio data fetcher")
    cli.add_argument("--daily", action="store_true", help="Fetch today's snapshot")
    cli.add_argument("--backfill", action="store_true", help="Backfill historical prices")
    cli.add_argument("--backfill-days", type=int, default=365, help="Days to backfill (default: 365)")
    opts = cli.parse_args()

    # At least one of the two modes must be requested.
    if not (opts.daily or opts.backfill):
        cli.error("Specify --daily or --backfill")

    coin_list = load_ownership_coins()
    with_mints = sum(1 for entry in coin_list if entry["token_mint"])
    logger.info("Loaded %d ownership coins (%d with token mints)",
                len(coin_list), with_mints)

    db = sqlite3.connect(str(DB_PATH), timeout=30)
    db.execute("PRAGMA journal_mode=WAL")
    db.execute("PRAGMA busy_timeout=30000")
    ensure_schema(db)

    try:
        # When both flags are given, backfill runs before the daily fetch.
        if opts.backfill:
            cmd_backfill(coin_list, db, days=opts.backfill_days)
        if opts.daily:
            cmd_daily(coin_list, db)
    finally:
        db.close()
||||
# Script entry point — run directly (e.g. from the daily cron job).
if __name__ == "__main__":
    main()
|
||||
Loading…
Reference in a new issue