#!/usr/bin/env python3
"""
Ownership Coin Portfolio Data Fetcher

Reads entity files for token addresses, fetches current and historical
price data from DexScreener and CoinGecko, stores daily snapshots in
pipeline.db coin_snapshots table.

Usage:
    python3 fetch_coins.py --daily              # Today's snapshot (current prices + on-chain)
    python3 fetch_coins.py --backfill           # Historical daily prices from CoinGecko
    python3 fetch_coins.py --backfill-days 90   # Last N days only
"""

import argparse
import datetime
import json
import logging
import os
import sqlite3
import sys
import time
from pathlib import Path

import urllib.request
import urllib.error
import base58
import yaml

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(message)s",
)
logger = logging.getLogger("fetch_coins")

MAIN_WORKTREE = Path(os.environ.get("MAIN_WORKTREE", "/opt/teleo-eval/workspaces/main"))
DB_PATH = Path(os.environ.get("DB_PATH", "/opt/teleo-eval/pipeline/pipeline.db"))
ENTITY_DIR = MAIN_WORKTREE / "entities" / "internet-finance"

DEXSCREENER_TOKEN_URL = "https://api.dexscreener.com/tokens/v1/solana/{mint}"
COINGECKO_HISTORY_URL = (
    "https://api.coingecko.com/api/v3/coins/solana/contract/{mint}"
    "/market_chart?vs_currency=usd&days={days}"
)
COINGECKO_RATE_LIMIT = 6.0  # seconds between requests (free tier — 10-15 req/min)

USDC_MINT = "EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v"
SOLANA_RPC = "https://api.mainnet-beta.solana.com"

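# Shared GET helper: retries with a linear backoff on HTTP 429 (15s, 30s, 45s, ...)
# and an exponential backoff (2**attempt seconds) on other transient errors.
# Returns parsed JSON, or None once retries are exhausted.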
def _http_get_json(url, retries=3, timeout=15):
    for attempt in range(retries + 1):
        try:
            req = urllib.request.Request(url, headers={
                "Accept": "application/json",
                "User-Agent": "teleo-portfolio/1.0",
            })
            with urllib.request.urlopen(req, timeout=timeout) as resp:
                return json.loads(resp.read())
        except urllib.error.HTTPError as e:
            if e.code == 429 and attempt < retries:
                wait = 15 * (attempt + 1)
                logger.info("Rate limited, waiting %ds...", wait)
                time.sleep(wait)
                continue
            logger.warning("HTTP %d for %s", e.code, url[:80])
            return None
        except Exception as e:
            if attempt < retries:
                time.sleep(2 ** attempt)
                continue
            logger.warning("HTTP GET failed after %d attempts: %s — %s", retries + 1, url[:80], e)
            return None

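# Entity files are markdown documents with a YAML frontmatter block delimited by
# "---"; only entries with subtype "ownership-coin" that are not marked as
# liquidated are included in the portfolio.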
def load_ownership_coins():
    """Read entity files and return list of coin dicts with chain data."""
    coins = []
    for f in sorted(ENTITY_DIR.glob("*.md")):
        content = f.read_text()
        if "---" not in content:
            continue
        parts = content.split("---", 2)
        if len(parts) < 3:
            continue
        try:
            fm = yaml.safe_load(parts[1])
        except Exception:
            continue
        if not isinstance(fm, dict):
            continue
        if fm.get("subtype") != "ownership-coin":
            continue
        if fm.get("status") == "liquidated":
            continue

        chain = fm.get("chain") or {}
        if isinstance(chain, str):
            chain = {}
        raise_data = fm.get("raise") or {}
        ops = fm.get("operations") or {}
        liq = fm.get("liquidation") or {}

        coins.append({
            "name": fm.get("name", f.stem),
            "ticker": fm.get("ticker"),
            "status": fm.get("status", "unknown"),
            "token_mint": chain.get("token_mint"),
            "treasury_multisig": chain.get("treasury_multisig"),
            "lp_pools": chain.get("lp_pools") or [],
            "vesting_wallets": chain.get("vesting_wallets") or [],
            "investor_locked_tokens": chain.get("investor_locked_tokens") or 0,
            "meteora_seed_tokens": chain.get("meteora_seed_tokens") or 0,
            "initial_price": raise_data.get("initial_token_price_usd"),
            "amount_raised": raise_data.get("amount_raised_usd"),
            "monthly_allowance": ops.get("monthly_allowance_usd"),
            "liquidation_date": liq.get("date"),
            "liquidation_return": liq.get("return_per_dollar"),
            "file": f.name,
        })

    return coins

def ensure_schema(conn):
    """Create coin_snapshots table if it doesn't exist."""
    conn.execute("""
        CREATE TABLE IF NOT EXISTS coin_snapshots (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            snapshot_date TEXT NOT NULL,
            name TEXT NOT NULL,
            ticker TEXT,
            token_mint TEXT,
            status TEXT,
            price_usd REAL,
            market_cap_usd REAL,
            fdv_usd REAL,
            circulating_supply REAL,
            total_supply REAL,
            volume_24h_usd REAL,
            liquidity_usd REAL,
            treasury_multisig_usd REAL,
            lp_usdc_total REAL,
            lp_pools_detail TEXT,
            equity_value_usd REAL,
            initial_price_usd REAL,
            amount_raised_usd REAL,
            monthly_allowance_usd REAL,
            effective_liq_price REAL,
            delta_pct REAL,
            months_runway REAL,
            protocol_owned_tokens REAL,
            adjusted_circulating_supply REAL,
            treasury_protocol_tokens REAL,
            vesting_tokens REAL,
            data_source TEXT,
            fetched_at TEXT NOT NULL,
            UNIQUE(snapshot_date, name)
        )
    """)
    # Legacy migration — these columns exist in CREATE TABLE but may be missing in older DBs
    for col in ("protocol_owned_tokens", "adjusted_circulating_supply", "treasury_protocol_tokens", "vesting_tokens"):
        try:
            conn.execute(f"ALTER TABLE coin_snapshots ADD COLUMN {col} REAL")
        except sqlite3.OperationalError:
            pass
    conn.execute("""
        CREATE INDEX IF NOT EXISTS idx_coin_snapshots_date
        ON coin_snapshots(snapshot_date)
    """)
    conn.execute("""
        CREATE INDEX IF NOT EXISTS idx_coin_snapshots_name
        ON coin_snapshots(name)
    """)
    conn.commit()

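# DexScreener's token endpoint returns one entry per trading pair (either as a
# bare list or under a "pairs" key); the pair with the deepest USD liquidity is
# used as the canonical price source.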
def fetch_dexscreener(mint):
    """Get current price, mcap, fdv, volume, liquidity from DexScreener."""
    url = DEXSCREENER_TOKEN_URL.format(mint=mint)
    data = _http_get_json(url)
    if not data:
        return None

    pairs = data if isinstance(data, list) else data.get("pairs", [])
    if not pairs:
        return None

    # Use highest-liquidity pair
    best = max(pairs, key=lambda p: (p.get("liquidity") or {}).get("usd") or 0)
    liq = best.get("liquidity") or {}

    return {
        "price_usd": float(best["priceUsd"]) if best.get("priceUsd") else None,
        "market_cap_usd": best.get("marketCap"),
        "fdv_usd": best.get("fdv"),
        "volume_24h_usd": (best.get("volume") or {}).get("h24"),
        "liquidity_usd": liq.get("usd"),
        "circulating_supply": None,  # DexScreener doesn't provide this directly
        "total_supply": None,
    }

def fetch_coingecko_history(mint, days=365):
    """Get daily price history from CoinGecko."""
    url = COINGECKO_HISTORY_URL.format(mint=mint, days=days)
    data = _http_get_json(url)
    if not data or "prices" not in data:
        return []

    daily = {}
    for ts_ms, price in data["prices"]:
        dt = datetime.datetime.fromtimestamp(ts_ms / 1000, tz=datetime.timezone.utc)
        date_str = dt.strftime("%Y-%m-%d")
        daily[date_str] = price  # last value for that day wins (CoinGecko returns multiple per day)

    market_caps = {}
    for ts_ms, mc in data.get("market_caps", []):
        dt = datetime.datetime.fromtimestamp(ts_ms / 1000, tz=datetime.timezone.utc)
        date_str = dt.strftime("%Y-%m-%d")
        market_caps[date_str] = mc

    volumes = {}
    for ts_ms, vol in data.get("total_volumes", []):
        dt = datetime.datetime.fromtimestamp(ts_ms / 1000, tz=datetime.timezone.utc)
        date_str = dt.strftime("%Y-%m-%d")
        volumes[date_str] = vol

    result = []
    for date_str in sorted(daily.keys()):
        result.append({
            "date": date_str,
            "price_usd": daily[date_str],
            "market_cap_usd": market_caps.get(date_str),
            "volume_24h_usd": volumes.get(date_str),
        })

    return result

def fetch_solana_token_supply(mint):
    """Get token supply from Solana RPC."""
    payload = {
        "jsonrpc": "2.0",
        "id": 1,
        "method": "getTokenSupply",
        "params": [mint],
    }
    req = urllib.request.Request(
        SOLANA_RPC,
        data=json.dumps(payload).encode(),
        headers={"Content-Type": "application/json"},
    )
    try:
        with urllib.request.urlopen(req, timeout=10) as resp:
            data = json.loads(resp.read())
            val = data.get("result", {}).get("value", {})
            amount = val.get("uiAmount")
            return {"total_supply": amount}
    except Exception as e:
        logger.warning("Solana RPC getTokenSupply failed for %s: %s", mint[:12], e)
        return {}

def fetch_solana_usdc_balance(wallet_address):
    """Get USDC balance for a wallet from Solana RPC."""
    if not wallet_address:
        return None
    payload = {
        "jsonrpc": "2.0",
        "id": 1,
        "method": "getTokenAccountsByOwner",
        "params": [
            wallet_address,
            {"mint": USDC_MINT},
            {"encoding": "jsonParsed"},
        ],
    }
    req = urllib.request.Request(
        SOLANA_RPC,
        data=json.dumps(payload).encode(),
        headers={"Content-Type": "application/json"},
    )
    try:
        with urllib.request.urlopen(req, timeout=10) as resp:
            data = json.loads(resp.read())
            accounts = data.get("result", {}).get("value", [])
            total = 0.0
            for acct in accounts:
                info = acct.get("account", {}).get("data", {}).get("parsed", {}).get("info", {})
                token_amount = info.get("tokenAmount", {})
                total += float(token_amount.get("uiAmount", 0))
            return total
    except Exception as e:
        logger.warning("Solana RPC USDC balance failed for %s: %s", wallet_address[:12], e)
        return None

def fetch_solana_token_balance(wallet_address, token_mint):
    """Get balance of a specific SPL token for a wallet from Solana RPC."""
    if not wallet_address or not token_mint:
        return None
    payload = {
        "jsonrpc": "2.0",
        "id": 1,
        "method": "getTokenAccountsByOwner",
        "params": [
            wallet_address,
            {"mint": token_mint},
            {"encoding": "jsonParsed"},
        ],
    }
    for attempt in range(3):
        req = urllib.request.Request(
            SOLANA_RPC,
            data=json.dumps(payload).encode(),
            headers={"Content-Type": "application/json"},
        )
        try:
            with urllib.request.urlopen(req, timeout=10) as resp:
                data = json.loads(resp.read())
                if "error" in data:
                    code = data["error"].get("code", 0)
                    if code == 429 and attempt < 2:
                        wait = 10 * (attempt + 1)
                        logger.info("RPC rate limited for %s, retrying in %ds...", wallet_address[:12], wait)
                        time.sleep(wait)
                        continue
                    logger.warning("RPC error for %s: %s", wallet_address[:12], data["error"])
                    return None
                accounts = data.get("result", {}).get("value", [])
                total = 0.0
                for acct in accounts:
                    info = acct.get("account", {}).get("data", {}).get("parsed", {}).get("info", {})
                    token_amount = info.get("tokenAmount", {})
                    total += float(token_amount.get("uiAmount", 0))
                return total
        except urllib.error.HTTPError as e:
            if e.code == 429 and attempt < 2:
                wait = 10 * (attempt + 1)
                logger.info("RPC 429 for %s, retrying in %ds...", wallet_address[:12], wait)
                time.sleep(wait)
                continue
            logger.warning("Solana RPC token balance failed for %s (mint %s): %s",
                           wallet_address[:12], token_mint[:12], e)
            return None
        except Exception as e:
            logger.warning("Solana RPC token balance failed for %s (mint %s): %s",
                           wallet_address[:12], token_mint[:12], e)
            return None
    return None


# Meteora program IDs
METEORA_CPAMM = "cpamdpZCGKUy5JxQXB4dcpGPiikHawvSWAd6mEn1sGG"
METEORA_DLMM = "LBUZKhRxPF3XUpBCjp4YzTKgLccjZhTSDM9YuVaPwxo"
# CPAMM: vault_a at byte 232, vault_b at byte 264
# DLMM: reserve_x at byte 152, reserve_y at byte 184

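# The pool account stores its vault/reserve pubkeys as raw 32-byte fields at the
# offsets above; _resolve_meteora_vaults slices them out of the base64-decoded
# account data and base58-encodes them so they can be read like ordinary token
# accounts.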
def _resolve_meteora_vaults(pool_address):
    """For Meteora pools, read account data to find actual token vaults.

    Returns (vault_a_addr, vault_b_addr, program_type) or (None, None, None).
    """
    import base64
    payload = {
        "jsonrpc": "2.0", "id": 1,
        "method": "getAccountInfo",
        "params": [pool_address, {"encoding": "base64"}],
    }
    for attempt in range(3):
        try:
            req = urllib.request.Request(
                SOLANA_RPC,
                data=json.dumps(payload).encode(),
                headers={"Content-Type": "application/json"},
            )
            with urllib.request.urlopen(req, timeout=15) as resp:
                data = json.loads(resp.read())
                if "error" in data:
                    code = data["error"].get("code", 0)
                    if code == 429 and attempt < 2:
                        time.sleep(10 * (attempt + 1))
                        continue
                    return None, None, None
                val = data.get("result", {}).get("value")
                if not val:
                    return None, None, None
                owner = val.get("owner", "")
                raw = base64.b64decode(val["data"][0])

                if owner == METEORA_CPAMM and len(raw) >= 296:
                    va = base58.b58encode(raw[232:264]).decode()
                    vb = base58.b58encode(raw[264:296]).decode()
                    return va, vb, "cpamm"
                elif owner == METEORA_DLMM and len(raw) >= 216:
                    va = base58.b58encode(raw[152:184]).decode()
                    vb = base58.b58encode(raw[184:216]).decode()
                    return va, vb, "dlmm"
                return None, None, None
        except urllib.error.HTTPError as e:
            if e.code == 429 and attempt < 2:
                time.sleep(10 * (attempt + 1))
                continue
            return None, None, None
        except Exception:
            return None, None, None
    return None, None, None

def _fetch_vault_balance(vault_address):
    """Get token balance from a vault/reserve account. Returns (mint, amount) or (None, 0)."""
    payload = {
        "jsonrpc": "2.0", "id": 1,
        "method": "getAccountInfo",
        "params": [vault_address, {"encoding": "jsonParsed"}],
    }
    for attempt in range(3):
        try:
            req = urllib.request.Request(
                SOLANA_RPC,
                data=json.dumps(payload).encode(),
                headers={"Content-Type": "application/json"},
            )
            with urllib.request.urlopen(req, timeout=15) as resp:
                data = json.loads(resp.read())
                if "error" in data:
                    code = data["error"].get("code", 0)
                    if code == 429 and attempt < 2:
                        time.sleep(10 * (attempt + 1))
                        continue
                    return None, 0.0
                val = data.get("result", {}).get("value")
                if not val or not isinstance(val.get("data"), dict):
                    return None, 0.0
                info = val["data"]["parsed"]["info"]
                mint = info["mint"]
                amt = float(info["tokenAmount"]["uiAmountString"])
                return mint, amt
        except urllib.error.HTTPError as e:
            if e.code == 429 and attempt < 2:
                time.sleep(10 * (attempt + 1))
                continue
            return None, 0.0
        except Exception:
            return None, 0.0
    return None, 0.0

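# LP accounting: for Meteora pools the USDC and protocol-token balances sit in
# program-owned vault accounts, so those are resolved and read directly; other
# pools (e.g. the futarchy AMM) fall back to a getTokenAccountsByOwner scan of
# the pool address.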
def fetch_lp_wallet_balances(lp_pools, token_mint):
    """Query LP wallets for USDC balance and protocol-owned tokens.

    Returns (lp_usdc_total, protocol_owned_tokens, lp_details_list).
    """
    if not lp_pools:
        return 0.0, 0.0, []

    total_usdc = 0.0
    total_protocol_tokens = 0.0
    details = []

    for pool in lp_pools:
        address = pool.get("address")
        dex = pool.get("dex", "unknown")
        if not address:
            continue

        pool_usdc = 0.0
        pool_tokens = 0.0

        # Try Meteora vault resolution first (CPAMM + DLMM)
        if dex == "meteora":
            vault_a, vault_b, prog_type = _resolve_meteora_vaults(address)
            if vault_a and vault_b:
                logger.info("Meteora %s pool %s: vaults %s, %s", prog_type, address[:12], vault_a[:12], vault_b[:12])
                time.sleep(2)
                for vault_addr in [vault_a, vault_b]:
                    mint, amt = _fetch_vault_balance(vault_addr)
                    if mint and amt > 0:
                        if mint == USDC_MINT:
                            pool_usdc += amt
                        elif token_mint and mint == token_mint:
                            pool_tokens += amt
                    time.sleep(2)
            else:
                logger.warning("Meteora vault resolution failed for %s, falling back to getTokenAccountsByOwner", address[:12])

        # Fallback: getTokenAccountsByOwner (works for futarchy-amm and non-Meteora pools)
        if pool_usdc == 0 and pool_tokens == 0:
            payload = {
                "jsonrpc": "2.0",
                "id": 1,
                "method": "getTokenAccountsByOwner",
                "params": [
                    address,
                    {"programId": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA"},
                    {"encoding": "jsonParsed"},
                ],
            }
            for attempt in range(3):
                try:
                    req = urllib.request.Request(
                        SOLANA_RPC,
                        data=json.dumps(payload).encode(),
                        headers={"Content-Type": "application/json"},
                    )
                    with urllib.request.urlopen(req, timeout=15) as resp:
                        data = json.loads(resp.read())
                        if "error" in data:
                            code = data["error"].get("code", 0)
                            if code == 429 and attempt < 2:
                                wait = 10 * (attempt + 1)
                                logger.info("RPC rate limited for %s, retrying in %ds...", address[:12], wait)
                                time.sleep(wait)
                                continue
                            logger.warning("RPC error for LP %s: %s", address[:12], data["error"])
                            break
                        for acct in data.get("result", {}).get("value", []):
                            info = acct["account"]["data"]["parsed"]["info"]
                            mint = info["mint"]
                            amt = float(info["tokenAmount"]["uiAmountString"])
                            if amt == 0:
                                continue
                            if mint == USDC_MINT:
                                pool_usdc += amt
                            elif token_mint and mint == token_mint:
                                pool_tokens += amt
                        break
                except urllib.error.HTTPError as e:
                    if e.code == 429 and attempt < 2:
                        wait = 10 * (attempt + 1)
                        logger.info("RPC 429 for %s, retrying in %ds...", address[:12], wait)
                        time.sleep(wait)
                        continue
                    logger.warning("LP wallet query failed for %s (%s): %s", dex, address[:12], e)
                    break
                except Exception as e:
                    logger.warning("LP wallet query failed for %s (%s): %s", dex, address[:12], e)
                    break

        total_usdc += pool_usdc
        total_protocol_tokens += pool_tokens
        details.append({
            "dex": dex,
            "address": address,
            "usdc": round(pool_usdc, 2),
            "protocol_tokens": round(pool_tokens, 2),
        })
        time.sleep(5)

    return total_usdc, total_protocol_tokens, details

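# Derived metrics computed below:
#   cash_total          = treasury_multisig_usd + lp_usdc_total
#   effective_liq_price = cash_total / adjusted_circulating_supply
#   delta_pct           = (effective_liq_price / price_usd - 1) * 100
#   equity_value_usd    = market_cap_usd - cash_total
#   months_runway       = treasury_multisig_usd / monthly_allowance_usd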
def compute_derived(row, coin):
    """Compute effective liquidation price, delta, equity, runway."""
    price = row.get("price_usd")
    treasury = row.get("treasury_multisig_usd") or 0
    lp_total = row.get("lp_usdc_total") or 0
    mcap = row.get("market_cap_usd") or 0
    monthly = coin.get("monthly_allowance")
    protocol_tokens = row.get("protocol_owned_tokens") or 0
    total_supply = row.get("total_supply")

    cash_total = treasury + lp_total

    adj_circ = row.get("adjusted_circulating_supply")
    if not adj_circ and total_supply and total_supply > 0:
        adj_circ = total_supply - protocol_tokens
        row["adjusted_circulating_supply"] = adj_circ

    if adj_circ and adj_circ > 0:
        row["effective_liq_price"] = cash_total / adj_circ
        if price and price > 0:
            original_mcap = row.get("market_cap_usd")
            row["market_cap_usd"] = price * adj_circ
            mcap = row["market_cap_usd"]
            if original_mcap and abs(mcap - original_mcap) > 1:
                logger.debug("%s: adjusted mcap $%.0f (was $%.0f, protocol_owned=%s)",
                             row.get("name", "?"), mcap, original_mcap, protocol_tokens)
    if price and price > 0 and row.get("effective_liq_price"):
        row["delta_pct"] = ((row["effective_liq_price"] / price) - 1) * 100

    row["equity_value_usd"] = mcap - cash_total if mcap else None

    if monthly and monthly > 0 and treasury:
        row["months_runway"] = treasury / monthly

    return row

def upsert_snapshot(conn, row):
    """Insert or replace a daily snapshot."""
    conn.execute("""
        INSERT OR REPLACE INTO coin_snapshots (
            snapshot_date, name, ticker, token_mint, status,
            price_usd, market_cap_usd, fdv_usd,
            circulating_supply, total_supply,
            volume_24h_usd, liquidity_usd,
            treasury_multisig_usd, lp_usdc_total, lp_pools_detail,
            equity_value_usd, initial_price_usd, amount_raised_usd,
            monthly_allowance_usd, effective_liq_price, delta_pct,
            months_runway, protocol_owned_tokens, adjusted_circulating_supply,
            treasury_protocol_tokens, vesting_tokens,
            data_source, fetched_at
        ) VALUES (
            :snapshot_date, :name, :ticker, :token_mint, :status,
            :price_usd, :market_cap_usd, :fdv_usd,
            :circulating_supply, :total_supply,
            :volume_24h_usd, :liquidity_usd,
            :treasury_multisig_usd, :lp_usdc_total, :lp_pools_detail,
            :equity_value_usd, :initial_price_usd, :amount_raised_usd,
            :monthly_allowance_usd, :effective_liq_price, :delta_pct,
            :months_runway, :protocol_owned_tokens, :adjusted_circulating_supply,
            :treasury_protocol_tokens, :vesting_tokens,
            :data_source, :fetched_at
        )
    """, row)

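# Daily flow per coin: DexScreener price -> Solana RPC token supply -> treasury
# USDC and protocol-token balances -> vesting wallets -> LP pools, then derived
# metrics are computed and the row is upserted under today's date. The sleeps
# between calls keep the public RPC endpoint's rate limits happy.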
def cmd_daily(coins, conn):
    """Fetch current data for all coins and store today's snapshot."""
    today = datetime.date.today().isoformat()
    now = datetime.datetime.now(datetime.timezone.utc).isoformat()

    for coin in coins:
        mint = coin["token_mint"]
        if not mint:
            logger.info("Skipping %s — no token mint", coin["name"])
            continue

        logger.info("Fetching %s (%s)...", coin["name"], coin["ticker"])

        # Current price from DexScreener
        dex = fetch_dexscreener(mint)
        if not dex:
            logger.warning("DexScreener returned nothing for %s — trying last known price", coin["name"])
            last_row = conn.execute(
                "SELECT price_usd FROM coin_snapshots WHERE name=? AND price_usd IS NOT NULL ORDER BY snapshot_date DESC LIMIT 1",
                (coin["name"],)
            ).fetchone()
            if last_row and last_row[0]:
                dex = {"price_usd": last_row[0], "market_cap_usd": None, "fdv_usd": None, "volume_24h_usd": None, "liquidity_usd": None, "circulating_supply": None, "total_supply": None}
                logger.info(" Using last known price: $%.4f", last_row[0])
            else:
                logger.warning(" No historical price either — skipping %s", coin["name"])
                continue

        # Token supply from Solana RPC
        supply = fetch_solana_token_supply(mint)
        time.sleep(4)

        # Treasury USDC balance + protocol token balance
        treasury_usd = None
        treasury_tokens = 0.0
        if coin.get("treasury_multisig"):
            treasury_usd = fetch_solana_usdc_balance(coin["treasury_multisig"])
            time.sleep(2)
            treas_tok = fetch_solana_token_balance(coin["treasury_multisig"], mint)
            if treas_tok and treas_tok > 0:
                treasury_tokens = treas_tok
                logger.info(" %s treasury holds %.0f protocol tokens", coin["name"], treasury_tokens)
            time.sleep(2)

        time.sleep(4)

        # Vesting wallet scanning — tokens locked in vesting contracts
        vesting_tokens = 0.0
        if coin.get("vesting_wallets"):
            for vw in coin["vesting_wallets"]:
                vw_addr = vw.get("address") if isinstance(vw, dict) else vw
                if not vw_addr:
                    continue
                vt = fetch_solana_token_balance(vw_addr, mint)
                if vt and vt > 0:
                    vesting_tokens += vt
                    label = vw.get("label", vw_addr[:12]) if isinstance(vw, dict) else vw_addr[:12]
                    logger.info(" %s vesting wallet (%s) holds %.0f tokens", coin["name"], label, vt)
                time.sleep(2)

        # LP pool balances — query each wallet for USDC + protocol-owned tokens
        lp_total = 0.0
        protocol_tokens = 0.0
        lp_detail = None
        if coin.get("lp_pools"):
            lp_total, protocol_tokens, lp_details_list = fetch_lp_wallet_balances(
                coin["lp_pools"], mint
            )
            lp_detail = json.dumps(lp_details_list) if lp_details_list else None

        total_supply = supply.get("total_supply")

        # Adjusted circulating supply: total - LP tokens - treasury tokens
        investor_locked = float(coin.get("investor_locked_tokens") or 0)
        meteora_seed = float(coin.get("meteora_seed_tokens") or 0)
        all_protocol_tokens = protocol_tokens + treasury_tokens + vesting_tokens + investor_locked + meteora_seed
        if investor_locked > 0:
            logger.info(" %s investor locked tokens: %.0f", coin["name"], investor_locked)
        if meteora_seed > 0:
            logger.info(" %s meteora seed tokens: %.0f", coin["name"], meteora_seed)
        adj_circ = None
        if total_supply and total_supply > 0:
            adj_circ = total_supply - all_protocol_tokens

        # If we have adj_circ and price but no mcap, compute from adjusted supply
        if adj_circ and dex.get("price_usd"):
            dex["market_cap_usd"] = adj_circ * dex["price_usd"]
        elif total_supply and dex.get("price_usd") and not dex.get("market_cap_usd"):
            dex["market_cap_usd"] = total_supply * dex["price_usd"]

        row = {
            "snapshot_date": today,
            "name": coin["name"],
            "ticker": coin["ticker"],
            "token_mint": mint,
            "status": coin["status"],
            "price_usd": dex.get("price_usd"),
            "market_cap_usd": dex.get("market_cap_usd"),
            "fdv_usd": dex.get("fdv_usd"),
            "circulating_supply": dex.get("circulating_supply"),
            "total_supply": total_supply,
            "volume_24h_usd": dex.get("volume_24h_usd"),
            "liquidity_usd": dex.get("liquidity_usd"),
            "treasury_multisig_usd": treasury_usd,
            "lp_usdc_total": lp_total if lp_total else None,
            "lp_pools_detail": lp_detail,
            "equity_value_usd": None,
            "initial_price_usd": coin.get("initial_price"),
            "amount_raised_usd": coin.get("amount_raised"),
            "monthly_allowance_usd": coin.get("monthly_allowance"),
            "effective_liq_price": None,
            "delta_pct": None,
            "months_runway": None,
            "protocol_owned_tokens": all_protocol_tokens if all_protocol_tokens else None,
            "treasury_protocol_tokens": treasury_tokens if treasury_tokens else None,
            "vesting_tokens": vesting_tokens if vesting_tokens else None,
            "adjusted_circulating_supply": adj_circ,
            "data_source": "dexscreener+solana_rpc",
            "fetched_at": now,
        }

        row = compute_derived(row, coin)
        upsert_snapshot(conn, row)
        lp_msg = f" lp_usdc=${row.get('lp_usdc_total') or 0:,.0f} lp_tokens={protocol_tokens:,.0f} treas_tokens={treasury_tokens:,.0f}" if row.get("lp_usdc_total") or treasury_tokens else ""
        logger.info(" %s: $%.4f mcap=$%s adj_circ=%s%s",
                    coin["name"], row["price_usd"] or 0,
                    f'{row["market_cap_usd"]:,.0f}' if row["market_cap_usd"] else "N/A",
                    f'{row["adjusted_circulating_supply"]:,.0f}' if row.get("adjusted_circulating_supply") else "N/A",
                    lp_msg)
        time.sleep(1)

    conn.commit()
    logger.info("Daily snapshot complete for %s", today)

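# Backfill rows carry only price/market-cap/volume from CoinGecko; the on-chain
# columns stay NULL, and data_source distinguishes them ("coingecko_history" vs
# "dexscreener+solana_rpc") from daily RPC snapshots.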
def cmd_backfill(coins, conn, days=365):
    """Backfill historical daily prices from CoinGecko."""
    now = datetime.datetime.now(datetime.timezone.utc).isoformat()

    for coin in coins:
        mint = coin["token_mint"]
        if not mint:
            logger.info("Skipping %s — no token mint", coin["name"])
            continue

        logger.info("Backfilling %s (%s) — %d days...", coin["name"], coin["ticker"], days)
        history = fetch_coingecko_history(mint, days=days)

        if not history:
            logger.warning("No CoinGecko history for %s", coin["name"])
            time.sleep(COINGECKO_RATE_LIMIT)
            continue

        inserted = 0
        for point in history:
            row = {
                "snapshot_date": point["date"],
                "name": coin["name"],
                "ticker": coin["ticker"],
                "token_mint": mint,
                "status": coin["status"],
                "price_usd": point["price_usd"],
                "market_cap_usd": point.get("market_cap_usd"),
                "fdv_usd": None,
                "circulating_supply": None,
                "total_supply": None,
                "volume_24h_usd": point.get("volume_24h_usd"),
                "liquidity_usd": None,
                "treasury_multisig_usd": None,
                "lp_usdc_total": None,
                "lp_pools_detail": None,
                "equity_value_usd": None,
                "initial_price_usd": coin.get("initial_price"),
                "amount_raised_usd": coin.get("amount_raised"),
                "monthly_allowance_usd": coin.get("monthly_allowance"),
                "effective_liq_price": None,
                "delta_pct": None,
                "months_runway": None,
                "protocol_owned_tokens": None,
                "adjusted_circulating_supply": None,
                "treasury_protocol_tokens": None,
                "vesting_tokens": None,
                "data_source": "coingecko_history",
                "fetched_at": now,
            }
            upsert_snapshot(conn, row)
            inserted += 1

        conn.commit()
        logger.info(" %s: %d daily snapshots inserted", coin["name"], inserted)
        time.sleep(COINGECKO_RATE_LIMIT)

    logger.info("Backfill complete")

def main():
    parser = argparse.ArgumentParser(description="Ownership coin portfolio data fetcher")
    parser.add_argument("--daily", action="store_true", help="Fetch today's snapshot")
    parser.add_argument("--backfill", action="store_true", help="Backfill historical prices")
    parser.add_argument("--backfill-days", type=int, default=365, help="Days to backfill (default: 365)")
    args = parser.parse_args()

    if not args.daily and not args.backfill:
        parser.error("Specify --daily or --backfill")

    coins = load_ownership_coins()
    logger.info("Loaded %d ownership coins (%d with token mints)",
                len(coins), sum(1 for c in coins if c["token_mint"]))

    conn = sqlite3.connect(str(DB_PATH), timeout=30)
    conn.execute("PRAGMA journal_mode=WAL")
    conn.execute("PRAGMA busy_timeout=30000")
    ensure_schema(conn)

    try:
        if args.backfill:
            cmd_backfill(coins, conn, days=args.backfill_days)
        if args.daily:
            cmd_daily(coins, conn)
    finally:
        conn.close()

if __name__ == "__main__":
    main()