diff --git a/.github/workflows/usdai-mint-monitor.yml b/.github/workflows/usdai-mint-monitor.yml
new file mode 100644
index 0000000..d33e2ef
--- /dev/null
+++ b/.github/workflows/usdai-mint-monitor.yml
@@ -0,0 +1,20 @@
+name: USDai Large Mint Monitor
+
+on:
+  schedule:
+    - cron: "0/10 * * * *"
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  usdai-large-mints:
+    uses: ./.github/workflows/_run-monitoring.yml
+    secrets: inherit
+    with:
+      cache_file: cache-id.txt
+      cache_key_prefix: cache-id-usdai-mints-v1
+      scripts: |
+        usdai/large_mints.py
diff --git a/usdai/large_mints.py b/usdai/large_mints.py
new file mode 100644
index 0000000..06c6280
--- /dev/null
+++ b/usdai/large_mints.py
@@ -0,0 +1,199 @@
+#!/usr/bin/env python3
+"""Monitor large USDai mints on Arbitrum with event and supply-delta checks."""
+
+from decimal import Decimal, getcontext
+
+from web3 import Web3
+
+from utils.abi import load_abi
+from utils.alert import Alert, AlertSeverity, send_alert
+from utils.cache import cache_filename, get_last_value_for_key_from_file, write_last_value_to_file
+from utils.chains import Chain
+from utils.config import Config
+from utils.logging import get_logger
+from utils.web3_wrapper import ChainManager
+
+getcontext().prec = 40
+
+PROTOCOL = "usdai"
+logger = get_logger(f"{PROTOCOL}.large_mints")
+
+USDAI_TOKEN_ADDR = Web3.to_checksum_address("0x0A1a1A107E45b7Ced86833863f482BC5f4ed82EF")
+
+MINT_THRESHOLD_TOKENS = Decimal(Config.get_env("USDAI_LARGE_MINT_THRESHOLD", "100000"))
+CONFIRMATIONS = Config.get_env_int("USDAI_MINT_CONFIRMATIONS", 10)
+INITIAL_LOOKBACK_BLOCKS = Config.get_env_int("USDAI_MINT_INITIAL_LOOKBACK_BLOCKS", 3000)
+LOG_CHUNK_SIZE = Config.get_env_int("USDAI_MINT_LOG_CHUNK_SIZE", 2000)
+MAX_LINES_IN_ALERT = Config.get_env_int("USDAI_MINT_MAX_LINES_IN_ALERT", 10)
+
+CACHE_KEY_LAST_BLOCK = f"{PROTOCOL}_large_mints_last_block"
+CACHE_KEY_LAST_SUPPLY = f"{PROTOCOL}_large_mints_last_supply"
+
+TRANSFER_TOPIC0 = "0x" + Web3.keccak(text="Transfer(address,address,uint256)").hex()
+ZERO_TOPIC = "0x" + "0" * 64
+
+
+def _to_int(value) -> int:
+    try:
+        return int(value)
+    except (TypeError, ValueError):
+        return 0
+
+
+def _format_units(raw_value: int, decimals: int) -> Decimal:
+    return Decimal(raw_value) / (Decimal(10) ** decimals)
+
+
+def _extract_topic_address(topic) -> str:
+    topic_hex = topic.hex() if hasattr(topic, "hex") else str(topic)
+    return Web3.to_checksum_address("0x" + topic_hex[-40:])
+
+
+def _fetch_mint_logs(client, from_block: int, to_block: int) -> list:
+    logs = []
+    if from_block > to_block:
+        return logs
+
+    for start in range(from_block, to_block + 1, LOG_CHUNK_SIZE):
+        end = min(start + LOG_CHUNK_SIZE - 1, to_block)
+        params = {
+            "address": USDAI_TOKEN_ADDR,
+            "fromBlock": start,
+            "toBlock": end,
+            "topics": [TRANSFER_TOPIC0, ZERO_TOPIC],
+        }
+        chunk_logs = client.eth.get_logs(params)
+        logs.extend(chunk_logs)
+
+    return logs
+
+
+def _send_large_mints_alert(entries: list[dict], decimals: int, from_block: int, to_block: int) -> None:
+    explorer = Chain.ARBITRUM.explorer_url
+    lines = [
+        "*USDai Large Mint Alert*",
+        f"Window: blocks {from_block:,} - {to_block:,}",
+        f"Threshold: {MINT_THRESHOLD_TOKENS:,.0f} USDai",
+        f"Detected: {len(entries)} large mint(s)",
+        "",
+    ]
+
+    for entry in entries[:MAX_LINES_IN_ALERT]:
+        amount = _format_units(entry["amount_raw"], decimals)
+        tx_hash = entry["tx_hash"]
+        recipient = entry["to"]
+        tx_line = f"[{tx_hash}]({explorer}/tx/{tx_hash})" if explorer else tx_hash
+        recipient_line = f"[{recipient}]({explorer}/address/{recipient})" if explorer else recipient
+        lines.append(f"- {amount:,.2f} USDai -> {recipient_line} | block {entry['block_number']} | tx {tx_line}")
+
+    extra_count = len(entries) - MAX_LINES_IN_ALERT
+    if extra_count > 0:
+        lines.append(f"- ...and {extra_count} more")
+
+    send_alert(Alert(AlertSeverity.MEDIUM, "\n".join(lines), PROTOCOL))
+
+
+def _send_unexplained_supply_jump_alert(
+    supply_delta_raw: int, mint_events_total_raw: int, decimals: int, from_block: int, to_block: int
+) -> None:
+    supply_delta = _format_units(supply_delta_raw, decimals)
+    mint_events_total = _format_units(mint_events_total_raw, decimals)
+    unexplained = _format_units(max(supply_delta_raw - mint_events_total_raw, 0), decimals)
+
+    msg = (
+        "*USDai Supply Jump (No Matching Mint Events)*\n\n"
+        f"Window: blocks {from_block:,} - {to_block:,}\n"
+        f"Total Supply Increase: {supply_delta:,.2f} USDai\n"
+        f"Sum of Transfer-based mint events: {mint_events_total:,.2f} USDai\n"
+        f"Unexplained increase: {unexplained:,.2f} USDai\n\n"
+        "Potential non-standard mint path detected. Please investigate bridge/admin mint paths."
+    )
+    send_alert(Alert(AlertSeverity.MEDIUM, msg, PROTOCOL))
+
+
+def main() -> None:
+    client = ChainManager.get_client(Chain.ARBITRUM)
+    erc20_abi = load_abi("common-abi/ERC20.json")
+    usdai = client.get_contract(USDAI_TOKEN_ADDR, erc20_abi)
+
+    try:
+        decimals = int(usdai.functions.decimals().call())
+        threshold_raw = int(MINT_THRESHOLD_TOKENS * (Decimal(10) ** decimals))
+
+        latest_block = int(client.eth.block_number)
+        to_block = latest_block - CONFIRMATIONS
+        if to_block <= 0:
+            logger.warning("Latest block is too low to scan safely (latest=%s).", latest_block)
+            return
+
+        last_block_cached = _to_int(get_last_value_for_key_from_file(cache_filename, CACHE_KEY_LAST_BLOCK))
+        last_supply_cached = _to_int(get_last_value_for_key_from_file(cache_filename, CACHE_KEY_LAST_SUPPLY))
+
+        if last_block_cached > 0:
+            from_block = last_block_cached + 1
+        else:
+            from_block = max(0, to_block - INITIAL_LOOKBACK_BLOCKS + 1)
+            logger.info(
+                "No cached block found. Initializing scan window to last %s blocks (%s -> %s).",
+                INITIAL_LOOKBACK_BLOCKS,
+                from_block,
+                to_block,
+            )
+
+        if from_block > to_block:
+            logger.info("No new finalized blocks to scan. from_block=%s to_block=%s", from_block, to_block)
+            write_last_value_to_file(cache_filename, CACHE_KEY_LAST_BLOCK, to_block)
+            current_supply_raw = int(usdai.functions.totalSupply().call(block_identifier=to_block))
+            write_last_value_to_file(cache_filename, CACHE_KEY_LAST_SUPPLY, current_supply_raw)
+            return
+
+        mint_logs = _fetch_mint_logs(client, from_block, to_block)
+        logger.info("Fetched %s mint-like Transfer logs from block %s to %s", len(mint_logs), from_block, to_block)
+
+        large_mints: list[dict] = []
+        mint_events_total_raw = 0
+
+        for log in mint_logs:
+            amount_raw = int(log["data"].hex(), 16) if hasattr(log["data"], "hex") else int(str(log["data"]), 16)
+            mint_events_total_raw += amount_raw
+            if amount_raw >= threshold_raw:
+                to_addr = _extract_topic_address(log["topics"][2])
+                tx_hash = (
+                    log["transactionHash"].hex()
+                    if hasattr(log["transactionHash"], "hex")
+                    else str(log["transactionHash"])
+                )
+                large_mints.append(
+                    {
+                        "amount_raw": amount_raw,
+                        "to": to_addr,
+                        "tx_hash": tx_hash,
+                        "block_number": int(log["blockNumber"]),
+                    }
+                )
+
+        if large_mints:
+            _send_large_mints_alert(large_mints, decimals, from_block, to_block)
+
+        current_supply_raw = int(usdai.functions.totalSupply().call(block_identifier=to_block))
+        if last_supply_cached > 0:
+            supply_delta_raw = current_supply_raw - last_supply_cached
+            if supply_delta_raw >= threshold_raw and supply_delta_raw > mint_events_total_raw:
+                _send_unexplained_supply_jump_alert(
+                    supply_delta_raw=supply_delta_raw,
+                    mint_events_total_raw=mint_events_total_raw,
+                    decimals=decimals,
+                    from_block=from_block,
+                    to_block=to_block,
+                )
+
+        write_last_value_to_file(cache_filename, CACHE_KEY_LAST_BLOCK, to_block)
+        write_last_value_to_file(cache_filename, CACHE_KEY_LAST_SUPPLY, current_supply_raw)
+
+    except Exception as exc:
+        logger.error("USDai large mint monitoring failed: %s", exc)
+        send_alert(Alert(AlertSeverity.MEDIUM, f"USDai large mint monitor failed: {exc}", PROTOCOL), plain_text=True)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/usdai/main.py b/usdai/main.py
index 0262d66..f29300b 100644
--- a/usdai/main.py
+++ b/usdai/main.py
@@ -1,6 +1,5 @@
 import datetime
 
-import requests
 from web3 import Web3
 
 from utils.abi import load_abi
@@ -8,6 +7,7 @@
 from utils.cache import cache_filename, get_last_value_for_key_from_file, write_last_value_to_file
 from utils.chains import Chain
 from utils.config import Config
+from utils.defillama import fetch_prices
 from utils.logging import get_logger
 from utils.web3_wrapper import ChainManager
 
@@ -15,13 +15,30 @@
 PROTOCOL = "usdai"
 logger = get_logger(PROTOCOL)
 
-VAULT_ADDR = Web3.to_checksum_address("0x0A1a1A107E45b7Ced86833863f482BC5f4ed82EF")
-WM_TOKEN = Web3.to_checksum_address("0x437cc33344a0b27a429f795ff6b469c72698b291")
+USDAI_VAULT_ADDR = Web3.to_checksum_address("0x0A1a1A107E45b7Ced86833863f482BC5f4ed82EF")
+PYUSD_TOKEN_ADDR = Web3.to_checksum_address("0x46850aD61C2B7d64d08c9C754F45254596696984")
 SUSDAI_ADDR = Web3.to_checksum_address("0x0B2b2B2076d95dda7817e785989fE353fe955ef9")
 
-GRAPHQL_URL = "https://protocol-api.m0.org/graphql"
+LOAN_ROUTER_ADDR = Web3.to_checksum_address("0x0C2ED170F2bB1DF1a44292Ad621B577b3C9597D1")
+# Alert thresholds (absolute deviation from 1.0)
+# Raised defaults to reduce alert noise; still overrideable via env vars.
+USDAI_PYUSD_WARN_DEVIATION = Config.get_env_float("USDAI_PYUSD_WARN_DEVIATION", 0.003)  # 0.30%
+USDAI_PYUSD_CRITICAL_DEVIATION = Config.get_env_float("USDAI_PYUSD_CRITICAL_DEVIATION", 0.01)  # 1.00%
+PYUSD_USD_WARN_DEVIATION = Config.get_env_float("PYUSD_USD_WARN_DEVIATION", 0.003)  # 0.30%
+PYUSD_USD_CRITICAL_DEVIATION = Config.get_env_float("PYUSD_USD_CRITICAL_DEVIATION", 0.0075)  # 0.75%
 
-LOAN_ROUTER_ADDR = Web3.to_checksum_address("0x0C2ED170F2bB1DF1a44292Ad621B577b3C9597D1")
+
+def send_breach_alert_once(cache_key, alert_message, severity=AlertSeverity.HIGH):
+    last_state = int(get_last_value_for_key_from_file(cache_filename, cache_key))
+    if last_state == 0:
+        send_alert(Alert(severity, alert_message, PROTOCOL))
+        write_last_value_to_file(cache_filename, cache_key, 1)
+
+
+def clear_breach_state(cache_key):
+    last_state = int(get_last_value_for_key_from_file(cache_filename, cache_key))
+    if last_state == 1:
+        write_last_value_to_file(cache_filename, cache_key, 0)
 
 
 def get_loan_details(client, owner_addr):
@@ -71,71 +88,111 @@ def main():
 
     # Common ABI
     erc20_abi = load_abi("common-abi/ERC20.json")
-    wm = client.get_contract(WM_TOKEN, erc20_abi)
+    usdai = client.get_contract(USDAI_VAULT_ADDR, erc20_abi)
+    pyusd = client.get_contract(PYUSD_TOKEN_ADDR, erc20_abi)
 
     try:
-        # --- On-Chain Supply ---
-        # USDai Supply (wM held by Vault)
-        vault_shares = wm.functions.balanceOf(VAULT_ADDR).call()
-        # Decimals will always be the same = 6
-        wm_decimals = 6
-        usdai_supply_fmt = vault_shares / (10**wm_decimals)
-
-        # 2. Get Mint Ratio via API
-        query = """
-        query {
-          mintRatio: protocolConfigs(
-            where: {key: "mint_ratio"}
-            orderBy: blockTimestamp
-            orderDirection: desc
-            first: 1
-          ) {
-            value
-            blockTimestamp
-          }
-        }
-        """
-
-        mint_ratio = 10000  # Default to 1:1 (scaled 1e4 for bps) if not found
-
-        try:
-            res = requests.post(GRAPHQL_URL, json={"query": query}, timeout=Config.get_request_timeout())
-            if res.status_code == 200:
-                data = res.json().get("data", {})
-
-                # --- Mint Ratio ---
-                configs = data.get("mintRatio", [])
-                if configs:
-                    # Mint ratio is scaled by 1e4 (e.g. 9950 = 99.5%)
-                    mint_ratio_raw = int(configs[0].get("value", 10000))
-                    mint_ratio = mint_ratio_raw
-
-        except Exception as e:
-            logger.error("API Error: %s", e)
-
-        # Derived Collateral from Mint Ratio
-        # Scaling: mint_ratio is in bps (1e4).
-        # So mint_ratio_fmt = mint_ratio / 10000.
-        mint_ratio_fmt = mint_ratio / 10000
-
-        # Avoid division by zero
-        required_collateral = 0
-        if mint_ratio_fmt > 0:
-            required_collateral = usdai_supply_fmt / mint_ratio_fmt
+        # --- 1) USDai / pyUSD Backing Ratio ---
+        with client.batch_requests() as batch:
+            batch.add(usdai.functions.decimals())
+            batch.add(usdai.functions.totalSupply())
+            batch.add(pyusd.functions.decimals())
+            batch.add(pyusd.functions.symbol())
+            batch.add(pyusd.functions.balanceOf(USDAI_VAULT_ADDR))
+        usdai_decimals, usdai_supply_raw, pyusd_decimals, pyusd_symbol, pyusd_assets_raw = client.execute_batch(
+            batch
+        )
+
+        usdai_supply_fmt = usdai_supply_raw / (10**usdai_decimals)
+        pyusd_assets_fmt = pyusd_assets_raw / (10**pyusd_decimals)
+        backing_ratio = (pyusd_assets_fmt / usdai_supply_fmt) if usdai_supply_fmt > 0 else 0
+        backing_deviation = abs(backing_ratio - 1)
 
         logger.info("--- USDai Stats ---")
         logger.info("USDai Supply: $%s", f"{usdai_supply_fmt:,.2f}")
-        logger.info("Mint Ratio: %s", f"{mint_ratio_fmt:.4f}")
-
-        collateral_metric = required_collateral
-        # Buffer = Collateral - Supply
-        buffer = collateral_metric - usdai_supply_fmt
+        logger.info("%s Assets: $%s", pyusd_symbol, f"{pyusd_assets_fmt:,.2f}")
+        logger.info("Backing Ratio: %s %s / USDai", f"{backing_ratio:.6f}", pyusd_symbol)
+
+        cache_key_backing_warn = f"{PROTOCOL}_backing_ratio_warn_breach"
+        cache_key_backing_critical = f"{PROTOCOL}_backing_ratio_critical_breach"
+
+        if usdai_supply_fmt > 0:
+            if backing_deviation >= USDAI_PYUSD_CRITICAL_DEVIATION:
+                send_breach_alert_once(
+                    cache_key=cache_key_backing_critical,
+                    severity=AlertSeverity.CRITICAL,
+                    alert_message=(
+                        "*USDai Backing Ratio Critical*\n\n"
+                        f"{pyusd_symbol} per USDai: {backing_ratio:.6f}\n"
+                        f"Deviation from 1.0: {backing_deviation:.3%}\n"
+                        f"{pyusd_symbol} Assets: ${pyusd_assets_fmt:,.2f}\n"
+                        f"USDai Supply: ${usdai_supply_fmt:,.2f}"
+                    ),
+                )
+            else:
+                clear_breach_state(cache_key_backing_critical)
+
+            if USDAI_PYUSD_WARN_DEVIATION <= backing_deviation < USDAI_PYUSD_CRITICAL_DEVIATION:
+                send_breach_alert_once(
+                    cache_key=cache_key_backing_warn,
+                    severity=AlertSeverity.HIGH,
+                    alert_message=(
+                        "*USDai Backing Ratio Alert*\n\n"
+                        f"{pyusd_symbol} per USDai: {backing_ratio:.6f}\n"
+                        f"Deviation from 1.0: {backing_deviation:.3%}\n"
+                        f"{pyusd_symbol} Assets: ${pyusd_assets_fmt:,.2f}\n"
+                        f"USDai Supply: ${usdai_supply_fmt:,.2f}"
+                    ),
+                )
+            else:
+                clear_breach_state(cache_key_backing_warn)
 
-        logger.info("Collateral: $%s", f"{collateral_metric:,.2f}")
-        logger.info("Buffer: $%s", f"{buffer:,.2f}")
+        # --- 2) pyUSD / USD Peg ---
+        pyusd_key = f"{Chain.ARBITRUM.network_name}:{PYUSD_TOKEN_ADDR.lower()}"
+        pyusd_price = None
+        try:
+            prices = fetch_prices([pyusd_key])
+            pyusd_price = prices.get(pyusd_key)
+        except Exception as e:
+            logger.error("pyUSD price fetch error: %s", e)
+
+        if pyusd_price is not None:
+            pyusd_price = float(pyusd_price)
+            pyusd_price_deviation = abs(pyusd_price - 1)
+            logger.info("%s / USD: %s", pyusd_symbol, f"{pyusd_price:.6f}")
+
+            cache_key_peg_warn = f"{PROTOCOL}_pyusd_peg_warn_breach"
+            cache_key_peg_critical = f"{PROTOCOL}_pyusd_peg_critical_breach"
+
+            if pyusd_price_deviation >= PYUSD_USD_CRITICAL_DEVIATION:
+                send_breach_alert_once(
+                    cache_key=cache_key_peg_critical,
+                    severity=AlertSeverity.CRITICAL,
+                    alert_message=(
+                        f"*{pyusd_symbol}/USD Peg Critical*\n\n"
+                        f"{pyusd_symbol}/USD: ${pyusd_price:.6f}\n"
+                        f"Deviation from $1: {pyusd_price_deviation:.3%}"
+                    ),
+                )
+            else:
+                clear_breach_state(cache_key_peg_critical)
+
+            if PYUSD_USD_WARN_DEVIATION <= pyusd_price_deviation < PYUSD_USD_CRITICAL_DEVIATION:
+                send_breach_alert_once(
+                    cache_key=cache_key_peg_warn,
+                    severity=AlertSeverity.HIGH,
+                    alert_message=(
+                        f"*{pyusd_symbol}/USD Peg Alert*\n\n"
+                        f"{pyusd_symbol}/USD: ${pyusd_price:.6f}\n"
+                        f"Deviation from $1: {pyusd_price_deviation:.3%}"
+                    ),
+                )
+            else:
+                clear_breach_state(cache_key_peg_warn)
+        else:
+            logger.warning("No price returned for %s (%s)", pyusd_symbol, pyusd_key)
 
         # --- Loan Monitoring (GPU Loans) ---
-
         all_loans = get_loan_details(client, SUSDAI_ADDR)
 
         # --- Manual Adjustment for Legacy Loan ---
@@ -190,7 +247,7 @@
             )
 
             msg = (
-                f"*sUSDai Loan Activity*\n\n"
+                "*sUSDai Loan Activity*\n\n"
                 f"Total Verified Principal has {change_type}.\n"
                 f"Change: ${diff:,.2f} ({percent_change:.2f}% of Total Loans)\n"
                 f"Old Total: ${last_principal:,.2f}\n"
@@ -202,38 +259,6 @@
         # Update cache
         write_last_value_to_file(cache_filename, cache_key_principal, total_verified_principal)
 
-        if collateral_metric > 0:
-            # 1. Check for Mint Ratio Change (Critical)
-            cache_key_ratio = f"{PROTOCOL}_mint_ratio"
-            last_ratio = int(get_last_value_for_key_from_file(cache_filename, cache_key_ratio))
-
-            if last_ratio != 0 and last_ratio != mint_ratio:
-                msg = f"*USDai Mint Ratio Changed*\n\nOld: {last_ratio / 10000:.4f}\nNew: {mint_ratio / 10000:.4f}"
-                send_alert(Alert(AlertSeverity.HIGH, msg, PROTOCOL))
-
-            # Always update ratio cache
-            write_last_value_to_file(cache_filename, cache_key_ratio, mint_ratio)
-
-            # 2. Check for Low Buffer (ignore withdrawals)
-            cache_key_buffer = f"{PROTOCOL}_buffer"
-            last_buffer = float(get_last_value_for_key_from_file(cache_filename, cache_key_buffer))
-
-            # Only alert when buffer drops below $1,000,000
-            buffer_alert_threshold = 1_000_000
-            if last_buffer != 0:
-                crossed_below = last_buffer >= buffer_alert_threshold and buffer < buffer_alert_threshold
-                if crossed_below:
-                    msg = (
-                        "*USDai Low Buffer Alert*\n\n"
-                        f"Buffer dropped below ${buffer_alert_threshold:,.0f}.\n"
-                        f"Old Buffer: ${last_buffer:,.2f}\n"
-                        f"New Buffer: ${buffer:,.2f}\n"
-                        f"(Collateral: ${collateral_metric:,.2f})"
-                    )
-                    send_alert(Alert(AlertSeverity.HIGH, msg, PROTOCOL))
-
-            write_last_value_to_file(cache_filename, cache_key_buffer, buffer)
-
     except Exception as e:
         logger.error("Error: %s", e)
         send_alert(Alert(AlertSeverity.LOW, f"USDai monitoring failed: {e}", PROTOCOL), plain_text=True)