diff --git a/notebooks/01_data_collection.ipynb b/notebooks/01_data_collection.ipynb new file mode 100644 index 0000000..377bcab --- /dev/null +++ b/notebooks/01_data_collection.ipynb @@ -0,0 +1,376 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 01 — TEC Data Collection\n", + "\n", + "Pull all on-chain data for the TEC token, ABC transactions, and conviction voting proposals.\n", + "\n", + "## Data targets:\n", + "1. **ABC Buy/Sell Orders** — every mint/burn through the bonding curve\n", + "2. **TEC Token Transfers** — all ERC-20 transfers\n", + "3. **Conviction Voting Proposals** — all proposals, stakes, outcomes\n", + "4. **Honeyswap Trades** — secondary market TEC/xDAI trades\n", + "5. **Reserve & Common Pool balances** — over time" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import json\n", + "import time\n", + "import requests\n", + "import pandas as pd\n", + "from web3 import Web3\n", + "from datetime import datetime\n", + "\n", + "# Gnosis Chain RPC\n", + "RPC_URL = \"https://rpc.gnosischain.com\"\n", + "w3 = Web3(Web3.HTTPProvider(RPC_URL))\n", + "print(f\"Connected to Gnosis Chain: {w3.is_connected()}, block: {w3.eth.block_number}\")\n", + "\n", + "# Contract addresses\n", + "TEC_TOKEN = \"0x5dF8339c5E282ee48c0c7cE8A7d01a73D38B3B27\"\n", + "ABC_CONTRACT = \"0x74ade20c12067e2f9457c037809a73f35694f99f\"\n", + "RESERVE_POOL = \"0x4a3C145c35Fa0daa58Cb5BD93CE905B086087246\"\n", + "COMMON_POOL = \"0xb941365430a16659658bb23b88efaede1d839354\"\n", + "\n", + "ABC_START_BLOCK = 20087409\n", + "TOKEN_START_BLOCK = 20086944\n", + "\n", + "DATA_DIR = \"../data/onchain\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. 
def get_logs_chunked(address, topics, from_block, to_block=None, chunk_size=50000):
    """Fetch event logs over [from_block, to_block] in fixed-size chunks.

    Public RPC endpoints cap the size of an ``eth_getLogs`` response, so the
    range is walked in ``chunk_size``-block windows. On a provider error the
    failing window is retried recursively with chunks 5x smaller.

    Parameters
    ----------
    address : str
        Contract address to filter on (checksummed before the call).
    topics : list
        Topic filter list, as accepted by ``eth_getLogs``.
    from_block : int
        First block of the range (inclusive).
    to_block : int, optional
        Last block (inclusive); defaults to the current chain head.
    chunk_size : int
        Blocks per request; reduced on error down to a 5000-block floor.

    Returns
    -------
    list
        All matching log entries, in ascending block order.

    Raises
    ------
    Exception
        Re-raised from the RPC once the chunk size cannot shrink further.
        (The original version silently dropped the failing range, which
        produced incomplete datasets with no warning.)
    """
    if to_block is None:
        to_block = w3.eth.block_number

    all_logs = []
    current = from_block

    while current <= to_block:
        end = min(current + chunk_size - 1, to_block)
        try:
            logs = w3.eth.get_logs({
                'address': Web3.to_checksum_address(address),
                'topics': topics,
                'fromBlock': current,
                'toBlock': end,
            })
            all_logs.extend(logs)
            print(f" Blocks {current}-{end}: {len(logs)} events (total: {len(all_logs)})")
        except Exception as e:
            print(f" Error at {current}-{end}: {e}")
            if chunk_size <= 5000:
                # Smallest chunk size still failing: propagate instead of
                # silently losing this range's events.
                raise
            all_logs.extend(get_logs_chunked(address, topics, current, end, chunk_size // 5))
        current = end + 1
        time.sleep(0.2)  # crude rate limiting for the public RPC

    return all_logs


# block_number -> unix timestamp; many events share a block, so caching
# avoids redundant eth_getBlock round-trips during parsing.
_BLOCK_TS_CACHE = {}


def parse_abc_event(log, event_type, block_timestamp=None):
    """Decode an ABC ``MakeBuyOrder``/``MakeSellOrder`` log into a flat dict.

    Indexed params (buyer/seller, onBehalfOf, collateral) live in
    ``topics[1:]``; the non-indexed words in ``data`` are, in order:
    fee, purchase/sell amount, returned amount, fee percentage.

    Parameters
    ----------
    log : dict-like
        Raw log entry; ``topics``/``data``/``transactionHash`` may be
        bytes (web3.py HexBytes) or hex strings.
    event_type : str
        Tag stored on the record, e.g. ``'buy'`` or ``'sell'``.
    block_timestamp : int, optional
        Unix timestamp of the log's block. When omitted it is fetched
        via ``eth_getBlock`` (cached in ``_BLOCK_TS_CACHE``).

    Returns
    -------
    dict
        Record with raw wei fields plus human-readable 18-decimal values.
    """
    topics = log['topics']
    data = log['data'].hex() if isinstance(log['data'], bytes) else log['data']
    if data.startswith('0x'):
        data = data[2:]

    # Split non-indexed data into 32-byte (64 hex char) ABI words.
    chunks = [data[i:i + 64] for i in range(0, len(data), 64)]

    if block_timestamp is None:
        bn = log['blockNumber']
        if bn not in _BLOCK_TS_CACHE:
            _BLOCK_TS_CACHE[bn] = w3.eth.get_block(bn)['timestamp']
        block_timestamp = _BLOCK_TS_CACHE[bn]

    def topic_addr(t):
        # An indexed address is right-aligned in a 32-byte topic word.
        h = t.hex() if isinstance(t, bytes) else t
        return '0x' + h[-40:]

    tx = log['transactionHash']
    result = {
        'block_number': log['blockNumber'],
        'tx_hash': tx.hex() if isinstance(tx, bytes) else tx,
        # NOTE: naive UTC datetime, matching the original behavior.
        'timestamp': datetime.utcfromtimestamp(block_timestamp),
        'event_type': event_type,
        'actor': topic_addr(topics[1]),
        'on_behalf_of': topic_addr(topics[2]),
        'collateral': topic_addr(topics[3]),
        'fee_wei': int(chunks[0], 16),
        'amount_wei': int(chunks[1], 16),  # purchaseAmount or sellAmount
        'returned_wei': int(chunks[2], 16),
        'fee_pct': int(chunks[3], 16),
    }
    # Human-readable values (all tokens involved use 18 decimals).
    result['fee'] = result['fee_wei'] / 1e18
    result['amount'] = result['amount_wei'] / 1e18
    result['returned'] = result['returned_wei'] / 1e18

    return result
def parse_transfer(log):
    """Decode a standard ERC-20 ``Transfer(from, to, value)`` log.

    Accepts topics/data/transactionHash as either bytes or hex strings
    and returns a flat record with raw-wei and 18-decimal values.
    """
    def topic_addr(topic):
        # Indexed addresses are right-aligned in the 32-byte topic word.
        h = topic.hex() if isinstance(topic, bytes) else topic
        return '0x' + h[-40:]

    raw = log['data']
    hex_data = raw.hex() if isinstance(raw, bytes) else raw
    if hex_data.startswith('0x'):
        hex_data = hex_data[2:]
    value_wei = int(hex_data[:64], 16)

    tx = log['transactionHash']
    return {
        'block_number': log['blockNumber'],
        'tx_hash': tx.hex() if isinstance(tx, bytes) else tx,
        'from': topic_addr(log['topics'][1]),
        'to': topic_addr(log['topics'][2]),
        'value_wei': value_wei,
        'value': value_wei / 1e18,
    }
# Strategy: the conviction-voting contract is unknown, but every funded
# proposal must draw TEC out of the common pool. Filtering Transfer events
# on `from == common pool` surfaces those disbursements (and the CV app
# address as the counterparty).
print("Common Pool address:", COMMON_POOL)
print("This is where conviction voting proposals draw funds from.")
print("\nWe need to find the CV contract by looking at outgoing transfers from the common pool.")
print("These would correspond to funded proposals.")

# An address topic is the 20-byte address right-aligned in a 32-byte word.
ZERO_PADDED_COMMON_POOL = '0x' + COMMON_POOL[2:].lower().rjust(64, '0')

print("\nFetching transfers FROM common pool...")
common_pool_out_logs = get_logs_chunked(
    TEC_TOKEN,
    # topics[0] = Transfer signature, topics[1] = indexed `from` filter
    [TRANSFER_TOPIC, ZERO_PADDED_COMMON_POOL],
    TOKEN_START_BLOCK,
)
print(f"Outgoing transfers from common pool: {len(common_pool_out_logs)}")
" print(f\"Common pool outflows:\")\n", + " print(df_cp_out[['block_number', 'to', 'value']].to_string())\n", + "else:\n", + " print(\"No outflows found — common pool may use a different token (xDAI/WXDAI) for disbursements\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 4. Quick Sanity Check — Current State" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Check current TEC token supply and key balances\n", + "erc20_abi = [\n", + " {\"constant\": True, \"inputs\": [], \"name\": \"totalSupply\", \"outputs\": [{\"type\": \"uint256\"}], \"type\": \"function\"},\n", + " {\"constant\": True, \"inputs\": [{\"name\": \"_owner\", \"type\": \"address\"}], \"name\": \"balanceOf\", \"outputs\": [{\"type\": \"uint256\"}], \"type\": \"function\"},\n", + " {\"constant\": True, \"inputs\": [], \"name\": \"name\", \"outputs\": [{\"type\": \"string\"}], \"type\": \"function\"},\n", + " {\"constant\": True, \"inputs\": [], \"name\": \"symbol\", \"outputs\": [{\"type\": \"string\"}], \"type\": \"function\"},\n", + "]\n", + "\n", + "tec = w3.eth.contract(address=Web3.to_checksum_address(TEC_TOKEN), abi=erc20_abi)\n", + "\n", + "total_supply = tec.functions.totalSupply().call() / 1e18\n", + "reserve_bal = tec.functions.balanceOf(Web3.to_checksum_address(RESERVE_POOL)).call() / 1e18\n", + "common_bal = tec.functions.balanceOf(Web3.to_checksum_address(COMMON_POOL)).call() / 1e18\n", + "\n", + "# Check xDAI (native) balance of reserve pool\n", + "reserve_xdai = w3.eth.get_balance(Web3.to_checksum_address(RESERVE_POOL)) / 1e18\n", + "common_xdai = w3.eth.get_balance(Web3.to_checksum_address(COMMON_POOL)) / 1e18\n", + "\n", + "print(f\"TEC Token: {tec.functions.name().call()} ({tec.functions.symbol().call()})\")\n", + "print(f\"Total Supply: {total_supply:,.2f} TEC\")\n", + "print(f\"\\nReserve Pool:\")\n", + "print(f\" TEC balance: {reserve_bal:,.2f}\")\n", + "print(f\" xDAI balance: 
{reserve_xdai:,.2f}\")\n", + "print(f\"\\nCommon Pool:\")\n", + "print(f\" TEC balance: {common_bal:,.2f}\")\n", + "print(f\" xDAI balance: {common_xdai:,.2f}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Next Steps\n", + "\n", + "After running this notebook:\n", + "1. `abc_orders.csv` — All bonding curve buy/sell transactions with prices\n", + "2. `tec_transfers.csv` — All TEC token transfers\n", + "3. `common_pool_outflows.csv` — Funded proposals (if TEC denominated)\n", + "\n", + "**Still needed:**\n", + "- Honeyswap trade data (secondary market) — may need Honeyswap subgraph or Dune export\n", + "- Conviction voting proposal metadata (titles, descriptions, requesters) — forum scraping or Gardens subgraph\n", + "- Historical xDAI/USD price for USD-denominating all values\n", + "- Reserve pool balance history (xDAI over time)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.11.0" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/references/data-sources.md b/references/data-sources.md index 5de79e6..a762779 100644 --- a/references/data-sources.md +++ b/references/data-sources.md @@ -1,52 +1,100 @@ # TEC Data Sources & References +## Contract Addresses (Gnosis Chain) + +| Contract | Address | Start Block | +|----------|---------|-------------| +| **TEC Token (MiniMe)** | `0x5dF8339c5E282ee48c0c7cE8A7d01a73D38B3B27` | 20,086,944 | +| **Augmented Bonding Curve** | `0x74ade20c12067e2f9457c037809a73f35694f99f` | 20,087,409 | +| **Bonding Curve Reserve** | `0x4a3C145c35Fa0daa58Cb5BD93CE905B086087246` | 20,087,409 | +| **Common Pool** | `0xb941365430a16659658bb23b88efaede1d839354` | — | +| **Gardens Template** | `0x5507A6365C47d5b201Af3c1CB18C7fD4a889321b` | 17,550,461 | +| **Disputable CV App ID** | `0xca60629a...` (aragonPM) | — | + +## Subgraphs + +| Subgraph | Entities | Notes | 
+|----------|----------|-------| +| **TEC ABC** (`commonsbuild/tec-abc`) | `BuyOrder`, `SellOrder`, `Transfer` | Price, supply, reserve per txn | +| **Gardens** (`1hive/gardens-gnosis`) | `Organization`, `Proposal`, `Stake`, `ConvictionConfig` | CV proposals, stakes, outcomes | + +**Note:** The Graph hosted service has been deprecated. Subgraph endpoints may require migration to Studio or Satsuma. Data may need to be fetched via GnosisScan events API or direct RPC calls as fallback. + +## Dune Dashboards + +| Dashboard | URL | Tracks | +|-----------|-----|--------| +| **TEC Main (2024)** | https://dune.com/tec/tec | Price, supply, pools, trades, tributes | +| **TEC Legacy** | https://dune.com/tec/tec-gno | Pre-migration data | +| **TEC (tecommons)** | https://dune.com/tecommons/TECommons | Community dashboard | + +**Sub-dashboards in TEC Main:** +1. $TEC Market Information — supply, distribution, TEC-XDAI price +2. TEC Reserve & Common Pools — pool balances over time (native + USD) +3. TEC Core Group — multisig transactions (Optimism) +4. TEC Token Trades — Honeyswap + ABC usage +5. 
TEC ABC Tributes — entry/exit tribute revenue + ## CommonsBuild GitHub Repos (Key) ### Core ABC & Bonding Curve -- **[augmented-bonding-curve-model](https://github.com/CommonsBuild/augmented-bonding-curve-model)** — Python model of the ABC (Bancor formula based) -- **[tec-abc-subgraph](https://github.com/CommonsBuild/tec-abc-subgraph)** — Hosted subgraph for ABC on-chain data +- **[augmented-bonding-curve-model](https://github.com/CommonsBuild/augmented-bonding-curve-model)** — Python model of the ABC (Bancor formula) +- **[tec-abc-subgraph](https://github.com/CommonsBuild/tec-abc-subgraph)** — Subgraph for ABC on-chain data - **[tec-abc-dashboard](https://github.com/CommonsBuild/tec-abc-dashboard)** — ABC dashboard / token converter UI - **[tec-swap](https://github.com/CommonsBuild/tec-swap)** — Mint/burn TEC tokens interface -- **[FundraisingModel](https://github.com/CommonsBuild/FundraisingModel)** — Token engineering model of the Bancor Formula -- **[Token-Log-Hatch-Params](https://github.com/CommonsBuild/Token-Log-Hatch-Params)** — Community parameterization tool +- **[FundraisingModel](https://github.com/CommonsBuild/FundraisingModel)** — Bancor Formula engineering model ### Conviction Voting & Governance -- **[commons-config-dashboard](https://github.com/CommonsBuild/commons-config-dashboard)** — Parameter configuration dashboard -- **[commons-config-backend](https://github.com/CommonsBuild/commons-config-backend)** — Backend for config proposals -- **[commons-config-proposals](https://github.com/CommonsBuild/commons-config-proposals)** — Governance proposals +- **[1Hive/gardens](https://github.com/1Hive/gardens)** — Gardens platform (CV + Disputable Voting) +- **[commons-config-dashboard](https://github.com/CommonsBuild/commons-config-dashboard)** — Parameter configuration +- **[commons-config-backend](https://github.com/CommonsBuild/commons-config-backend)** — Config backend ### Rewards & Distribution -- 
**[tec-rewards](https://github.com/CommonsBuild/tec-rewards)** — Rewards analysis (Jupyter notebooks) +- **[tec-rewards](https://github.com/CommonsBuild/tec-rewards)** — Rewards analysis (Jupyter) - **[rad-classic](https://github.com/CommonsBuild/rad-classic)** — Rewards Analysis Dashboard -- **[tec-census](https://github.com/CommonsBuild/tec-census)** — TEC holder census (balances, contract detection) -- **[praiseanalysis](https://github.com/CommonsBuild/praiseanalysis)** — Praise system analysis +- **[tec-census](https://github.com/CommonsBuild/tec-census)** — Token holder census -### Shutdown/Sunset (Recent) -- **[tec-shutdown](https://github.com/CommonsBuild/tec-shutdown)** — Shutdown contracts (Solidity) -- **[TECredeem](https://github.com/CommonsBuild/TECredeem)** — Token redemption interface -- **[TECsunsetcomingsoon](https://github.com/CommonsBuild/TECsunsetcomingsoon)** — Sunset announcement +### Shutdown (2025) +- **[tec-shutdown](https://github.com/CommonsBuild/tec-shutdown)** — TECClaim contract (Solidity) +- **[TECredeem](https://github.com/CommonsBuild/TECredeem)** — Token redemption UI -## On-Chain Data Sources +## DEX / Secondary Market +- **Honeyswap** (1Hive DEX on Gnosis Chain): TEC/xDAI pair +- Honeyswap analytics: https://info.honeyswap.org/ -### Gnosis Chain (formerly xDai) -- TEC Token contract: _TBD — need contract address_ -- ABC / Bancor curve contract: _TBD_ -- Conviction Voting (Gardens) contract: _TBD_ -- Block explorer: https://gnosisscan.io/ +## Forum Sources +- **TEC Forum**: https://forum.tecommons.org/ + - [Shutdown Proposal](https://forum.tecommons.org/t/tec-shutdown-proposal/1520) — passed Nov 4, 2025 + - [Sunsetting & Treasury Distribution](https://forum.tecommons.org/t/sunsetting-the-tec-proposal-treasury-distribution-framework/1500) + - [Q1 2025 Progress Report](https://forum.tecommons.org/t/tec-q1-2025-progress-report/1472) + - [Dune Dashboard 2024 Announcement](https://forum.tecommons.org/t/dune-tec-dashboard-2024/1371) -### 
Subgraphs -- TEC ABC Subgraph (from `tec-abc-subgraph` repo) -- Gardens subgraph (1Hive): _TBD_ +## TEC Handbook +- [Conviction Voting](https://token-engineering-commons.gitbook.io/tec-handbook/governance/voting-tools-and-methods/conviction-voting) +- [CV Parameters](https://token-engineering-commons.gitbook.io/tec-handbook/archive/archived-content/fund-allocation-layer-and-tools/conviction-voting-funding) + - Conviction Growth: 7 days + - Minimum Conviction: 4% + - Spending Limit: 11% -### DEX / Secondary Market -- Honeyswap (1Hive DEX on Gnosis Chain): TEC/xDAI pair -- Any Uniswap/Sushiswap pairs (if bridged to mainnet) +## Key Financial Data (from forum posts) -## Off-Chain Data Sources -- TEC Forum: https://forum.tecommons.org/ -- Snapshot voting: _TBD_ -- Discord governance discussions -- Medium articles / blog posts +### Treasury at Shutdown (~Q3 2025) +| Pool | Balance | +|------|---------| +| Common Pool | ~$225K | +| Reserve Pool | ~$54K | +| Dormant Multisigs | ~$25K | +| **Total** | **~$300K** | + +### Token Metrics at Shutdown +- Supply: ~1.209M TEC +- Market price: ~$0.18 +- FDV: ~$217K +- Trading at ~33% discount to treasury value ($0.25 implied) + +### Lifetime Grants: $433,316 direct + $250K via Gitcoin + +### Q1 2025 ABC Tributes: ~$3,614 ## Local Related Repos - `/home/jeffe/Github/conviction/` — ABC simulations, cadCAD conviction voting models @@ -58,4 +106,4 @@ - Bancor Protocol whitepaper - Conviction Voting paper (BlockScience) - Commons Stack ABC specification -- _Add more as identified_ +- [TEC Handbook](https://token-engineering-commons.gitbook.io/tec-handbook/)