diff --git a/AGENTS.md b/AGENTS.md deleted file mode 100644 index c606bed1..00000000 --- a/AGENTS.md +++ /dev/null @@ -1,568 +0,0 @@ -# Using Hummingbot API with AI Agents - -This guide shows you how to interact with the Hummingbot API using various AI agents including ChatGPT, custom agents, and other MCP-compatible assistants. - -## 🤖 Method 1: MCP Server (Recommended) - -The Hummingbot MCP server provides natural language access to all API functionality through MCP-compatible AI clients. - -### OpenAI ChatGPT (Desktop App) - -If OpenAI releases an MCP-compatible desktop client, you can configure it similar to Claude: - -1. **Enable MCP during Hummingbot API setup**: - ```bash - ./setup.sh # Answer "y" to "Enable MCP server for AI assistant usage?" - ``` - -2. **Configure the MCP server**: - Add to your ChatGPT configuration file (location may vary): - ```json - { - "mcpServers": { - "hummingbot": { - "command": "docker", - "args": ["run", "--rm", "-i", "-e", "HUMMINGBOT_API_URL=http://host.docker.internal:8000", "-v", "hummingbot_mcp:/root/.hummingbot_mcp", "hummingbot/hummingbot-mcp:latest"] - } - } - } - ``` - -3. **Start using natural language**: - - "Show me my portfolio across all exchanges" - - "What bots are currently running?" - - "Create a grid trading strategy for BTC-USDT" - - "Analyze my trading performance this month" - -### Custom MCP Clients - -For custom implementations, connect to the MCP server using stdio transport: - -**Python Example**: -```python -import subprocess -import json - -# Start the MCP server process -process = subprocess.Popen([ - "docker", "run", "--rm", "-i", "-e", "HUMMINGBOT_API_URL=http://host.docker.internal:8000", "-v", "hummingbot_mcp:/root/.hummingbot_mcp", "hummingbot/hummingbot-mcp:latest" -], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - text=True -) - -# Send JSON-RPC request -request = { - "jsonrpc": "2.0", - "id": 1, - "method": "tools/list", - "params": {} -} -process.stdin.write(json.dumps(request) + "\n") -process.stdin.flush() - -# Read response -response = process.stdout.readline() -print(json.loads(response)) -``` - -**Node.js Example**: -```javascript -const { spawn } = require('child_process'); - -// Start MCP server -const mcp = spawn('docker', ['run', '--rm', '-i', '-e', 'HUMMINGBOT_API_URL=http://host.docker.internal:8000', '-v', 'hummingbot_mcp:/root/.hummingbot_mcp', 'hummingbot/hummingbot-mcp:latest']); - -// Send request -const request = { - jsonrpc: '2.0', - id: 1, - method: 'tools/list', - params: {} -}; - -mcp.stdin.write(JSON.stringify(request) + '\n'); - -// Handle response -mcp.stdout.on('data', (data) => { - console.log(JSON.parse(data.toString())); -}); -``` - -### Available MCP Tools - -The Hummingbot MCP server provides these tools: - -- **Portfolio Management**: `get_portfolio_balances`, `get_portfolio_distribution` -- **Bot Operations**: `list_bots`, `start_bot`, `stop_bot`, `get_bot_status` -- **Market Data**: `get_prices`, `get_order_book`, `get_candles` -- **Order Management**: `place_order`, `cancel_order`, `get_active_orders` -- **Account Management**: `list_accounts`, `add_credentials` -- **Strategy Management**: `list_strategies`, `get_strategy_template` - -For a complete list, use the `tools/list` MCP method. - -## 🔧 Method 2: Direct API Access (Standard HTTP) - -All AI agents can interact with the API using standard HTTP requests. - -### API Endpoints - -The API is accessible at `http://localhost:8000` with interactive Swagger docs at `http://localhost:8000/docs`. 
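Agents can also discover the full endpoint catalog programmatically by pulling the OpenAPI schema that backs the Swagger UI (a minimal sketch, assuming the default FastAPI setup, the `admin`/`admin` credentials from setup, and that `jq` is installed):

```bash
# Pull the OpenAPI schema that backs the Swagger UI, using the basic-auth credentials from setup
curl -u admin:admin -s http://localhost:8000/openapi.json -o openapi.json

# List every documented endpoint path (jq assumed to be available)
jq -r '.paths | keys[]' openapi.json
```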
- -See @API_REFERENCE.md for the complete endpoint reference. - -### Authentication - -All endpoints require HTTP Basic Authentication: - -```bash -curl -u username:password http://localhost:8000/endpoint -``` - -Use the username and password you configured during setup (stored in `.env`). - -### Common API Operations - -**1. Get Portfolio Balances**: -```bash -curl -u admin:admin -X POST http://localhost:8000/portfolio/state \ - -H "Content-Type: application/json" \ - -d '{}' -``` - -**2. List Active Bots**: -```bash -curl -u admin:admin http://localhost:8000/bot-orchestration/status -``` - -**3. Get Market Prices**: -```bash -curl -u admin:admin -X POST http://localhost:8000/market-data/prices \ - -H "Content-Type: application/json" \ - -d '{ - "connector_name": "binance", - "trading_pairs": ["BTC-USDT", "ETH-USDT"] - }' -``` - -**4. Place an Order**: -```bash -curl -u admin:admin -X POST http://localhost:8000/trading/orders \ - -H "Content-Type: application/json" \ - -d '{ - "account_name": "master_account", - "connector_name": "binance", - "trading_pair": "BTC-USDT", - "order_type": "limit", - "trade_type": "buy", - "price": 50000, - "amount": 0.001 - }' -``` - -**5. Start a Trading Bot**: -```bash -curl -u admin:admin -X POST http://localhost:8000/bot-orchestration/deploy-v2-script \ - -H "Content-Type: application/json" \ - -d '{ - "bot_name": "my_pmm_bot", - "script": "v2_with_controllers", - "config": { - "connector": "binance", - "trading_pair": "ETH-USDT", - "total_amount_quote": 100 - } - }' -``` - -### Integration Examples - -**Python with requests**: -```python -import requests -from requests.auth import HTTPBasicAuth - -auth = HTTPBasicAuth('admin', 'admin') -base_url = 'http://localhost:8000' - -# Get portfolio state -response = requests.post( - f'{base_url}/portfolio/state', - json={}, - auth=auth -) -print(response.json()) -``` - -**JavaScript with fetch**: -```javascript -const username = 'admin'; -const password = 'admin'; -const baseURL = 'http://localhost:8000'; - -const headers = { - 'Content-Type': 'application/json', - 'Authorization': 'Basic ' + btoa(`${username}:${password}`) -}; - -// Get portfolio state -fetch(`${baseURL}/portfolio/state`, { - method: 'POST', - headers: headers, - body: JSON.stringify({}) -}) -.then(res => res.json()) -.then(data => console.log(data)); -``` - -## 🌐 Common Workflows - -### Managing Gateway Container (For DEX Trading) - -Gateway is required for decentralized exchange (DEX) trading. The `manage_gateway_container` MCP tool provides full lifecycle management. - -#### Using Natural Language (MCP-Compatible Assistants) - -If you're using Claude, ChatGPT, or other MCP-compatible AI assistants, you can manage Gateway with simple commands: - -- **"Start Gateway in development mode with passphrase 'admin'"** -- **"Check Gateway status"** -- **"Restart the Gateway container"** -- **"Stop Gateway"** - -#### Using MCP Tool Programmatically - -For custom integrations, call the `manage_gateway_container` tool via MCP: - -**Python Example**: -```python -import subprocess -import json - -# Start MCP server -process = subprocess.Popen([ - "docker", "run", "--rm", "-i", "-e", "HUMMINGBOT_API_URL=http://host.docker.internal:8000", - "-v", "hummingbot_mcp:/root/.hummingbot_mcp", "hummingbot/hummingbot-mcp:latest" -], - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - text=True -) - -def send_request(req): - process.stdin.write(json.dumps(req) + "\n") - process.stdin.flush() - return json.loads(process.stdout.readline()) - -# 1. 
Configure API connection (first time only) -send_request({ - "jsonrpc": "2.0", - "id": 1, - "method": "tools/call", - "params": { - "name": "configure_api_servers", - "arguments": { - "api_url": "http://host.docker.internal:8000", - "username": "admin", - "password": "admin" - } - } -}) - -# 2. Start Gateway container -gateway_response = send_request({ - "jsonrpc": "2.0", - "id": 2, - "method": "tools/call", - "params": { - "name": "manage_gateway_container", - "arguments": { - "action": "start", - "config": { - "passphrase": "admin", - "dev_mode": True, - "image": "hummingbot/gateway:latest", - "port": 15888 - } - } - } -}) -print(gateway_response) - -# 3. Check Gateway status -status = send_request({ - "jsonrpc": "2.0", - "id": 3, - "method": "tools/call", - "params": { - "name": "manage_gateway_container", - "arguments": { - "action": "get_status" - } - } -}) -print(status) - -# 4. Restart Gateway (if needed) -send_request({ - "jsonrpc": "2.0", - "id": 4, - "method": "tools/call", - "params": { - "name": "manage_gateway_container", - "arguments": { - "action": "restart" - } - } -}) - -# 5. Stop Gateway -send_request({ - "jsonrpc": "2.0", - "id": 5, - "method": "tools/call", - "params": { - "name": "manage_gateway_container", - "arguments": { - "action": "stop" - } - } -}) -``` - -**Node.js Example**: -```javascript -const { spawn } = require('child_process'); - -// Start MCP server -const mcp = spawn('docker', ['run', '--rm', '-i', '-e', 'HUMMINGBOT_API_URL=http://host.docker.internal:8000', '-v', 'hummingbot_mcp:/root/.hummingbot_mcp', 'hummingbot/hummingbot-mcp:latest']); - -let buffer = ''; -mcp.stdout.on('data', (data) => { - buffer += data.toString(); - const lines = buffer.split('\n'); - buffer = lines.pop(); - lines.forEach(line => { - if (line.trim()) { - console.log(JSON.parse(line)); - } - }); -}); - -function sendRequest(req) { - mcp.stdin.write(JSON.stringify(req) + '\n'); -} - -// 1. Configure API connection -sendRequest({ - jsonrpc: '2.0', - id: 1, - method: 'tools/call', - params: { - name: 'configure_api_servers', - arguments: { - api_url: 'http://host.docker.internal:8000', - username: 'admin', - password: 'admin' - } - } -}); - -// 2. Start Gateway container -sendRequest({ - jsonrpc: '2.0', - id: 2, - method: 'tools/call', - params: { - name: 'manage_gateway_container', - arguments: { - action: 'start', - config: { - passphrase: 'admin', - dev_mode: true, - image: 'hummingbot/gateway:latest', - port: 15888 - } - } - } -}); - -// 3. 
Check Gateway status -sendRequest({ - jsonrpc: '2.0', - id: 3, - method: 'tools/call', - params: { - name: 'manage_gateway_container', - arguments: { - action: 'get_status' - } - } -}); -``` - -#### Using Direct API Access (Alternative) - -If MCP is not available, you can manage Gateway through the API directly: - -```bash -# Start Gateway (via Swagger UI or curl) -curl -u admin:admin -X POST http://localhost:8000/manage-gateway \ - -H "Content-Type: application/json" \ - -d '{ - "action": "start", - "passphrase": "admin", - "dev_mode": true - }' - -# Check Gateway status -curl -u admin:admin http://localhost:8000/manage-gateway/status -``` - -#### Important Notes -- **Development mode** (`dev_mode: true`): HTTP access on port 15888, Swagger UI at `http://localhost:15888/docs` -- **Production mode** (`dev_mode: false`): HTTPS with certificates, more secure -- **Passphrase**: Encrypts/decrypts DEX wallet keys - store securely -- **Port**: Default is 15888, must be available on your system -- **Gateway URL**: `http://localhost:15888` (dev) or `https://localhost:15888` (prod) - -## 📚 API Reference - -For complete API documentation, see: -- **@API_REFERENCE.md**: Full endpoint reference with request/response examples -- **Swagger UI**: http://localhost:8000/docs (interactive documentation) -- **@README.md**: Setup instructions and architecture overview - -## 🆘 Troubleshooting - -**MCP Server Issues**: -```bash -# Check if MCP container is running -docker ps | grep hummingbot-mcp - -# View MCP logs -docker logs hummingbot-mcp - -# Restart MCP -docker compose restart hummingbot-mcp -``` - -**API Connection Issues**: -```bash -# Check if API is running -docker ps | grep hummingbot-api - -# View API logs -docker logs hummingbot-api - -# Test API connectivity -curl -u admin:admin http://localhost:8000/ -``` - -**Authentication Errors**: -- Verify credentials in `.env` file -- Ensure you're using the correct username and password -- Check that the API container is running - -**Docker Issues**: -```bash -# Ensure Docker is running -docker ps - -# Restart all services -docker compose restart - -# View all logs -docker compose logs -f -``` - -## 🚀 Next Steps - -1. **Explore the API**: Visit http://localhost:8000/docs -2. **Read API Reference**: See @API_REFERENCE.md for all endpoints -3. **Set up credentials**: Add exchange API keys via `/accounts/add-credential` -4. **Deploy a bot**: Start with a simple PMM or DCA strategy -5. **Monitor performance**: Use portfolio and bot status endpoints - -## 💡 Tips for AI Agent Integration - -1. **Use MCP when possible**: More natural language interface, automatic tool discovery -2. **Handle authentication**: Store credentials securely in your agent's configuration -3. **Implement retry logic**: API calls may timeout, implement exponential backoff -4. **Parse responses carefully**: All responses are JSON, handle errors appropriately -5. 
**Use Swagger UI**: Test endpoints manually before integrating into your agent - -## MCP Tools Best Practices - -### Using `configure_api_servers` for Connection Management - -**Before using any MCP tools**, always ensure the API server is properly configured: - -```python -# Check if connection is working - if any MCP tool fails, reconnect: -configure_api_servers(action="add", name="local", host="localhost", port=8000, username="admin", password="admin") -configure_api_servers(action="set_default", name="local") -``` - -### Using `get_portfolio_overview` for Token Balances - -**Preferred method for checking balances**: -- Use `get_portfolio_overview()` instead of direct API calls -- Includes CEX balances, DEX balances, LP positions, and active orders in one call -- Automatically handles all account types (Hyperliquid, Solana, Ethereum, etc.) - -```python -# Get complete portfolio overview -get_portfolio_overview( - include_balances=True, - include_perp_positions=False, - include_lp_positions=True, - include_active_orders=True, - as_distribution=False -) -``` - -### Common MCP Connection Issue - -**Error**: -``` -Error executing tool get_portfolio_overview: ❌ Failed to connect to Hummingbot API at http://docker.host.internal:8000 - -Connection failed after 3 attempts. - -💡 Solutions: - 1. Check if the API is running and accessible - 2. Verify your credentials are correct - 3. Use 'configure_api_servers' tool for setup - -Original error: Cannot connect to host docker.host.internal:8000 ssl:default [Name or service not known] -``` - -**Root Cause**: The MCP tool loses connection to the API server. This happens when: -- MCP server reconnects/restarts -- API credentials are not cached -- Network configuration changes - -**Solution**: Reconfigure the API server connection before retrying: - -```python -# Step 1: Add server configuration -configure_api_servers( - action="add", - name="local", - host="localhost", - port=8000, - username="admin", - password="admin" -) - -# Step 2: Set as default -configure_api_servers(action="set_default", name="local") - -# Step 3: Retry the operation -get_portfolio_overview(include_balances=True) -``` - -**Prevention**: Always check connection before using other MCP tools. If you see any connection error, immediately run `configure_api_servers` to restore the connection. diff --git a/API_REFERENCE.md b/API_REFERENCE.md deleted file mode 100644 index ca1106fd..00000000 --- a/API_REFERENCE.md +++ /dev/null @@ -1,1080 +0,0 @@ -# Hummingbot API Reference for AI Assistants - -**Quick Start:** This API is accessible at `http://localhost:8000` with interactive docs at `http://localhost:8000/docs`. - -## 🤖 MCP Tools (Recommended - Use These First!) - -**For AI Assistants:** Before making direct API calls, check if an MCP tool exists for your task. MCP tools provide simplified, high-level access to common operations. - -### Essential Setup & Connection - -#### `configure_api_servers` - **ALWAYS RUN FIRST** -Configure connection to the Hummingbot API. Run this before using any other MCP tool. 
- -```python -configure_api_servers( - action="add", - name="local", - host="localhost", - port=8000, - username="admin", - password="admin" -) -configure_api_servers(action="set_default", name="local") -``` - -**When to use:** -- Before any other MCP tool -- When you get connection errors -- After MCP server restarts - ---- - -### Portfolio & Trading - -#### `get_portfolio_overview` - **Unified Portfolio View** -Get complete portfolio across CEX, DEX, LP positions, and orders in one call. - -```python -get_portfolio_overview( - account_names=["master_account"], # Optional filter - connector_names=["binance", "solana-mainnet-beta"], # Optional filter - include_balances=True, - include_perp_positions=True, - include_lp_positions=True, - include_active_orders=True, - as_distribution=False # Set True for percentage breakdown -) -``` - -**Use instead of:** -- `POST /portfolio/state` -- `POST /portfolio/distribution` -- `POST /trading/positions` -- `POST /trading/orders/active` - -**When to use:** -- "Show me my portfolio" -- "What are my balances?" -- "Do I have any open positions?" - ---- - -#### `place_order` - Place Exchange Orders -Execute buy/sell orders on CEX exchanges. - -```python -place_order( - connector_name="binance", - trading_pair="BTC-USDT", - trade_type="BUY", # or "SELL" - amount="$100", # Use $ prefix for USD value, or specify base amount "0.001" - order_type="MARKET", # or "LIMIT" - price="50000", # Required for LIMIT orders - account_name="master_account" -) -``` - -**Use instead of:** `POST /trading/orders` - ---- - -#### `search_history` - Search Trading History -Search orders, perpetual positions, or CLMM positions. - -```python -search_history( - data_type="orders", # or "perp_positions", "clmm_positions" - account_names=["master_account"], - connector_names=["binance"], - trading_pairs=["BTC-USDT"], - status="FILLED", # Optional: OPEN, CLOSED, FILLED, CANCELED - start_time=1609459200, # Unix timestamp - end_time=1609545600, - limit=50 -) -``` - -**Use instead of:** -- `POST /trading/orders/search` -- `POST /trading/positions` -- `POST /gateway/clmm/positions/search` - ---- - -#### `set_account_position_mode_and_leverage` - Configure Perpetuals -Set position mode and leverage for perpetual trading. - -```python -set_account_position_mode_and_leverage( - account_name="master_account", - connector_name="binance_perpetual", - trading_pair="BTC-USDT", # Required for leverage - position_mode="HEDGE", # or "ONE-WAY" - leverage=10 # Optional -) -``` - -**Use instead of:** -- `POST /trading/{account_name}/{connector_name}/position-mode` -- `POST /trading/{account_name}/{connector_name}/leverage` - ---- - -### Exchange Credentials & Setup - -#### `setup_connector` - Add Exchange Credentials -Progressive setup flow for adding exchange API keys. - -```python -# Step 1: List available exchanges -setup_connector() - -# Step 2: Get required fields for specific exchange -setup_connector(connector="binance") - -# Step 3: Select account (if needed) -# Step 4: Add credentials -setup_connector( - connector="binance", - credentials={ - "binance_api_key": "your_key", - "binance_api_secret": "your_secret" - }, - account="master_account" -) -``` - -**Use instead of:** -- `GET /connectors/` -- `GET /connectors/{connector_name}/config-map` -- `POST /accounts/add-credential/{account_name}/{connector_name}` - ---- - -### Market Data - -#### `get_prices` - Latest Market Prices -Get current prices for multiple trading pairs. 
- -```python -get_prices( - connector_name="binance", - trading_pairs=["BTC-USDT", "ETH-USDT", "SOL-USDT"] -) -``` - -**Use instead of:** `POST /market-data/prices` - ---- - -#### `get_candles` - Price History (OHLCV) -Get candlestick data for technical analysis. - -```python -get_candles( - connector_name="binance", - trading_pair="BTC-USDT", - interval="1h", # "1m", "5m", "15m", "30m", "1h", "4h", "1d" - days=30 # Days of history -) -``` - -**Use instead of:** `POST /market-data/historical-candles` - ---- - -#### `get_funding_rate` - Perpetual Funding Rates -Get funding rates for perpetual contracts. - -```python -get_funding_rate( - connector_name="binance_perpetual", - trading_pair="BTC-USDT" -) -``` - -**Use instead of:** `POST /market-data/funding-info` - ---- - -#### `get_order_book` - Order Book Analysis -Get order book data with advanced queries. - -```python -get_order_book( - connector_name="binance", - trading_pair="BTC-USDT", - query_type="snapshot", # "volume_for_price", "price_for_volume", etc. - query_value=50000, # Required for non-snapshot queries - is_buy=True # Required for non-snapshot queries -) -``` - -**Use instead of:** -- `POST /market-data/order-book` -- `POST /market-data/order-book/price-for-volume` -- `POST /market-data/order-book/volume-for-price` -- `POST /market-data/order-book/vwap-for-volume` - ---- - -### Gateway (DEX Trading) - -#### `manage_gateway_container` - Gateway Lifecycle -Start, stop, or check Gateway container status. - -```python -# Start Gateway -manage_gateway_container( - action="start", - config={ - "passphrase": "admin", - "image": "hummingbot/gateway:latest", - "port": 15888 - } -) - -# Check status -manage_gateway_container(action="get_status") - -# View logs -manage_gateway_container(action="get_logs", tail=100) - -# Restart -manage_gateway_container(action="restart") - -# Stop -manage_gateway_container(action="stop") -``` - -**Use instead of:** -- `POST /gateway/start` -- `GET /gateway/status` -- `POST /gateway/stop` -- `POST /gateway/restart` -- `GET /gateway/logs` - ---- - -#### `manage_gateway_config` - Configure DEX Resources -Manage chains, networks, tokens, connectors, pools, and wallets. 
- -```python -# List supported chains -manage_gateway_config(resource_type="chains", action="list") - -# List networks -manage_gateway_config(resource_type="networks", action="list") - -# Get specific network -manage_gateway_config( - resource_type="networks", - action="get", - network_id="solana-mainnet-beta" -) - -# List tokens on network -manage_gateway_config( - resource_type="tokens", - action="list", - network_id="solana-mainnet-beta", - search="USDC" # Optional search filter -) - -# Add token -manage_gateway_config( - resource_type="tokens", - action="add", - network_id="solana-mainnet-beta", - token_address="EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v", - token_symbol="USDC", - token_decimals=6, - token_name="USD Coin" -) - -# Add wallet -manage_gateway_config( - resource_type="wallets", - action="add", - chain="solana", - private_key="your_private_key" -) - -# List DEX connectors -manage_gateway_config(resource_type="connectors", action="list") - -# List pools -manage_gateway_config( - resource_type="pools", - action="list", - connector_name="meteora", - network="mainnet-beta" -) -``` - -**Use instead of:** -- `GET /gateway/chains` -- `GET /gateway/networks` -- `GET /gateway/networks/{network_id}` -- `GET /gateway/networks/{network_id}/tokens` -- `POST /gateway/networks/{network_id}/tokens` -- `POST /accounts/gateway/add-wallet` -- `GET /gateway/connectors` -- `GET /gateway/pools` - ---- - -#### `manage_gateway_swaps` - DEX Swaps -Quote and execute swaps on DEX routers (Jupiter, 0x). - -```python -# Get quote -manage_gateway_swaps( - action="quote", - connector="jupiter", - network="solana-mainnet-beta", - trading_pair="SOL-USDC", - side="BUY", # or "SELL" - amount="1.0", # Amount of base token - slippage_pct="1.0" -) - -# Execute swap -manage_gateway_swaps( - action="execute", - connector="jupiter", - network="solana-mainnet-beta", - trading_pair="SOL-USDC", - side="BUY", - amount="1.0", - slippage_pct="1.0", - wallet_address="your_wallet_address" # Optional -) - -# Search swap history -manage_gateway_swaps( - action="search", - search_connector="jupiter", - search_network="solana-mainnet-beta", - status="CONFIRMED", # SUBMITTED, CONFIRMED, FAILED - limit=50 -) - -# Check transaction status -manage_gateway_swaps( - action="get_status", - transaction_hash="your_tx_hash" -) -``` - -**Use instead of:** -- `POST /gateway/swap/quote` -- `POST /gateway/swap/execute` -- `POST /gateway/swaps/search` -- `GET /gateway/swaps/{transaction_hash}/status` - ---- - -#### `explore_gateway_clmm_pools` - Discover CLMM Pools -Browse concentrated liquidity pools. - -```python -# List pools -explore_gateway_clmm_pools( - action="list_pools", - connector="meteora", - page=0, - limit=50, - search_term="SOL", # Optional filter - sort_key="volume", # volume, tvl, feetvlratio - order_by="desc", - include_unknown=True, - detailed=False # Set True for more columns -) - -# Get specific pool info -explore_gateway_clmm_pools( - action="get_pool_info", - connector="meteora", - network="solana-mainnet-beta", - pool_address="pool_address_here" -) -``` - -**Use instead of:** -- `GET /gateway/clmm/pools` -- `GET /gateway/clmm/pool-info` - ---- - -#### `manage_gateway_clmm_positions` - CLMM Liquidity Positions -Open, close, collect fees from concentrated liquidity positions. 
- -```python -# Open position -manage_gateway_clmm_positions( - action="open_position", - connector="meteora", - network="solana-mainnet-beta", - pool_address="pool_address", - lower_price="150", - upper_price="250", - base_token_amount="1.0", # Optional - quote_token_amount="200", # Optional - slippage_pct="1.0", - wallet_address="your_wallet", # Optional - extra_params={"strategyType": 0} # Connector-specific -) - -# Get positions for wallet/pool -manage_gateway_clmm_positions( - action="get_positions", - connector="meteora", - network="solana-mainnet-beta", - pool_address="pool_address", - wallet_address="your_wallet" -) - -# Collect fees -manage_gateway_clmm_positions( - action="collect_fees", - connector="meteora", - network="solana-mainnet-beta", - position_address="position_nft_address", - wallet_address="your_wallet" -) - -# Close position -manage_gateway_clmm_positions( - action="close_position", - connector="meteora", - network="solana-mainnet-beta", - position_address="position_nft_address", - wallet_address="your_wallet" -) -``` - -**Use instead of:** -- `POST /gateway/clmm/open` -- `POST /gateway/clmm/positions_owned` -- `POST /gateway/clmm/collect-fees` -- `POST /gateway/clmm/close` - ---- - -### Bot Management - -#### `explore_controllers` - Discover Trading Strategies -List and understand available trading controllers (strategies). - -```python -# List all controllers -explore_controllers(action="list") - -# List by type -explore_controllers( - action="list", - controller_type="directional_trading" # or "market_making", "generic" -) - -# Describe specific controller -explore_controllers( - action="describe", - controller_name="macd_bb_v1" -) - -# Describe specific config -explore_controllers( - action="describe", - config_name="my_strategy_config" -) -``` - -**Use instead of:** -- `GET /controllers/` -- `GET /controllers/{controller_type}/{controller_name}` -- `GET /controllers/configs/` - ---- - -#### `modify_controllers` - Create/Update Strategies -Create, update, or delete controller templates and configs. - -```python -# Create controller config -modify_controllers( - action="upsert", - target="config", - config_name="my_pmm_strategy", - config_data={ - "controller_name": "macd_bb_v1", - "controller_type": "directional_trading", - # ... other config parameters - } -) - -# Update bot-specific config -modify_controllers( - action="upsert", - target="config", - config_name="my_pmm_strategy", - config_data={...}, - bot_name="my_bot", - confirm_override=True -) - -# Delete config -modify_controllers( - action="delete", - target="config", - config_name="old_strategy" -) -``` - -**Use instead of:** -- `POST /controllers/configs/{config_name}` -- `PUT /controllers/configs/{config_name}` -- `DELETE /controllers/configs/{config_name}` -- `POST /controllers/{controller_type}/{controller_name}` - ---- - -#### `deploy_bot_with_controllers` - Deploy Trading Bot -Deploy a bot with controller configurations. - -```python -deploy_bot_with_controllers( - bot_name="my_trading_bot", - controllers_config=["strategy_config_1", "strategy_config_2"], - account_name="master_account", - max_global_drawdown_quote=1000, # Optional stop-loss - max_controller_drawdown_quote=500, # Optional per-strategy stop - image="hummingbot/hummingbot:latest" -) -``` - -**Use instead of:** `POST /bot-orchestration/deploy-v2-controllers` - ---- - -#### `get_active_bots_status` - Monitor Running Bots -Get status of all active trading bots. 
- -```python -get_active_bots_status() -``` - -**Returns:** Bot status, PnL, volume, latest logs, errors - -**Use instead of:** `GET /bot-orchestration/status` - ---- - -#### `get_bot_logs` - Detailed Bot Logs -Search and filter bot logs. - -```python -get_bot_logs( - bot_name="my_trading_bot", - log_type="error", # "error", "general", "all" - limit=50, - search_term="connection" # Optional filter -) -``` - -**Use instead of:** `GET /bot-orchestration/{bot_name}/status` (for logs) - ---- - -#### `manage_bot_execution` - Start/Stop Bots -Control bot and controller execution. - -```python -# Stop entire bot permanently -manage_bot_execution( - bot_name="my_trading_bot", - action="stop_bot" -) - -# Stop specific controllers -manage_bot_execution( - bot_name="my_trading_bot", - action="stop_controllers", - controller_names=["strategy_1", "strategy_2"] -) - -# Start/resume controllers -manage_bot_execution( - bot_name="my_trading_bot", - action="start_controllers", - controller_names=["strategy_1"] -) -``` - -**Use instead of:** -- `POST /bot-orchestration/stop-bot` -- `POST /bot-orchestration/stop-and-archive-bot/{bot_name}` - ---- - -## 📋 Direct API Endpoints (Use When MCP Tools Don't Exist) - -**Authentication:** All endpoints require HTTP Basic Auth. - -```bash -curl -u username:password http://localhost:8000/endpoint -``` - ---- - -### 🐳 Docker Management (`/docker`) - -``` -GET /docker/running -GET /docker/available-images/ -GET /docker/active-containers -GET /docker/exited-containers -POST /docker/pull-image/ -GET /docker/pull-status/ -POST /docker/clean-exited-containers -POST /docker/start-container/{container_name} -POST /docker/stop-container/{container_name} -POST /docker/remove-container/{container_name} -``` - -**Use Cases:** -- Check if Docker daemon is running -- Pull latest Hummingbot images -- Manage container lifecycle -- Clean up exited containers - ---- - -### 💳 Account Management (`/accounts`) - -**MCP tools exist** for most operations. Use direct API only for: - -``` -GET /accounts/ # List all accounts -POST /accounts/add-account # Create new account -POST /accounts/delete-account # Remove account -GET /accounts/{account_name}/credentials # List credentials -``` - -**Note:** Use `setup_connector` MCP tool for adding credentials instead of: -- `POST /accounts/add-credential/{account_name}/{connector_name}` -- `POST /accounts/delete-credential/{account_name}/{connector_name}` - ---- - -### 🔌 Connector Information (`/connectors`) - -**MCP tool exists:** Use `setup_connector()` for progressive flow. - -Direct API endpoints: -``` -GET /connectors/ # List all exchanges -GET /connectors/{connector_name}/config-map # Get required credentials -GET /connectors/{connector_name}/order-types # Supported order types -GET /connectors/{connector_name}/trading-rules # Min/max amounts, tick sizes -``` - -**Example:** -```bash -# Get Binance trading rules -curl -u admin:admin "http://localhost:8000/connectors/binance/trading-rules?trading_pairs=BTC-USDT,ETH-USDT" -``` - ---- - -### 📊 Portfolio Management (`/portfolio`) - -**MCP tool exists:** Use `get_portfolio_overview()` instead of these: - -``` -POST /portfolio/state # Current balances (use MCP tool!) -POST /portfolio/distribution # Token breakdown (use MCP tool!) -POST /portfolio/accounts-distribution # Account allocation (use MCP tool!) 
-POST /portfolio/history # Historical portfolio values -``` - -**When to use direct API:** -- Need cursor-based pagination for portfolio history -- Building custom portfolio analytics - ---- - -### 💹 Trading Operations (`/trading`) - -**MCP tools exist** for most operations: -- `place_order()` for placing orders -- `search_history()` for order/trade history -- `get_portfolio_overview()` for active orders and positions -- `set_account_position_mode_and_leverage()` for perpetual settings - -**Direct API only needed for:** - -``` -GET /trading/{account_name}/{connector_name}/position-mode - # Get current position mode (HEDGE/ONEWAY) -``` - ---- - -### 🤖 Bot Orchestration (`/bot-orchestration`) - -**MCP tools exist:** -- `deploy_bot_with_controllers()` - Deploy bots -- `get_active_bots_status()` - Monitor bots -- `get_bot_logs()` - View logs -- `manage_bot_execution()` - Start/stop - -**Direct API for advanced use:** - -``` -GET /bot-orchestration/bot-runs # Bot run history -GET /bot-orchestration/bot-runs/stats # Aggregate stats -GET /bot-orchestration/bot-runs/{bot_run_id} # Specific run details -POST /bot-orchestration/deploy-v2-script # Deploy V2 scripts -POST /bot-orchestration/start-bot # Start V1 bots -GET /bot-orchestration/mqtt # MQTT status -GET /bot-orchestration/{bot_name}/history # Bot performance history -``` - ---- - -### 📋 Strategy Management - -#### Controllers (`/controllers`) - -**MCP tools:** `explore_controllers()`, `modify_controllers()` - -**Direct API for:** -``` -GET /controllers/{controller_type}/{controller_name}/config/template - # Get JSON template for config -POST /controllers/{controller_type}/{controller_name}/config/validate - # Validate config before deploying -``` - -#### Scripts (`/scripts`) - -``` -GET /scripts/ # List available scripts -GET /scripts/{script_name} # Get script code -POST /scripts/{script_name} # Upload custom script -DELETE /scripts/{script_name} # Remove script -GET /scripts/{script_name}/config/template # Get config template -GET /scripts/configs/ # List script configs -POST /scripts/configs/{config_name} # Create config -DELETE /scripts/configs/{config_name} # Delete config -``` - ---- - -### 📊 Market Data (`/market-data`) - -**MCP tools exist:** -- `get_prices()` - Current prices -- `get_candles()` - OHLCV data -- `get_funding_rate()` - Funding rates -- `get_order_book()` - Order book analysis - -**Direct API for real-time feeds:** - -``` -POST /market-data/candles - # Start persistent candle feed (WebSocket-like) - Body: { - "connector_name": "binance", - "trading_pairs": ["BTC-USDT"], - "intervals": ["1m", "5m"], - "max_records": 1000 - } - -GET /market-data/active-feeds - # List active real-time feeds - -GET /market-data/settings - # Get market data configuration -``` - ---- - -### 🔄 Backtesting (`/backtesting`) - -**No MCP tool.** Use direct API: - -``` -POST /backtesting/run-backtesting - Body: { - "config": { - "controller_name": "directional_trading.macd_bb_v1", - "controller_type": "directional_trading", - "controller_config": [...], - "start_time": 1609459200, - "end_time": 1609545600, - "backtesting_resolution": "1m", - "trade_cost": 0.0006 - } - } -``` - ---- - -### 📈 Archived Bot Analytics (`/archived-bots`) - -**No MCP tool.** Use direct API for analyzing stopped bots: - -``` -GET /archived-bots/ # List archived databases -GET /archived-bots/{db_path}/status # Bot configuration -GET /archived-bots/{db_path}/summary # Performance summary -GET /archived-bots/{db_path}/performance # Detailed metrics -GET 
/archived-bots/{db_path}/orders # Historical orders -GET /archived-bots/{db_path}/trades # Trade history -GET /archived-bots/{db_path}/positions # Position history -GET /archived-bots/{db_path}/controllers # Controller configs -GET /archived-bots/{db_path}/executors # Executor data -``` - ---- - -### 🌐 Gateway Endpoints (DEX & Blockchain Operations) - -**MCP tools exist** for most Gateway operations. Use direct API only for specific needs. - -#### Gateway Lifecycle (`/gateway`) - -``` -GET /gateway/status # Get Gateway status -POST /gateway/start # Start Gateway container -POST /gateway/stop # Stop Gateway container -POST /gateway/restart # Restart Gateway container -GET /gateway/logs # Get Gateway logs -``` - -**Note:** Use `manage_gateway_container` MCP tool instead of these endpoints. - ---- - -#### Gateway Configuration (`/gateway`) - -``` -GET /gateway/chains # List supported blockchain chains -GET /gateway/connectors # List available DEX connectors -GET /gateway/connectors/{connector_name} - # Get specific connector configuration -POST /gateway/connectors/{connector_name} - # Update connector configuration - -GET /gateway/networks # List all networks -GET /gateway/networks/{network_id} - # Get specific network config (e.g., "solana-mainnet-beta") -POST /gateway/networks/{network_id} - # Update network configuration - -GET /gateway/networks/{network_id}/tokens - # List tokens available on network -POST /gateway/networks/{network_id}/tokens - # Add custom token to network - Body: { - "token_address": "token_contract_address", - "token_symbol": "SYMBOL", - "token_decimals": 18, - "token_name": "Token Name" - } -DELETE /gateway/networks/{network_id}/tokens/{token_address} - # Remove token from network - -GET /gateway/pools # List liquidity pools -POST /gateway/pools # Add custom pool -``` - -**Note:** Use `manage_gateway_config` MCP tool for easier configuration management. - ---- - -### 💱 Gateway Swaps (`/gateway/swap`) - -**MCP tool exists:** Use `manage_gateway_swaps()` for DEX trading. 
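If you do call the swap endpoints directly instead of the MCP tool, a quote request could look like this (a sketch, assuming the default `admin`/`admin` credentials and a running Gateway with the Jupiter connector configured):

```bash
# Request a swap quote for buying 1 SOL with USDC via Jupiter
curl -u admin:admin -X POST http://localhost:8000/gateway/swap/quote \
  -H "Content-Type: application/json" \
  -d '{
    "chain": "solana",
    "network": "mainnet-beta",
    "connector": "jupiter",
    "base": "SOL",
    "quote": "USDC",
    "amount": "1.0",
    "side": "BUY",
    "allowedSlippage": "1.0"
  }'
```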
- -``` -POST /gateway/swap/quote - # Get swap quote with pricing and gas estimates - Body: { - "chain": "solana", - "network": "mainnet-beta", - "connector": "jupiter", - "base": "SOL", - "quote": "USDC", - "amount": "1.0", - "side": "BUY", - "allowedSlippage": "1.0" - } - -POST /gateway/swap/execute - # Execute the swap transaction - Body: { - "chain": "solana", - "network": "mainnet-beta", - "connector": "jupiter", - "address": "wallet_address", - "base": "SOL", - "quote": "USDC", - "amount": "1.0", - "side": "BUY", - "allowedSlippage": "1.0" - } - -GET /gateway/swaps/{transaction_hash}/status - # Check transaction status - -POST /gateway/swaps/search - # Search swap transaction history - Body: { - "chain": "solana", - "network": "mainnet-beta", - "connector": "jupiter", - "address": "wallet_address", - "status": "CONFIRMED", # SUBMITTED, CONFIRMED, FAILED - "start_time": 1609459200, - "end_time": 1609545600, - "limit": 50 - } - -GET /gateway/swaps/summary - # Get aggregated swap statistics -``` - ---- - -### 🏊 Gateway CLMM (`/gateway/clmm`) - -**MCP tools exist:** -- `explore_gateway_clmm_pools()` - Pool discovery -- `manage_gateway_clmm_positions()` - Position management - -``` -GET /gateway/clmm/pools - # List CLMM pools with filtering and sorting - Query params: connector, page, limit, search, sort_key, order_by - -GET /gateway/clmm/pool-info - # Get detailed info for specific pool - Query params: chain, network, connector, token0, token1, fee - -POST /gateway/clmm/open - # Open new concentrated liquidity position - Body: { - "chain": "solana", - "network": "mainnet-beta", - "connector": "meteora", - "address": "wallet_address", - "pool_address": "pool_address", - "lower_price": "150.0", - "upper_price": "250.0", - "base_token_amount": "1.0", - "quote_token_amount": "200.0", - "slippage": "1.0" - } - -POST /gateway/clmm/close - # Close CLMM position (remove all liquidity) - Body: { - "chain": "solana", - "network": "mainnet-beta", - "connector": "meteora", - "address": "wallet_address", - "position_address": "position_nft_address" - } - -POST /gateway/clmm/collect-fees - # Collect accumulated fees from position - Body: { - "chain": "solana", - "network": "mainnet-beta", - "connector": "meteora", - "address": "wallet_address", - "position_address": "position_nft_address" - } - -POST /gateway/clmm/positions_owned - # Get all positions owned by wallet in specific pool - Body: { - "chain": "solana", - "network": "mainnet-beta", - "connector": "meteora", - "address": "wallet_address", - "pool_address": "pool_address" - } - -GET /gateway/clmm/positions/{position_address}/events - # Get event history for specific position - -POST /gateway/clmm/positions/search - # Search CLMM positions with filters - Body: { - "chain": "solana", - "network": "mainnet-beta", - "connector": "meteora", - "address": "wallet_address", - "status": "OPEN", # OPEN, CLOSED - "start_time": 1609459200, - "end_time": 1609545600 - } -``` - ---- - -## 🆘 Common Error Handling - -### MCP Connection Lost - -**Error:** -``` -Error executing tool: ❌ Failed to connect to Hummingbot API -Connection failed after 3 attempts. -``` - -**Solution:** Reconnect immediately: -```python -configure_api_servers(action="add", name="local", host="localhost", port=8000, username="admin", password="admin") -configure_api_servers(action="set_default", name="local") -# Retry your operation -``` - -### Authentication Errors (401) - -Check credentials in `.env` file match what you're using. 
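A quick way to confirm the credentials themselves is to hit the root endpoint and check the status code: a 200 means auth is working, while a 401 means the username and password don't match what is in `.env` (sketch, assuming the default `admin`/`admin` pair):

```bash
# Prints only the HTTP status code of an authenticated request to the API root
curl -u admin:admin -s -o /dev/null -w "%{http_code}\n" http://localhost:8000/
```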
- -### Validation Errors (422) - -Read the error detail - usually missing required parameters or invalid values. - -### Resource Not Found (404) - -- Bot doesn't exist -- Connector name misspelled -- Database path incorrect - ---- - -## 💡 AI Assistant Tips - -1. **Always use MCP tools first** - They handle complexity for you -2. **Start with `configure_api_servers`** - Establishes connection -3. **Use `get_portfolio_overview`** - Single call for complete portfolio -4. **Progressive disclosure** - Tools like `setup_connector` guide you step-by-step -5. **Check MCP tool errors** - Reconnect immediately if connection fails -6. **Read error messages** - They usually tell you exactly what's wrong - ---- - -## 📚 Additional Resources - -- **Interactive API Docs**: http://localhost:8000/docs (Swagger UI) -- **Setup Guides**: See CLAUDE.md, AGENTS.md, GEMINI.md -- **Architecture**: See README.md -- **Troubleshooting**: See README.md Troubleshooting section diff --git a/CLAUDE.md b/CLAUDE.md deleted file mode 100644 index 8011ce58..00000000 --- a/CLAUDE.md +++ /dev/null @@ -1,726 +0,0 @@ -# Using Hummingbot API with Claude - -This guide shows you how to interact with the Hummingbot API using Claude (claude.ai) and Claude Code (CLI). - -## 🤖 Method 1: MCP Server (Recommended) - -The Hummingbot MCP server provides natural language access to all API functionality through Claude Desktop or Claude Code. - -### Claude Desktop Setup - -1. **Enable MCP during Hummingbot API setup**: - ```bash - ./setup.sh # Answer "y" to "Enable MCP server for AI assistant usage?" - ``` - -2. **Configure Claude Desktop**: - - Open (or create) `~/Library/Application Support/Claude/claude_desktop_config.json` (macOS) - - Or `%APPDATA%\Claude\claude_desktop_config.json` (Windows) - - Add this configuration: - ```json - { - "mcpServers": { - "hummingbot": { - "command": "docker", - "args": ["run", "--rm", "-i", "-e", "HUMMINGBOT_API_URL=http://host.docker.internal:8000", "-v", "hummingbot_mcp:/root/.hummingbot_mcp", "hummingbot/hummingbot-mcp:latest"] - } - } - } - ``` - -3. **Restart Claude Desktop** - -4. **Start using natural language**: - - "What are my current portfolio balances?" - - "Show me active trading bots" - - "Create a new PMM strategy for ETH-USDT on Binance" - - "What's the performance of my bots this week?" - -### Claude Code (CLI) Setup - -1. **Add the MCP server**: - ```bash - claude mcp add --transport stdio hummingbot -- docker run --rm -i -e HUMMINGBOT_API_URL=http://host.docker.internal:8000 -v hummingbot_mcp:/root/.hummingbot_mcp hummingbot/hummingbot-mcp:latest - ``` - -2. **Use in your terminal**: - ```bash - # Claude Code automatically uses the MCP server - # Just ask questions naturally in your terminal - ``` - -3. **Manage the connection**: - ```bash - claude mcp list # List configured servers - claude mcp get hummingbot # View server details - claude mcp remove hummingbot # Remove server - ``` - -## 🔧 Method 2: Direct API Access (Fallback) - -If MCP is unavailable, you can interact with the API directly. See @API_REFERENCE.md for the full endpoint list. - -## API Reference - -The API is accessible at `http://localhost:8000` with interactive Swagger docs at `http://localhost:8000/docs`. - -Refer to @API_REFERENCE.md for the full set of endpoints. - -### Authentication - -All endpoints require HTTP Basic Authentication. Use the username and password configured during setup. - -See @env for the current environment variables. 
- -Example: -```bash -curl -u username:password http://localhost:8000/endpoint -``` - -## Common Development Commands - -Refer to the Installation & Setup section in @README.md for more information. - -### Environment Setup -```bash -# First-time setup - creates Docker services and environment -chmod +x setup.sh -./setup.sh - -# Install development environment (requires Conda) -make install - -# Run in development mode with hot-reloading -./run.sh --dev - -# Run in production mode (Docker container) -./run.sh -``` - -### Code Quality & Testing -```bash -# Format code (automatically enforced by pre-commit hooks) -black --line-length 130 . -isort --line-length 130 --profile black . - -# Install pre-commit hooks -make install-pre-commit - -# Access API documentation -# Visit http://localhost:8000/docs after starting the API -``` - -### Docker Operations -```bash -# Build Docker image -make build - -# Deploy with Docker Compose -make deploy - -# Check running containers -docker ps - -# View container logs -docker logs hummingbot-api -``` - -## High-Level Architecture - -### Core Service Architecture -The API follows a microservice pattern where each trading bot runs in its own Docker container, communicating through MQTT with the main API service. - -**Key Components:** -1. **FastAPI Application** (`main.py`): Central API with lifespan management for background services -2. **Bot Orchestrator** (`services/bots_orchestrator.py`): Manages bot lifecycle - deployment, monitoring, and archival -3. **Docker Service** (`services/docker_service.py`): Wrapper around Docker SDK for container operations -4. **MQTT Manager** (`utils/mqtt_manager.py`): Handles real-time communication with bot instances -5. **Repository Pattern** (`database/`): Clean data access layer with async PostgreSQL operations - -### Request Flow Example -1. User sends authenticated request to API endpoint -2. Router validates request and calls appropriate service -3. Service orchestrates operations (e.g., starting a bot involves Docker service + MQTT setup) -4. Bot containers publish updates via MQTT -5. API aggregates real-time data from MQTT and database - -### Bot Instance Management -Each bot maintains isolated state in `/bots/instances/hummingbot-{name}/`: -- Configuration files in `conf/` -- SQLite database in `data/` -- Execution logs in `logs/` - -The API never directly modifies bot files - all communication happens through MQTT commands. - -### Authentication & Security -- HTTP Basic Auth for API access (configured in `.env`) -- Config password encrypts exchange credentials using Fernet -- Credentials stored encrypted in `/bots/credentials/` - -### Database Schema -PostgreSQL stores aggregated data from all bots: -- `orders`: All order history with exchange info -- `trades`: Executed trades with fees -- `account_balances`: Periodic balance snapshots -- `positions`: Perpetual contract positions -- `funding_payments`: Funding payment history - -### Real-time Data Flow -1. Bots publish state updates to MQTT topics -2. API subscribes to relevant topics -3. Services process updates and store in PostgreSQL -4. Clients can query aggregated data via REST endpoints - -## Key Development Patterns - -### Async-First Design -All database operations and external calls use async/await. When adding new features: -```python -async def your_function(): - async with get_db_session() as session: - # Database operations -``` - -### Service Layer Pattern -Business logic lives in `/services`, not in routers. 
Routers should only handle HTTP concerns. - -### Error Handling -The API uses FastAPI's exception handling. Services should raise clear exceptions that routers can catch. - -### Configuration Management -All configuration uses Pydantic Settings (`config.py`). Environment variables override defaults. - -## Important Considerations - -### Bot State Synchronization -- Account balances update every `ACCOUNT_UPDATE_INTERVAL` minutes -- Real-time updates come from MQTT, historical data from database -- Always check both sources for complete picture - -### Docker Container Lifecycle -- Starting a bot: Creates container, waits for MQTT connection -- Stopping a bot: Graceful shutdown, optional archival to S3/local -- Failed containers remain for debugging (clean with `/docker/clean-exited`) - -### Market Data Feeds -- Feeds auto-cleanup after inactivity -- Each feed runs in a background task -- Memory management crucial for long-running feeds - -### Performance Optimization -- Use pagination for large datasets -- Cursor-based pagination preferred over offset -- Background tasks for long operations (archival, bulk updates) - -## Troubleshooting - -### Common Errors - -#### Password Verification File Missing -**Error**: `[Errno 2] No such file or directory: 'bots/credentials/master_account/.password_verification'` - -**Cause**: This error occurs when trying to add credentials before running the initial setup. - -**Solution**: Run `./setup.sh` to initialize the environment. This script: -- Creates necessary directory structures -- Sets up Docker services (PostgreSQL, MQTT broker) -- Initializes the master_account with required configuration files -- Creates the `.password_verification` file needed for credential encryption - -**Prevention**: Always run `./setup.sh` before attempting to add exchange credentials or perform account operations. - -## Common Workflows - -### 1. Adding Exchange Credentials -1. List available connectors: `GET /connectors/` - - Returns all supported exchanges (binance, coinbase, kraken, etc.) -2. Get required configuration fields: `GET /connectors/{connector_name}/config-map` - - Returns which fields are needed (api_key, api_secret, etc.) - - Shows field types and whether they're required -3. Gather credential values from the user - - Ask the user to provide values for each required field - - Ensure all required fields from step 2 are collected -4. Add credentials: `POST /accounts/add-credential/{account_name}/{connector_name}` - - Provide the required fields from config-map - - Credentials are encrypted and stored securely - -Example workflow: -```bash -# 1. Check what connectors are available -# Why: First, I need to see which exchanges are supported by the API -GET /connectors/ - -# 2. Get config requirements for Binance -# Why: I need to know what credentials Binance requires so I can ask you for the right information -GET /connectors/binance/config-map -# Returns: {"binance_api_key": {"prompt": "Enter your Binance API key", "is_secure": true, "is_connect_key": true}, -# "binance_api_secret": {"prompt": "Enter your Binance API secret", "is_secure": true}} - -# 3. Gather credentials from user -# Why: I need to collect your API credentials to connect to your Binance account -# Ask user: "Please provide your Binance API key" -# Ask user: "Please provide your Binance API secret" - -# 4. 
Add credentials -# Why: Now I will securely store your credentials encrypted with your config password -POST /accounts/add-credential/my_account/binance -Body: { - "binance_api_key": "your_api_key_here", - "binance_api_secret": "your_api_secret_here" -} -``` - -#### XRPL Example: -```bash -# 1. Get XRPL config requirements -# Why: I need to check what configuration XRPL connector requires -GET /connectors/xrpl/config-map -# Returns: ["xrpl_secret_key", "wss_node_urls", "custom_markets", "max_request_per_minute"] - -# 2. Gather XRPL credentials from user -# Why: I need to collect your XRPL wallet credentials and optional configuration -# Ask user: "Please provide your XRPL secret key" -# Ask user: "Please provide WebSocket node URLs (optional, defaults to public nodes)" -# Ask user: "Please provide custom markets configuration (optional)" -# Ask user: "Please provide max requests per minute (optional)" - -# 3. Add XRPL credentials -# Why: Now I will securely store your XRPL credentials encrypted with your config password -POST /accounts/add-credential/my_account/xrpl -Body: { - "xrpl_secret_key": "your_xrpl_secret_key_here", - "wss_node_urls": ["wss://s1.ripple.com", "wss://s2.ripple.com"], // optional - "custom_markets": {}, // optional - "max_request_per_minute": 300 // optional -} -``` - -### 2. Analyzing Portfolio -1. Get current portfolio state: `POST /portfolio/state` - - Returns real-time balances across all accounts - - Can filter by specific accounts or connectors -2. Retrieve historical data: `POST /portfolio/history` - - Returns time-series portfolio values - - Supports cursor-based pagination for large datasets -3. Analyze token distribution: `POST /portfolio/distribution` - - Shows percentage allocation by token - - Aggregates across all exchanges -4. Review account distribution: `POST /portfolio/accounts-distribution` - - Shows percentage allocation by account - - Useful for risk management - -Example workflow: -```bash -# 1. Get current portfolio snapshot -# Why: I'm checking your current balances across selected accounts and exchanges -POST /portfolio/state -Body: { - "account_names": ["trading_account", "savings_account"], - "connectors": ["binance", "coinbase"] -} -# Returns: {"balances": [{"token": "BTC", "total": 0.5, "available": 0.4, "locked": 0.1, "usd_value": 25000}...]} - -# 2. Get historical portfolio performance -# Why: I'm retrieving your portfolio history to analyze performance over time -POST /portfolio/history -Body: { - "account_names": ["trading_account"], - "limit": 100, - "cursor": null -} -# Returns: {"data": [{"timestamp": 1234567890, "total_usd_value": 50000, "balances": {...}}...], "next_cursor": "..."} - -# 3. Analyze token distribution -# Why: I'm calculating how your portfolio is distributed across different tokens -POST /portfolio/distribution -Body: { - "account_names": ["trading_account", "savings_account"] -} -# Returns: {"BTC": {"amount": 0.5, "usd_value": 25000, "percentage": 50.0}, -# "ETH": {"amount": 10, "usd_value": 20000, "percentage": 40.0}...} - -# 4. Check account distribution -# Why: I'm analyzing how your total portfolio value is spread across your different accounts -POST /portfolio/accounts-distribution -Body: {} # No filter returns all accounts -# Returns: {"trading_account": {"usd_value": 40000, "percentage": 80.0}, -# "savings_account": {"usd_value": 10000, "percentage": 20.0}} -``` - -### 3. Fetching Market Data -1. 
Start real-time candle feed: `POST /market-data/candles` - - Creates persistent websocket connection - - Auto-cleanup after inactivity -2. Get current prices: `POST /market-data/prices` - - Returns spot prices for multiple pairs -3. Analyze order book: `POST /market-data/order-book` - - Returns bid/ask levels with depth -4. Calculate market metrics: Various order book analytics endpoints - - Price impact for volume - - VWAP calculations - - Volume at price levels - -Example workflow: -```bash -# 1. Start real-time candle feed -# Why: I'm establishing a real-time data feed to monitor price movements -POST /market-data/candles -Body: { - "connector_name": "binance", - "trading_pairs": ["BTC-USDT", "ETH-USDT"], - "intervals": ["1m", "5m"], - "max_records": 1000 -} -# Returns: {"feed_id": "candles_binance_123", "status": "running"} - -# 2. Get current prices -# Why: I need to check the current market prices before placing any orders -POST /market-data/prices -Body: { - "connector_name": "binance", - "trading_pairs": ["BTC-USDT", "ETH-USDT"] -} -# Returns: {"BTC-USDT": {"price": 50000.00, "timestamp": 1234567890}, -# "ETH-USDT": {"price": 3000.00, "timestamp": 1234567890}} - -# 3. Get order book snapshot -# Why: I'm analyzing market depth to understand liquidity and potential price impact -POST /market-data/order-book -Body: { - "connector_name": "binance", - "trading_pair": "BTC-USDT", - "depth": 20 -} -# Returns: {"timestamp": 1234567890, -# "bids": [[49999.00, 0.5], [49998.00, 1.0]...], -# "asks": [[50001.00, 0.3], [50002.00, 0.8]...]} - -# 4. Calculate VWAP for large order -# Why: I'm calculating the average price you would pay if you execute a large order -POST /market-data/order-book/vwap-for-volume -Body: { - "connector_name": "binance", - "trading_pair": "BTC-USDT", - "volume": 10, - "side": "buy" -} -# Returns: {"vwap": 50015.50, "avg_price": 50015.50} -``` - -### 4. Executing Trades -1. Check connector capabilities: `GET /connectors/{connector_name}/order-types` - - Returns supported order types (limit, market, stop-loss, etc.) -2. Get trading rules: `GET /connectors/{connector_name}/trading-rules` - - Returns min/max order amounts, tick sizes, minimum notional values -3. Verify current price: `POST /market-data/prices` - - Ensures order price is reasonable -4. Place order: `POST /trading/orders` - - Must respect trading rules constraints -5. Monitor order status: `POST /trading/orders/active` - - Track order execution progress -6. Cancel if needed: `POST /trading/{account_name}/{connector_name}/orders/{order_id}/cancel` - -Example workflow: -```bash -# 1. Check supported order types -# Why: I need to verify what order types Binance supports before placing orders -GET /connectors/binance/order-types -# Returns: ["limit", "limit_maker", "market", "stop_loss_limit"] - -# 2. Get trading rules for BTC-USDT -# Why: I'm checking minimum order sizes and price increments to ensure your order is valid -GET /connectors/binance/trading-rules?trading_pairs=BTC-USDT -# Returns: {"BTC-USDT": {"min_order_size": 0.00001, "max_order_size": 9000, -# "min_price_increment": 0.01, "min_base_amount_increment": 0.00001, -# "min_notional_size": 5.0}} - -# 3. Check current market price -# Why: I need to know the current price to place a competitive limit order -POST /market-data/prices -Body: {"connector_name": "binance", "trading_pairs": ["BTC-USDT"]} -# Returns: {"BTC-USDT": {"price": 50000.00, "timestamp": 1234567890}} - -# 4. 
Place limit order -# Why: I'm placing your buy order slightly below market price to get a better fill -POST /trading/orders -Body: { - "account_name": "trading_account", - "connector_name": "binance", - "trading_pair": "BTC-USDT", - "order_type": "limit", - "trade_type": "buy", - "price": 49900.00, # Below market for limit buy - "amount": 0.001 # Total value: 49.90 USD (above min_notional_size) -} -# Returns: {"client_order_id": "HMBot-123456", "exchange_order_id": "BIN-789", "status": "open"} - -# 5. Monitor active orders -# Why: I'm checking the status of your order to see if it has been filled -POST /trading/orders/active -Body: { - "account_names": ["trading_account"], - "connectors": ["binance"] -} -# Returns: {"data": [{"client_order_id": "HMBot-123456", "status": "open", "filled_amount": 0}...]} - -# 6. Cancel order if needed -# Why: If the order hasn't filled and you want to cancel it, I can do that now -POST /trading/trading_account/binance/orders/HMBot-123456/cancel -# Returns: {"success": true, "exchange_order_id": "BIN-789"} -``` - -### 5. Orchestrating Bots -1. Create account and add credentials: See workflow 1 -2. Choose strategy type: - - V1 Scripts: Traditional Hummingbot scripts - - V2 Scripts: Next-gen scripts with enhanced features - - V2 Controllers: Advanced multi-strategy controllers -3. Get strategy configuration template: `GET /scripts/{script_name}/config-template` -4. Deploy bot with configuration: `POST /bot-orchestration/start-bot` or `POST /bot-orchestration/deploy-v2-script` -5. Monitor bot status: `GET /bot-orchestration/{bot_name}/status` -6. Review performance: `GET /bot-orchestration/{bot_name}/history` -7. Stop and archive when done: `POST /bot-orchestration/stop-and-archive-bot/{bot_name}` - -Example workflow: -```bash -# 1. List available V2 scripts -# Why: I need to see what automated trading strategies are available -GET /scripts/ -# Returns: ["v2_directional_rsi", "v2_bollinger_dca", "v2_macd_bb_v1"...] - -# 2. Get configuration template -# Why: I'm checking what parameters the RSI strategy needs so I can configure it properly -GET /scripts/v2_directional_rsi/config-template -# Returns: {"script_name": "v2_directional_rsi", "config": {"connector": "", "trading_pair": "", "rsi_period": 14...}} - -# 3. Deploy V2 script bot -# Why: I'm launching your automated RSI trading bot with your specified configuration -POST /bot-orchestration/deploy-v2-script -Body: { - "bot_name": "rsi_bot_btc", - "script": "v2_directional_rsi", - "config": { - "connector": "binance", - "trading_pair": "BTC-USDT", - "rsi_period": 14, - "rsi_oversold": 30, - "rsi_overbought": 70, - "order_amount": 0.001 - } -} -# Returns: {"bot_name": "rsi_bot_btc", "status": "starting", "container_name": "hummingbot-rsi_bot_btc"} - -# 4. Check bot status -# Why: I'm verifying that your bot is running properly and connected to the exchange -GET /bot-orchestration/rsi_bot_btc/status -# Returns: {"bot_name": "rsi_bot_btc", "status": "running", "mqtt_connected": true, -# "last_update": 1234567890, "active_orders": 1} - -# 5. Get bot performance history -# Why: I'm retrieving your bot's trading performance to analyze its effectiveness -GET /bot-orchestration/rsi_bot_btc/history?start_time=1234567800&end_time=1234567890 -# Returns: {"orders": [...], "trades": [...], "performance": {"total_pnl": 150.50, "win_rate": 0.65}} - -# 6. 
Stop and archive bot -# Why: I'm stopping your bot and archiving its data for future analysis -POST /bot-orchestration/stop-and-archive-bot/rsi_bot_btc -# Returns: {"status": "stopped", "archive_path": "/bots/archived/rsi_bot_btc_20240704.tar.gz"} -``` - -### 6. Managing Gateway Container (For DEX Trading) -Gateway is required for decentralized exchange (DEX) trading. Use the `manage_gateway_container` MCP tool to control Gateway lifecycle. - -1. Configure API connection (one-time setup) -2. Start Gateway with configuration -3. Verify Gateway status -4. Manage Gateway lifecycle (restart/stop as needed) - -Example workflow using MCP: -```python -# 1. Configure API connection (first time only) -# Why: I need to authenticate with the Hummingbot API to manage Gateway -send_request({ - "jsonrpc": "2.0", - "id": 1, - "method": "tools/call", - "params": { - "name": "configure_api_servers", - "arguments": { - "api_url": "http://host.docker.internal:8000", - "username": "admin", - "password": "admin" - } - } -}) -# Returns: {"result": {"content": [{"type": "text", "text": "API configuration saved successfully"}]}} - -# 2. Start Gateway container -# Why: I'm launching the Gateway service for DEX trading with your specified passphrase -send_request({ - "jsonrpc": "2.0", - "id": 2, - "method": "tools/call", - "params": { - "name": "manage_gateway_container", - "arguments": { - "action": "start", - "config": { - "passphrase": "admin", - "dev_mode": true, - "image": "hummingbot/gateway:latest", - "port": 15888 - } - } - } -}) -# Returns: Gateway container started successfully at http://localhost:15888 - -# 3. Check Gateway status -# Why: I'm verifying that Gateway is running and accessible -send_request({ - "jsonrpc": "2.0", - "id": 3, - "method": "tools/call", - "params": { - "name": "manage_gateway_container", - "arguments": { - "action": "get_status" - } - } -}) -# Returns: {"status": "running", "container_id": "abc123...", "port": 15888, "dev_mode": true} - -# 4. Restart Gateway (if needed) -# Why: If Gateway becomes unresponsive, I can restart it to restore functionality -send_request({ - "jsonrpc": "2.0", - "id": 4, - "method": "tools/call", - "params": { - "name": "manage_gateway_container", - "arguments": { - "action": "restart" - } - } -}) -# Returns: Gateway container restarted successfully - -# 5. 
Stop Gateway (when done with DEX trading) -# Why: I'm shutting down Gateway to free up resources when you're not using DEX features -send_request({ - "jsonrpc": "2.0", - "id": 5, - "method": "tools/call", - "params": { - "name": "manage_gateway_container", - "arguments": { - "action": "stop" - } - } -}) -# Returns: Gateway container stopped successfully -``` - -#### Natural Language Examples (Claude Code) -If you're using Claude Code CLI, you can use natural language: -- "Start Gateway in development mode with passphrase 'admin'" -- "Check if Gateway is running" -- "Restart the Gateway container" -- "Stop Gateway" - -#### Important Notes -- **Development mode** (`dev_mode: true`): HTTP access on port 15888, Swagger UI available -- **Production mode** (`dev_mode: false`): HTTPS with certificates required, more secure -- **Passphrase**: Used to encrypt/decrypt DEX wallet keys, store it securely -- **Port**: Default is 15888, must be available on your system -- Gateway URL will be: `http://localhost:15888` (dev) or `https://localhost:15888` (prod) - -## MCP Tools Best Practices - -### Using `configure_api_servers` for Connection Management - -**Before using any MCP tools**, always ensure the API server is properly configured: - -```python -# Check if connection is working - if any MCP tool fails, reconnect: -configure_api_servers(action="add", name="local", host="localhost", port=8000, username="admin", password="admin") -configure_api_servers(action="set_default", name="local") -``` - -### Using `get_portfolio_overview` for Token Balances - -**Preferred method for checking balances**: -- Use `get_portfolio_overview()` instead of direct API calls -- Includes CEX balances, DEX balances, LP positions, and active orders in one call -- Automatically handles all account types (Hyperliquid, Solana, Ethereum, etc.) - -```python -# Get complete portfolio overview -get_portfolio_overview( - include_balances=True, - include_perp_positions=False, - include_lp_positions=True, - include_active_orders=True, - as_distribution=False -) -``` - -### Common MCP Connection Issue - -**Error**: -``` -Error executing tool get_portfolio_overview: ❌ Failed to connect to Hummingbot API at http://docker.host.internal:8000 - -Connection failed after 3 attempts. - -💡 Solutions: - 1. Check if the API is running and accessible - 2. Verify your credentials are correct - 3. Use 'configure_api_servers' tool for setup - -Original error: Cannot connect to host docker.host.internal:8000 ssl:default [Name or service not known] -``` - -**Root Cause**: The MCP tool loses connection to the API server. This happens when: -- MCP server reconnects/restarts -- API credentials are not cached -- Network configuration changes - -**Solution**: Reconfigure the API server connection before retrying: - -```python -# Step 1: Add server configuration -configure_api_servers( - action="add", - name="local", - host="localhost", - port=8000, - username="admin", - password="admin" -) - -# Step 2: Set as default -configure_api_servers(action="set_default", name="local") - -# Step 3: Retry the operation -get_portfolio_overview(include_balances=True) -``` - -**Prevention**: Always check connection before using other MCP tools. If you see any connection error, immediately run `configure_api_servers` to restore the connection. 
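-
-For custom clients, the same reconnect-then-retry pattern can be wrapped in a small helper. This is a minimal sketch, not part of the MCP server itself: `call_with_reconnect` is a hypothetical name, and it assumes a lost connection surfaces as a `ConnectionError`; depending on your client, the failure may instead come back as an error string (like the message above) that you need to check for.
-
-```python
-# Hypothetical helper (sketch): retry an MCP tool call once after
-# reconfiguring the API server connection, as recommended above.
-def call_with_reconnect(tool_call, *args, **kwargs):
-    try:
-        return tool_call(*args, **kwargs)
-    except ConnectionError:
-        # Re-add and re-select the API server, then retry the original call once
-        configure_api_servers(action="add", name="local", host="localhost",
-                              port=8000, username="admin", password="admin")
-        configure_api_servers(action="set_default", name="local")
-        return tool_call(*args, **kwargs)
-
-# Usage: any tool from this guide can be passed through the wrapper
-balances = call_with_reconnect(get_portfolio_overview, include_balances=True)
-```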
- -## Error Codes - -- `400`: Bad Request - Invalid parameters -- `401`: Unauthorized - Authentication required -- `404`: Not Found - Resource doesn't exist -- `422`: Unprocessable Entity - Validation error -- `500`: Internal Server Error - Server issue - -## Rate Limiting - -No built-in rate limiting. Consider implementing client-side throttling for production use. - -## WebSocket Support - -Not available. Use polling for real-time updates or integrate with MQTT broker directly for bot events. diff --git a/GEMINI.md b/GEMINI.md deleted file mode 100644 index 24acfa60..00000000 --- a/GEMINI.md +++ /dev/null @@ -1,348 +0,0 @@ -# Using Hummingbot API with Gemini - -This guide shows you how to interact with the Hummingbot API using Google Gemini. - -## 🤖 Method 1: MCP Server (Recommended) - -The Hummingbot MCP server provides natural language access to all API functionality through Gemini. - -### Setup via Gemini CLI - -1. **Enable MCP during Hummingbot API setup**: - ```bash - ./setup.sh # Answer "y" to "Enable MCP server for AI assistant usage?" - ``` - -2. **Add the MCP server using Gemini CLI**: - ```bash - gemini mcp add hummingbot \ - --command "docker" \ - --args "run" "--rm" "-i" "-e" "HUMMINGBOT_API_URL=http://host.docker.internal:8000" "-v" "hummingbot_mcp:/root/.hummingbot_mcp" "hummingbot/hummingbot-mcp:latest" \ - --protocol stdio - ``` - -3. **Verify the server was added**: - ```bash - gemini mcp list - ``` - -4. **Start using natural language**: - - "What are my current portfolio balances?" - - "Show me active trading bots" - - "Create a new market making strategy for SOL-USDT" - - "What's the performance of my bots today?" - -### Manual Configuration (Alternative) - -#### For Gemini CLI (Global Configuration) - -Create or edit `~/.gemini/settings.json`: - -```json -{ - "mcpServers": { - "hummingbot": { - "command": "docker", - "args": ["run", "--rm", "-i", "-e", "HUMMINGBOT_API_URL=http://host.docker.internal:8000", "-v", "hummingbot_mcp:/root/.hummingbot_mcp", "hummingbot/hummingbot-mcp:latest"], - "protocol": "stdio" - } - } -} -``` - -#### For Project-Specific Configuration - -Create `.gemini/settings.json` in your project root: - -```json -{ - "mcpServers": { - "hummingbot": { - "command": "docker", - "args": ["run", "--rm", "-i", "-e", "HUMMINGBOT_API_URL=http://host.docker.internal:8000", "-v", "hummingbot_mcp:/root/.hummingbot_mcp", "hummingbot/hummingbot-mcp:latest"], - "protocol": "stdio" - } - } -} -``` - -#### For IDE Integration - -Create `mcp.json` in your IDE's configuration directory: - -```json -{ - "mcpServers": { - "hummingbot": { - "command": "docker", - "args": ["run", "--rm", "-i", "-e", "HUMMINGBOT_API_URL=http://host.docker.internal:8000", "-v", "hummingbot_mcp:/root/.hummingbot_mcp", "hummingbot/hummingbot-mcp:latest"], - "protocol": "stdio" - } - } -} -``` - -### Managing the Connection - -```bash -# List all configured MCP servers -gemini mcp list - -# View details of the Hummingbot server -gemini mcp get hummingbot - -# Remove the server -gemini mcp remove hummingbot -``` - -## 🔧 Method 2: Direct API Access (Fallback) - -If MCP is unavailable, you can interact with the API directly using HTTP requests. - -### API Endpoints - -The API is accessible at `http://localhost:8000` with interactive Swagger docs at `http://localhost:8000/docs`. - -See @API_REFERENCE.md for the complete endpoint reference. 
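-
-If you want Gemini (or a script it writes) to discover the endpoints programmatically rather than reading the Swagger UI, the schema behind those docs can usually be fetched directly. This is a small sketch that assumes the OpenAPI schema is served at `/openapi.json`, the default location for FastAPI-style Swagger docs; adjust the path and credentials for your deployment.
-
-```python
-import requests
-from requests.auth import HTTPBasicAuth
-
-# Fetch the machine-readable OpenAPI schema that powers the Swagger UI
-schema = requests.get(
-    "http://localhost:8000/openapi.json",
-    auth=HTTPBasicAuth("admin", "admin"),
-).json()
-
-# Print every path and the HTTP methods it supports
-for path, methods in schema["paths"].items():
-    print(path, sorted(methods.keys()))
-```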
- -### Authentication - -All endpoints require HTTP Basic Authentication: - -```bash -curl -u username:password http://localhost:8000/endpoint -``` - -### Example API Calls - -**Get Portfolio State**: -```bash -curl -u admin:admin -X POST http://localhost:8000/portfolio/state \ - -H "Content-Type: application/json" \ - -d '{"account_names": ["master_account"]}' -``` - -**List Active Bots**: -```bash -curl -u admin:admin http://localhost:8000/bot-orchestration/status -``` - -**Get Market Prices**: -```bash -curl -u admin:admin -X POST http://localhost:8000/market-data/prices \ - -H "Content-Type: application/json" \ - -d '{ - "connector_name": "binance", - "trading_pairs": ["BTC-USDT", "ETH-USDT"] - }' -``` - -## 🌐 Common Workflows - -### Managing Gateway Container (For DEX Trading) - -Gateway is required for decentralized exchange (DEX) trading. Use the `manage_gateway_container` MCP tool through natural language commands. - -#### Using Natural Language (Recommended) - -Once you've configured Gemini with the Hummingbot MCP server, you can manage Gateway using simple commands: - -- **"Start Gateway in development mode with passphrase 'admin'"** - - Launches Gateway container for DEX trading - - Development mode enables HTTP access and Swagger UI - -- **"Check Gateway status"** - - Verifies if Gateway is running - - Shows container details, port, and mode - -- **"Restart the Gateway container"** - - Restarts Gateway if it becomes unresponsive - - Useful for applying configuration changes - -- **"Stop Gateway"** - - Shuts down Gateway when not needed - - Frees up system resources - -#### Using MCP Tool Directly - -If you're building custom integrations, you can call the `manage_gateway_container` tool directly: - -```python -# 1. Configure API connection (first time only) -send_request({ - "jsonrpc": "2.0", - "id": 1, - "method": "tools/call", - "params": { - "name": "configure_api_servers", - "arguments": { - "api_url": "http://host.docker.internal:8000", - "username": "admin", - "password": "admin" - } - } -}) - -# 2. Start Gateway container -send_request({ - "jsonrpc": "2.0", - "id": 2, - "method": "tools/call", - "params": { - "name": "manage_gateway_container", - "arguments": { - "action": "start", - "config": { - "passphrase": "admin", - "dev_mode": true, - "image": "hummingbot/gateway:latest", - "port": 15888 - } - } - } -}) - -# 3. Check Gateway status -send_request({ - "jsonrpc": "2.0", - "id": 3, - "method": "tools/call", - "params": { - "name": "manage_gateway_container", - "arguments": { - "action": "get_status" - } - } -}) - -# 4. Restart Gateway (if needed) -send_request({ - "jsonrpc": "2.0", - "id": 4, - "method": "tools/call", - "params": { - "name": "manage_gateway_container", - "arguments": { - "action": "restart" - } - } -}) - -# 5. 
Stop Gateway -send_request({ - "jsonrpc": "2.0", - "id": 5, - "method": "tools/call", - "params": { - "name": "manage_gateway_container", - "arguments": { - "action": "stop" - } - } -}) -``` - -#### Important Notes -- **Development mode** (`dev_mode: true`): HTTP access on port 15888, Swagger UI available at `http://localhost:15888/docs` -- **Production mode** (`dev_mode: false`): HTTPS with certificates required, more secure for production use -- **Passphrase**: Used to encrypt/decrypt DEX wallet keys - store it securely -- **Port**: Default is 15888, ensure it's available on your system - -## 📚 Additional Resources - -- **API Reference**: See @API_REFERENCE.md for all available endpoints -- **README**: See @README.md for complete setup instructions -- **Swagger UI**: http://localhost:8000/docs (interactive API documentation) - -## 🆘 Troubleshooting - -**MCP server not responding**: -```bash -# Check if MCP container is running -docker ps | grep hummingbot-mcp - -# If not, re-enable during setup -./setup.sh # Answer "y" to MCP prompt -``` - -**Configuration not loading**: -- Verify the JSON syntax in your configuration file -- Ensure Docker is running -- Check that the hummingbot-mcp container exists - -**Authentication errors**: -- Verify username and password in `.env` file -- Ensure the API is running: `docker ps | grep hummingbot-api` - -## MCP Tools Best Practices - -### Using `configure_api_servers` for Connection Management - -**Before using any MCP tools**, always ensure the API server is properly configured: - -```python -# Check if connection is working - if any MCP tool fails, reconnect: -configure_api_servers(action="add", name="local", host="localhost", port=8000, username="admin", password="admin") -configure_api_servers(action="set_default", name="local") -``` - -### Using `get_portfolio_overview` for Token Balances - -**Preferred method for checking balances**: -- Use `get_portfolio_overview()` instead of direct API calls -- Includes CEX balances, DEX balances, LP positions, and active orders in one call -- Automatically handles all account types (Hyperliquid, Solana, Ethereum, etc.) - -```python -# Get complete portfolio overview -get_portfolio_overview( - include_balances=True, - include_perp_positions=False, - include_lp_positions=True, - include_active_orders=True, - as_distribution=False -) -``` - -### Common MCP Connection Issue - -**Error**: -``` -Error executing tool get_portfolio_overview: ❌ Failed to connect to Hummingbot API at http://docker.host.internal:8000 - -Connection failed after 3 attempts. - -💡 Solutions: - 1. Check if the API is running and accessible - 2. Verify your credentials are correct - 3. Use 'configure_api_servers' tool for setup - -Original error: Cannot connect to host docker.host.internal:8000 ssl:default [Name or service not known] -``` - -**Root Cause**: The MCP tool loses connection to the API server. This happens when: -- MCP server reconnects/restarts -- API credentials are not cached -- Network configuration changes - -**Solution**: Reconfigure the API server connection before retrying: - -```python -# Step 1: Add server configuration -configure_api_servers( - action="add", - name="local", - host="localhost", - port=8000, - username="admin", - password="admin" -) - -# Step 2: Set as default -configure_api_servers(action="set_default", name="local") - -# Step 3: Retry the operation -get_portfolio_overview(include_balances=True) -``` - -**Prevention**: Always check connection before using other MCP tools. 
If you see any connection error, immediately run `configure_api_servers` to restore the connection. diff --git a/database/models.py b/database/models.py index a16af95e..bd2b5ae7 100644 --- a/database/models.py +++ b/database/models.py @@ -284,6 +284,10 @@ class GatewayCLMMPosition(Base): lower_bin_id = Column(Integer, nullable=True) # For bin-based CLMM (Meteora) upper_bin_id = Column(Integer, nullable=True) + # Price tracking for PnL calculation + entry_price = Column(Numeric(precision=30, scale=18), nullable=True) # Pool price when position opened + current_price = Column(Numeric(precision=30, scale=18), nullable=True) # Latest price (becomes close price when closed) + # Initial deposit amounts (for PnL calculation) initial_base_token_amount = Column(Numeric(precision=30, scale=18), nullable=True) initial_quote_token_amount = Column(Numeric(precision=30, scale=18), nullable=True) diff --git a/database/repositories/account_repository.py b/database/repositories/account_repository.py index 1e3438ce..a799c130 100644 --- a/database/repositories/account_repository.py +++ b/database/repositories/account_repository.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timedelta from decimal import Decimal from typing import Dict, List, Optional, Tuple import base64 @@ -15,6 +15,54 @@ class AccountRepository: def __init__(self, session: AsyncSession): self.session = session + @staticmethod + def _interval_to_minutes(interval: str) -> int: + """Convert interval string to minutes.""" + interval_map = { + "5m": 5, + "15m": 15, + "30m": 30, + "1h": 60, + "4h": 240, + "12h": 720, + "1d": 1440 + } + return interval_map.get(interval, 5) # Default to 5 minutes + + @staticmethod + def _sample_history_by_interval(history: List[Dict], interval_minutes: int) -> List[Dict]: + """ + Sample historical data points based on the specified interval. + + Args: + history: List of historical data points sorted by timestamp (descending) + interval_minutes: Sampling interval in minutes + + Returns: + Sampled list of data points + """ + if not history or interval_minutes <= 5: + return history # Return all data for 5m or less + + sampled = [] + last_sampled_time = None + + for item in history: + item_time = datetime.fromisoformat(item["timestamp"].replace('Z', '+00:00')) + + if last_sampled_time is None: + # Always include the first (most recent) data point + sampled.append(item) + last_sampled_time = item_time + else: + # Check if enough time has passed since last sampled point + time_diff = (last_sampled_time - item_time).total_seconds() / 60 + if time_diff >= interval_minutes: + sampled.append(item) + last_sampled_time = item_time + + return sampled + async def save_account_state(self, account_name: str, connector_name: str, tokens_info: List[Dict], snapshot_timestamp: Optional[datetime] = None) -> AccountState: """ @@ -99,19 +147,30 @@ async def get_latest_account_states(self) -> Dict[str, Dict[str, List[Dict]]]: return accounts_state - async def get_account_state_history(self, + async def get_account_state_history(self, limit: Optional[int] = None, account_name: Optional[str] = None, connector_name: Optional[str] = None, cursor: Optional[str] = None, start_time: Optional[datetime] = None, - end_time: Optional[datetime] = None) -> Tuple[List[Dict], Optional[str], bool]: + end_time: Optional[datetime] = None, + interval: str = "5m") -> Tuple[List[Dict], Optional[str], bool]: """ - Get historical account states with cursor-based pagination. 
- + Get historical account states with cursor-based pagination and interval sampling. + + Args: + limit: Maximum number of records to return + account_name: Filter by account name + connector_name: Filter by connector name + cursor: Cursor for pagination + start_time: Start time filter + end_time: End time filter + interval: Sampling interval (5m, 15m, 30m, 1h, 4h, 12h, 1d) + Returns: Tuple of (data, next_cursor, has_more) """ + interval_minutes = self._interval_to_minutes(interval) query = ( select(AccountState) .options(joinedload(AccountState.token_states)) @@ -136,24 +195,16 @@ async def get_account_state_history(self, except (ValueError, TypeError): # Invalid cursor, ignore it pass - - # Fetch limit + 1 to check if there are more records - fetch_limit = limit + 1 if limit else 101 + + # Fetch more records than requested to ensure we have enough after sampling + # For intervals > 5m, we need to fetch more data to get enough sampled points + sampling_multiplier = max(1, interval_minutes // 5) # How many 5m intervals per sample + fetch_limit = (limit * sampling_multiplier + 1) if limit else (100 * sampling_multiplier + 1) query = query.limit(fetch_limit) result = await self.session.execute(query) account_states = result.unique().scalars().all() - - # Check if there are more records - has_more = len(account_states) == fetch_limit - if has_more: - account_states = account_states[:-1] # Remove the extra record - - # Generate next cursor - next_cursor = None - if has_more and account_states: - next_cursor = account_states[-1].timestamp.isoformat() - + # Format response - Group by minute to aggregate account/connector states minute_groups = {} for account_state in account_states: @@ -166,29 +217,42 @@ async def get_account_state_history(self, "value": float(token_state.value), "available_units": float(token_state.available_units) }) - + # Round timestamp to the nearest minute for grouping minute_timestamp = account_state.timestamp.replace(second=0, microsecond=0) minute_key = minute_timestamp.isoformat() - + # Initialize minute group if it doesn't exist if minute_key not in minute_groups: minute_groups[minute_key] = { "timestamp": minute_key, "state": {} } - + # Add account/connector to the minute group if account_state.account_name not in minute_groups[minute_key]["state"]: minute_groups[minute_key]["state"][account_state.account_name] = {} - + minute_groups[minute_key]["state"][account_state.account_name][account_state.connector_name] = token_info - + # Convert to list and maintain chronological order (most recent first) history = list(minute_groups.values()) history.sort(key=lambda x: x["timestamp"], reverse=True) - - return history, next_cursor, has_more + + # Apply interval sampling + sampled_history = self._sample_history_by_interval(history, interval_minutes) + + # Apply limit and check if there are more records after sampling + has_more = len(sampled_history) > limit if limit else False + if has_more: + sampled_history = sampled_history[:limit] + + # Generate next cursor from the last sampled item + next_cursor = None + if has_more and sampled_history: + next_cursor = sampled_history[-1]["timestamp"] + + return sampled_history, next_cursor, has_more async def get_account_current_state(self, account_name: str) -> Dict[str, List[Dict]]: """ diff --git a/database/repositories/gateway_clmm_repository.py b/database/repositories/gateway_clmm_repository.py index 405480bf..9c69d3da 100644 --- a/database/repositories/gateway_clmm_repository.py +++ 
b/database/repositories/gateway_clmm_repository.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timezone from typing import Dict, List, Optional from decimal import Decimal @@ -35,9 +35,10 @@ async def update_position_liquidity( position_address: str, base_token_amount: Decimal, quote_token_amount: Decimal, - in_range: Optional[str] = None + in_range: Optional[str] = None, + current_price: Optional[Decimal] = None ) -> Optional[GatewayCLMMPosition]: - """Update position liquidity amounts.""" + """Update position liquidity amounts and current price.""" result = await self.session.execute( select(GatewayCLMMPosition).where(GatewayCLMMPosition.position_address == position_address) ) @@ -47,6 +48,8 @@ async def update_position_liquidity( position.quote_token_amount = float(quote_token_amount) if in_range is not None: position.in_range = in_range + if current_price is not None: + position.current_price = float(current_price) await self.session.flush() return position @@ -220,25 +223,106 @@ async def get_pending_events(self, limit: int = 100) -> List[GatewayCLMMEvent]: # ============================================ def position_to_dict(self, position: GatewayCLMMPosition) -> Dict: - """Convert GatewayCLMMPosition model to dictionary format with PnL calculation.""" - # Calculate PnL if initial amounts are available + """Convert GatewayCLMMPosition model to dictionary format with enhanced PnL calculation.""" pnl_summary = None - if position.initial_base_token_amount is not None and position.initial_quote_token_amount is not None: - # Current total value = current liquidity + fees collected - current_base_total = float(position.base_token_amount) + float(position.base_fee_collected) + float(position.base_fee_pending) - current_quote_total = float(position.quote_token_amount) + float(position.quote_fee_collected) + float(position.quote_fee_pending) - # PnL = current - initial - base_pnl = current_base_total - float(position.initial_base_token_amount) - quote_pnl = current_quote_total - float(position.initial_quote_token_amount) + # Get prices for PnL calculation + entry_price = float(position.entry_price) if position.entry_price else None + current_price = float(position.current_price) if position.current_price else None + + # Calculate PnL if we have initial amounts and prices + if (position.initial_base_token_amount is not None and + position.initial_quote_token_amount is not None and + entry_price and entry_price > 0 and + current_price and current_price > 0): + + # Initial amounts + initial_base = float(position.initial_base_token_amount) + initial_quote = float(position.initial_quote_token_amount) + + # Current liquidity amounts + current_base = float(position.base_token_amount) + current_quote = float(position.quote_token_amount) + + # Total fees (collected + pending) + total_fees_base = float(position.base_fee_collected) + float(position.base_fee_pending) + total_fees_quote = float(position.quote_fee_collected) + float(position.quote_fee_pending) + + # Value calculations (all normalized to quote currency) + initial_value_quote = initial_base * entry_price + initial_quote + current_lp_value_quote = current_base * current_price + current_quote + total_fees_value_quote = total_fees_base * current_price + total_fees_quote + current_total_value_quote = current_lp_value_quote + total_fees_value_quote + + # HODL comparison: what if user just held initial tokens without LP + hodl_value_quote = initial_base * current_price + initial_quote + + # Impermanent loss 
(negative = loss due to LP vs holding) + impermanent_loss_quote = current_lp_value_quote - hodl_value_quote + + # Total P&L + total_pnl_quote = current_total_value_quote - initial_value_quote + total_pnl_pct = (total_pnl_quote / initial_value_quote * 100) if initial_value_quote > 0 else 0 + + # Price change + price_change_pct = ((current_price - entry_price) / entry_price * 100) if entry_price > 0 else 0 + + # Duration and APR estimate + duration_hours = 0 + fee_apr_estimate = None + if position.created_at: + # Use closed_at if closed, otherwise current time + end_time = position.closed_at if position.closed_at else datetime.now(timezone.utc) + # Handle timezone-naive datetimes + if position.created_at.tzinfo is None: + created_at = position.created_at.replace(tzinfo=timezone.utc) + else: + created_at = position.created_at + if end_time.tzinfo is None: + end_time = end_time.replace(tzinfo=timezone.utc) + + duration_seconds = (end_time - created_at).total_seconds() + duration_hours = duration_seconds / 3600 + + # Calculate fee APR if we have meaningful duration + if duration_seconds > 0 and initial_value_quote > 0: + duration_years = duration_seconds / (365.25 * 24 * 3600) + if duration_years > 0: + fee_apr_estimate = (total_fees_value_quote / initial_value_quote / duration_years * 100) pnl_summary = { - "initial_base": float(position.initial_base_token_amount), - "initial_quote": float(position.initial_quote_token_amount), - "current_base_total": current_base_total, - "current_quote_total": current_quote_total, - "base_pnl": base_pnl, - "quote_pnl": quote_pnl + # Prices + "entry_price": round(entry_price, 8), + "current_price": round(current_price, 8), + "price_change_pct": round(price_change_pct, 4), + + # Initial state + "initial_base": round(initial_base, 8), + "initial_quote": round(initial_quote, 8), + "initial_value_quote": round(initial_value_quote, 8), + + # Current position (liquidity only, no fees) + "current_base": round(current_base, 8), + "current_quote": round(current_quote, 8), + "current_lp_value_quote": round(current_lp_value_quote, 8), + + # Fees earned + "total_fees_base": round(total_fees_base, 8), + "total_fees_quote": round(total_fees_quote, 8), + "total_fees_value_quote": round(total_fees_value_quote, 8), + + # HODL comparison + "hodl_value_quote": round(hodl_value_quote, 8), + + # Key metrics + "impermanent_loss_quote": round(impermanent_loss_quote, 8), + "current_total_value_quote": round(current_total_value_quote, 8), + "total_pnl_quote": round(total_pnl_quote, 8), + "total_pnl_pct": round(total_pnl_pct, 4), + + # Time metrics + "duration_hours": round(duration_hours, 2), + "fee_apr_estimate": round(fee_apr_estimate, 2) if fee_apr_estimate else None } return { @@ -257,6 +341,8 @@ def position_to_dict(self, position: GatewayCLMMPosition) -> Dict: "upper_price": float(position.upper_price), "lower_bin_id": position.lower_bin_id, "upper_bin_id": position.upper_bin_id, + "entry_price": entry_price, + "current_price": current_price, "percentage": float(position.percentage) if position.percentage is not None else None, "initial_base_token_amount": float(position.initial_base_token_amount) if position.initial_base_token_amount is not None else None, "initial_quote_token_amount": float(position.initial_quote_token_amount) if position.initial_quote_token_amount is not None else None, diff --git a/docker-compose.yml b/docker-compose.yml index a32947b5..c33f7ebb 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,21 +1,4 @@ services: - # Uncomment to enable 
Dashboard (optional web interface) - # dashboard: - # container_name: dashboard - # image: hummingbot/dashboard:latest - # ports: - # - "8501:8501" - # environment: - # - AUTH_SYSTEM_ENABLED=False - # - BACKEND_API_HOST=hummingbot-api - # - BACKEND_API_PORT=8000 - # - BACKEND_API_USERNAME=${USERNAME} - # - BACKEND_API_PASSWORD=${PASSWORD} - # volumes: - # - ./dashboard-credentials.yml:/home/dashboard/credentials.yml - # - ./pages:/home/dashboard/pages - # networks: - # - emqx-bridge hummingbot-api: container_name: hummingbot-api image: hummingbot/hummingbot-api:latest diff --git a/main.py b/main.py index cef9329b..e668dab7 100644 --- a/main.py +++ b/main.py @@ -1,6 +1,7 @@ import secrets from contextlib import asynccontextmanager from typing import Annotated +from urllib.parse import urlparse import logfire import logging @@ -24,6 +25,8 @@ def patched_save_to_yml(yml_path, cm): config_helpers.save_to_yml = patched_save_to_yml from hummingbot.core.rate_oracle.rate_oracle import RateOracle +from hummingbot.core.gateway.gateway_http_client import GatewayHttpClient +from hummingbot.client.config.client_config_map import GatewayConfigMap from fastapi import Depends, FastAPI, HTTPException, status from fastapi.security import HTTPBasic, HTTPBasicCredentials @@ -90,6 +93,17 @@ async def lifespan(app: FastAPI): BackendAPISecurity.store_password_verification(secrets_manager) logging.info("Created password verification file for master_account") + # Initialize GatewayHttpClient singleton with proper config from settings + # This must happen BEFORE MarketDataProvider is created, as it uses GatewayHttpClient.get_instance() + parsed_gateway_url = urlparse(settings.gateway.url) + gateway_config = GatewayConfigMap( + gateway_api_host=parsed_gateway_url.hostname or "localhost", + gateway_api_port=str(parsed_gateway_url.port or 15888), + gateway_use_ssl=parsed_gateway_url.scheme == "https" + ) + GatewayHttpClient.get_instance(gateway_config) + logging.info(f"Initialized GatewayHttpClient with URL: {settings.gateway.url}") + # Initialize MarketDataProvider with empty connectors (will use non-trading connectors) market_data_provider = MarketDataProvider(connectors={}) diff --git a/models/gateway.py b/models/gateway.py index 9ad85bfd..042711b4 100644 --- a/models/gateway.py +++ b/models/gateway.py @@ -48,11 +48,14 @@ class GatewayWalletInfo(BaseModel): class AddPoolRequest(BaseModel): """Request to add a liquidity pool""" connector_name: str = Field(description="DEX connector name (e.g., 'raydium', 'meteora')") - type: str = Field(description="Pool type ('clmm' for concentrated liquidity)") - network: str = Field(description="Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta', 'ethereum-mainnet')") + type: str = Field(description="Pool type ('clmm' or 'amm')") + network: str = Field(description="Network name (e.g., 'mainnet-beta')") + address: str = Field(description="Pool contract address") base: str = Field(description="Base token symbol") quote: str = Field(description="Quote token symbol") - address: str = Field(description="Pool contract address") + base_address: str = Field(description="Base token contract address") + quote_address: str = Field(description="Quote token contract address") + fee_pct: Optional[float] = Field(default=None, description="Pool fee percentage (e.g., 0.25)") class AddTokenRequest(BaseModel): diff --git a/models/gateway_trading.py b/models/gateway_trading.py index 2227f418..02d7c422 100644 --- a/models/gateway_trading.py +++ b/models/gateway_trading.py @@ -202,12 
+202,12 @@ class CLMMPoolInfoResponse(BaseModel): address: str = Field(description="Pool address") base_token_address: str = Field(alias="baseTokenAddress", description="Base token contract address") quote_token_address: str = Field(alias="quoteTokenAddress", description="Quote token contract address") - bin_step: int = Field(alias="binStep", description="Bin step (price difference between bins)") + bin_step: Optional[int] = Field(None, alias="binStep", description="Bin step (Meteora DLMM only)") fee_pct: Decimal = Field(alias="feePct", description="Pool fee percentage") price: Decimal = Field(description="Current pool price") base_token_amount: Decimal = Field(alias="baseTokenAmount", description="Total base token liquidity") quote_token_amount: Decimal = Field(alias="quoteTokenAmount", description="Total quote token liquidity") - active_bin_id: int = Field(alias="activeBinId", description="Currently active bin ID") + active_bin_id: Optional[int] = Field(None, alias="activeBinId", description="Currently active bin ID (Meteora DLMM only)") dynamic_fee_pct: Optional[Decimal] = Field(None, alias="dynamicFeePct", description="Dynamic fee percentage") min_bin_id: Optional[int] = Field(None, alias="minBinId", description="Minimum bin ID (Meteora-specific)") max_bin_id: Optional[int] = Field(None, alias="maxBinId", description="Maximum bin ID (Meteora-specific)") diff --git a/models/trading.py b/models/trading.py index a3449e3d..23a6524a 100644 --- a/models/trading.py +++ b/models/trading.py @@ -189,12 +189,26 @@ class PortfolioStateFilterRequest(BaseModel): """Request model for filtering portfolio state""" account_names: Optional[List[str]] = Field(default=None, description="List of account names to filter by") connector_names: Optional[List[str]] = Field(default=None, description="List of connector names to filter by") + skip_gateway: bool = Field(default=False, description="Skip Gateway wallet balance updates for faster CEX-only queries") class PortfolioHistoryFilterRequest(TimeRangePaginationParams): """Request model for filtering portfolio history""" account_names: Optional[List[str]] = Field(default=None, description="List of account names to filter by") connector_names: Optional[List[str]] = Field(default=None, description="List of connector names to filter by") + interval: Optional[str] = Field( + default="5m", + description="Data sampling interval: 5m, 15m, 30m, 1h, 4h, 12h, 1d. Default is 5m (raw data)" + ) + + @field_validator('interval') + @classmethod + def validate_interval(cls, v): + """Validate that interval is a supported value.""" + valid_intervals = ["5m", "15m", "30m", "1h", "4h", "12h", "1d"] + if v not in valid_intervals: + raise ValueError(f"Invalid interval '{v}'. 
Must be one of: {valid_intervals}") + return v class PortfolioDistributionFilterRequest(BaseModel): diff --git a/routers/gateway.py b/routers/gateway.py index 7740009a..105933b5 100644 --- a/routers/gateway.py +++ b/routers/gateway.py @@ -314,13 +314,20 @@ async def add_pool( connector=pool_request.connector_name, pool_type=pool_request.type, network=pool_request.network, + address=pool_request.address, base_symbol=pool_request.base, quote_symbol=pool_request.quote, - address=pool_request.address + base_token_address=pool_request.base_address, + quote_token_address=pool_request.quote_address, + fee_pct=pool_request.fee_pct ) + if result is None: + raise HTTPException(status_code=502, detail="Failed to add pool: Gateway returned no response") + if "error" in result: - raise HTTPException(status_code=400, detail=f"Failed to add pool: {result.get('error')}") + status = result.get("status", 400) + raise HTTPException(status_code=status, detail=f"Failed to add pool: {result.get('error')}") trading_pair = f"{pool_request.base}-{pool_request.quote}" return { @@ -334,6 +341,56 @@ async def add_pool( raise HTTPException(status_code=500, detail=f"Error adding pool: {str(e)}") +@router.delete("/pools/{address}") +async def delete_pool( + address: str, + connector_name: str = Query(description="DEX connector (e.g., 'meteora', 'raydium', 'uniswap')"), + network: str = Query(description="Network name (e.g., 'mainnet-beta', 'mainnet')"), + pool_type: str = Query(description="Pool type (e.g., 'clmm', 'amm')"), + accounts_service: AccountsService = Depends(get_accounts_service) +) -> Dict: + """ + Delete a liquidity pool from Gateway's pool list. + + Args: + address: Pool contract address to remove + connector_name: DEX connector (e.g., 'meteora', 'raydium', 'uniswap') + network: Network name (e.g., 'mainnet-beta', 'mainnet') + pool_type: Pool type (e.g., 'clmm', 'amm') + + Example: DELETE /gateway/pools/2sf5NYcY...?connector_name=meteora&network=mainnet-beta&pool_type=clmm + """ + try: + if not await accounts_service.gateway_client.ping(): + raise HTTPException(status_code=503, detail="Gateway service is not available") + + result = await accounts_service.gateway_client.delete_pool( + connector=connector_name, + network=network, + pool_type=pool_type, + address=address + ) + + if result is None: + raise HTTPException(status_code=400, detail="Failed to delete pool - no response from Gateway") + + if "error" in result: + raise HTTPException(status_code=400, detail=f"Failed to delete pool: {result.get('error')}") + + return { + "success": True, + "message": f"Pool {address} deleted from {connector_name}/{network}", + "pool_address": address, + "connector": connector_name, + "network": network + } + + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error deleting pool: {str(e)}") + + # ============================================ # Networks (Primary Endpoints) # ============================================ diff --git a/routers/gateway_clmm.py b/routers/gateway_clmm.py index 1ced78aa..69bfad79 100644 --- a/routers/gateway_clmm.py +++ b/routers/gateway_clmm.py @@ -95,7 +95,7 @@ async def fetch_raydium_pool_info(pool_address: str) -> Optional[dict]: Dictionary with pool info from Raydium API, or None if failed """ try: - url = f"https://api-v3.raydium.io/pools/line/position?id={pool_address}" + url = f"https://api-v3.raydium.io/pools/info/ids?ids={pool_address}" async with aiohttp.ClientSession() as session: async with session.get(url, headers={"accept": 
"application/json"}) as response: response.raise_for_status() @@ -105,7 +105,14 @@ async def fetch_raydium_pool_info(pool_address: str) -> Optional[dict]: logger.error(f"Raydium API returned unsuccessful response: {data}") return None - return data + # Extract the first pool from the data list + pools_data = data.get("data", []) + if not pools_data: + logger.error(f"Raydium API returned empty data for pool: {pool_address}") + return None + + # Return the pool data directly (not wrapped in data key) + return pools_data[0] except aiohttp.ClientError as e: logger.error(f"Failed to fetch pool info from Raydium API: {e}") return None @@ -119,69 +126,49 @@ def transform_raydium_to_clmm_response(raydium_data: dict, pool_address: str) -> Transform Raydium API response to match Gateway's CLMMPoolInfoResponse format. Args: - raydium_data: Response from Raydium API + raydium_data: Pool data from Raydium API (pools/info/ids endpoint) pool_address: Pool contract address Returns: Dictionary matching Gateway's pool info structure """ - pool_data = raydium_data.get("data", {}) - line_data = pool_data.get("line", []) - - if not line_data: - raise ValueError("No liquidity bins found in Raydium pool data") - - # Sort bins by tick to find the active bin - sorted_bins = sorted(line_data, key=lambda x: x.get("tick", 0)) - - # Calculate active bin (the one with mid-range tick) - # For Raydium, we need to determine the current active bin based on the pool state - # We'll use the middle bin as a proxy for active bin - active_bin_idx = len(sorted_bins) // 2 - active_bin = sorted_bins[active_bin_idx] - - # Calculate total liquidity across all bins - total_base_liquidity = sum(Decimal(str(bin_data.get("liquidity", 0))) for bin_data in line_data) - total_quote_liquidity = total_base_liquidity # Approximation - - # Extract min and max ticks - min_tick = sorted_bins[0].get("tick", 0) if sorted_bins else 0 - max_tick = sorted_bins[-1].get("tick", 0) if sorted_bins else 0 - - # Convert ticks to bin IDs (assuming 1:1 mapping for simplicity) - min_bin_id = min_tick - max_bin_id = max_tick - active_bin_id = active_bin.get("tick", 0) - - # Get current price from active bin - current_price = Decimal(str(active_bin.get("price", 0))) - - # Transform bins to match Gateway format - bins = [] - for bin_data in line_data[:100]: # Limit to 100 bins for performance - liquidity = Decimal(str(bin_data.get("liquidity", 0))) - bins.append({ - "binId": bin_data.get("tick", 0), - "price": Decimal(str(bin_data.get("price", 0))), - "baseTokenAmount": liquidity, - "quoteTokenAmount": liquidity # Approximation - }) + # Extract token info + mint_a = raydium_data.get("mintA", {}) + mint_b = raydium_data.get("mintB", {}) + + base_token_address = mint_a.get("address", "") + quote_token_address = mint_b.get("address", "") + + # Get current price + current_price = Decimal(str(raydium_data.get("price", 0))) + + # Get token amounts + base_amount = Decimal(str(raydium_data.get("mintAmountA", 0))) + quote_amount = Decimal(str(raydium_data.get("mintAmountB", 0))) + + # Get fee rate (convert from decimal to percentage, e.g., 0.0025 -> 0.25%) + fee_rate = raydium_data.get("feeRate", 0.0025) + fee_pct = Decimal(str(fee_rate * 100)) + + # Check if this is a CLMM (Concentrated) pool + pool_type = raydium_data.get("type", "Standard") + is_clmm = pool_type == "Concentrated" # Return in Gateway-compatible format return { "address": pool_address, - "baseTokenAddress": "unknown", # Not provided by Raydium API - "quoteTokenAddress": "unknown", # Not provided by 
Raydium API - "binStep": 1, # Default value, not provided by Raydium API - "feePct": Decimal("0.25"), # Typical Raydium CLMM fee + "baseTokenAddress": base_token_address, + "quoteTokenAddress": quote_token_address, + "binStep": 1 if is_clmm else None, # CLMM pools have tick spacing + "feePct": fee_pct, "price": current_price, - "baseTokenAmount": total_base_liquidity, - "quoteTokenAmount": total_quote_liquidity, - "activeBinId": active_bin_id, + "baseTokenAmount": base_amount, + "quoteTokenAmount": quote_amount, + "activeBinId": None, # Not available from this endpoint "dynamicFeePct": None, - "minBinId": min_bin_id, - "maxBinId": max_bin_id, - "bins": bins + "minBinId": None, + "maxBinId": None, + "bins": [] # Bin data not available from pool info endpoint } @@ -299,12 +286,13 @@ async def _refresh_position_data(position, accounts_service: AccountsService, cl await clmm_repo.close_position(position.position_address) return - # Update liquidity amounts and in_range status + # Update liquidity amounts, in_range status, and current price await clmm_repo.update_position_liquidity( position_address=position.position_address, base_token_amount=base_token_amount, quote_token_amount=quote_token_amount, - in_range=in_range + in_range=in_range, + current_price=current_price ) # Update pending fees if available @@ -318,7 +306,7 @@ async def _refresh_position_data(position, accounts_service: AccountsService, cl quote_fee_pending=quote_fee_pending ) - logger.debug(f"Refreshed position {position.position_address}: in_range={in_range}, " + logger.debug(f"Refreshed position {position.position_address}: price={current_price}, in_range={in_range}, " f"base={base_token_amount}, quote={quote_token_amount}") except Exception as e: @@ -361,6 +349,15 @@ async def get_clmm_pool_info( if raydium_data is None: raise HTTPException(status_code=503, detail="Failed to get pool info from Raydium API") + # Check if this is a CLMM pool - Standard AMM pools are not supported on this endpoint + pool_type = raydium_data.get("type", "Standard") + if pool_type != "Concentrated": + raise HTTPException( + status_code=400, + detail=f"Pool {pool_address} is a Raydium {pool_type} AMM pool, not a CLMM pool. " + f"This endpoint only supports Concentrated Liquidity (CLMM) pools." 
+ ) + # Transform to Gateway-compatible format result = transform_raydium_to_clmm_response(raydium_data, pool_address) @@ -586,6 +583,11 @@ async def open_clmm_position( base_token_address = pool_info.get("baseTokenAddress", "") quote_token_address = pool_info.get("quoteTokenAddress", "") + # Extract entry price from pool info (current pool price at time of opening) + entry_price = float(pool_info.get("price", 0)) if pool_info.get("price") else None + if entry_price: + logger.info(f"Entry price for position: {entry_price}") + # Store full token addresses in the database base = base_token_address if base_token_address else "UNKNOWN" quote = quote_token_address if quote_token_address else "UNKNOWN" @@ -655,6 +657,8 @@ async def open_clmm_position( "lower_price": float(request.lower_price), "upper_price": float(request.upper_price), "percentage": percentage, + "entry_price": entry_price, # Pool price when position opened + "current_price": entry_price, # Same as entry at open time, updated by poller "initial_base_token_amount": float(request.base_token_amount) if request.base_token_amount else 0, "initial_quote_token_amount": float(request.quote_token_amount) if request.quote_token_amount else 0, "position_rent": float(position_rent) if position_rent else None, @@ -939,9 +943,10 @@ async def close_clmm_position( detail=f"Position {request.position_address} not found in database. Pool address is required." ) - # Fetch pending fees BEFORE closing (Gateway doesn't always return collected amounts in response) + # Fetch pending fees and current price BEFORE closing (Gateway doesn't always return these in response) base_fee_to_collect = Decimal("0") quote_fee_to_collect = Decimal("0") + close_price = None try: positions_list = await accounts_service.gateway_client.clmm_positions_owned( @@ -951,18 +956,19 @@ async def close_clmm_position( pool_address=pool_address ) - # Find our specific position and get pending fees + # Find our specific position and get pending fees and current price if positions_list and isinstance(positions_list, list): for pos in positions_list: if pos and pos.get("address") == request.position_address: base_fee_to_collect = Decimal(str(pos.get("baseFeeAmount", 0))) quote_fee_to_collect = Decimal(str(pos.get("quoteFeeAmount", 0))) - logger.info(f"Pending fees before closing: base={base_fee_to_collect}, quote={quote_fee_to_collect}") + close_price = float(pos.get("price", 0)) if pos.get("price") else None + logger.info(f"Before closing: price={close_price}, pending fees base={base_fee_to_collect}, quote={quote_fee_to_collect}") break else: logger.warning(f"Could not find position {request.position_address} in positions_owned response") except Exception as e: - logger.warning(f"Could not fetch pending fees before closing: {e}", exc_info=True) + logger.warning(f"Could not fetch position state before closing: {e}", exc_info=True) # Close position result = await accounts_service.gateway_client.clmm_close_position( @@ -1028,6 +1034,15 @@ async def close_clmm_position( quote_fee_pending=Decimal("0") ) + # Update current_price with close price before marking as closed + if close_price: + await clmm_repo.update_position_liquidity( + position_address=request.position_address, + base_token_amount=Decimal(str(position.base_token_amount)), + quote_token_amount=Decimal(str(position.quote_token_amount)), + current_price=Decimal(str(close_price)) + ) + # Mark position as CLOSED await clmm_repo.close_position(request.position_address) logger.info(f"Updated position {request.position_address}: 
collected fees updated, pending fees reset to 0, status set to CLOSED") diff --git a/routers/portfolio.py b/routers/portfolio.py index 6941cfc7..82b9fad2 100644 --- a/routers/portfolio.py +++ b/routers/portfolio.py @@ -23,14 +23,17 @@ async def get_portfolio_state( ): """ Get the current state of all or filtered accounts portfolio. - + Args: - filter_request: JSON payload with filtering criteria - + filter_request: JSON payload with filtering criteria including: + - account_names: Optional list of account names to filter by + - connector_names: Optional list of connector names to filter by + - skip_gateway: If True, skip Gateway wallet balance updates for faster CEX-only queries + Returns: Dict containing account states with connector balances and token information """ - await accounts_service.update_account_state() + await accounts_service.update_account_state(skip_gateway=filter_request.skip_gateway) all_states = accounts_service.get_accounts_state() # Apply account name filter first @@ -61,26 +64,39 @@ async def get_portfolio_history( accounts_service: AccountsService = Depends(get_accounts_service) ): """ - Get the historical state of all or filtered accounts portfolio with pagination. - + Get the historical state of all or filtered accounts portfolio with pagination and interval sampling. + + The interval parameter allows you to control data granularity: + - 5m: Raw data (default, collected every 5 minutes) + - 15m: One data point every 15 minutes + - 30m: One data point every 30 minutes + - 1h: One data point every hour + - 4h: One data point every 4 hours + - 12h: One data point every 12 hours + - 1d: One data point every day + + Using larger intervals significantly reduces response size and improves performance. + Args: - filter_request: JSON payload with filtering criteria - + filter_request: JSON payload with filtering criteria (account_names, connector_names, + start_time, end_time, limit, cursor, interval) + Returns: - Paginated response with historical portfolio data + Paginated response with historical portfolio data sampled at the requested interval """ try: # Convert integer timestamps to datetime objects start_time_dt = datetime.fromtimestamp(filter_request.start_time / 1000) if filter_request.start_time else None end_time_dt = datetime.fromtimestamp(filter_request.end_time / 1000) if filter_request.end_time else None - + if not filter_request.account_names: # Get history for all accounts data, next_cursor, has_more = await accounts_service.load_account_state_history( limit=filter_request.limit, cursor=filter_request.cursor, start_time=start_time_dt, - end_time=end_time_dt + end_time=end_time_dt, + interval=filter_request.interval ) else: # Get history for specific accounts - need to aggregate @@ -91,7 +107,8 @@ async def get_portfolio_history( limit=filter_request.limit, cursor=filter_request.cursor, start_time=start_time_dt, - end_time=end_time_dt + end_time=end_time_dt, + interval=filter_request.interval ) all_data.extend(acc_data) @@ -125,7 +142,8 @@ async def get_portfolio_history( "account_names": filter_request.account_names, "connector_names": filter_request.connector_names, "start_time": filter_request.start_time, - "end_time": filter_request.end_time + "end_time": filter_request.end_time, + "interval": filter_request.interval } } ) diff --git a/services/accounts_service.py b/services/accounts_service.py index 4fa9bc53..2cb5773b 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -195,17 +195,26 @@ async def dump_account_state(self): # 
Re-raise the exception since we no longer have a fallback raise - async def load_account_state_history(self, + async def load_account_state_history(self, limit: Optional[int] = None, cursor: Optional[str] = None, start_time: Optional[datetime] = None, - end_time: Optional[datetime] = None): + end_time: Optional[datetime] = None, + interval: str = "5m"): """ - Load the account state history from the database with pagination. + Load the account state history from the database with pagination and interval sampling. + + Args: + limit: Maximum number of records to return + cursor: Cursor for pagination + start_time: Start time filter + end_time: End time filter + interval: Sampling interval (5m, 15m, 30m, 1h, 4h, 12h, 1d) + :return: Tuple of (data, next_cursor, has_more). """ await self.ensure_db_initialized() - + try: async with self.db_manager.get_session_context() as session: repository = AccountRepository(session) @@ -213,7 +222,8 @@ async def load_account_state_history(self, limit=limit, cursor=cursor, start_time=start_time, - end_time=end_time + end_time=end_time, + interval=interval ) except Exception as e: logger.error(f"Error loading account state history from database: {e}") @@ -289,23 +299,40 @@ async def _initialize_price_tracking(self, account_name: str, connector_name: st except Exception as e: logger.error(f"Error initializing price tracking for {connector_name} in account {account_name}: {e}") - async def update_account_state(self): - """Update account state for all connectors and Gateway wallets.""" + async def update_account_state(self, skip_gateway: bool = False): + """Update account state for all connectors and optionally Gateway wallets. + + Args: + skip_gateway: If True, skip Gateway wallet balance updates for faster CEX-only queries. 
+ """ all_connectors = self.connector_manager.get_all_connectors() + # Prepare parallel tasks + tasks = [] + task_meta = [] # (account_name, connector_name) + for account_name, connectors in all_connectors.items(): if account_name not in self.accounts_state: self.accounts_state[account_name] = {} for connector_name, connector in connectors.items(): - try: - tokens_info = await self._get_connector_tokens_info(connector, connector_name) - self.accounts_state[account_name][connector_name] = tokens_info - except Exception as e: - logger.error(f"Error updating balances for connector {connector_name} in account {account_name}: {e}") - self.accounts_state[account_name][connector_name] = [] + tasks.append(self._get_connector_tokens_info(connector, connector_name)) + task_meta.append((account_name, connector_name)) - # Add Gateway wallet balances to master_account if Gateway is available - await self._update_gateway_balances() + # Execute connectors + gateway in parallel (unless skip_gateway is True) + if skip_gateway: + results = await asyncio.gather(*tasks, return_exceptions=True) + else: + results = await asyncio.gather(*tasks, self._update_gateway_balances(), return_exceptions=True) + # Remove gateway result from processing (it handles its own state internally) + results = results[:-1] + + # Process results + for (account_name, connector_name), result in zip(task_meta, results): + if isinstance(result, Exception): + logger.error(f"Error updating balances for connector {connector_name} in account {account_name}: {result}") + self.accounts_state[account_name][connector_name] = [] + else: + self.accounts_state[account_name][connector_name] = result async def _get_connector_tokens_info(self, connector, connector_name: str) -> List[Dict]: """Get token info from a connector instance using cached prices when available.""" @@ -451,18 +478,20 @@ async def delete_credentials(self, account_name: str, connector_name: str): :param connector_name: :return: """ + # Delete credentials file if it exists if fs_util.path_exists(f"credentials/{account_name}/connectors/{connector_name}.yml"): fs_util.delete_file(directory=f"credentials/{account_name}/connectors", file_name=f"{connector_name}.yml") - - # Stop the connector if it's running - await self.connector_manager.stop_connector(account_name, connector_name) - - # Remove from account state - if account_name in self.accounts_state and connector_name in self.accounts_state[account_name]: - self.accounts_state[account_name].pop(connector_name) - - # Clear the connector from cache - self.connector_manager.clear_cache(account_name, connector_name) + + # Always perform cleanup regardless of file existence + # Stop the connector if it's running + await self.connector_manager.stop_connector(account_name, connector_name) + + # Remove from account state + if account_name in self.accounts_state and connector_name in self.accounts_state[account_name]: + self.accounts_state[account_name].pop(connector_name) + + # Clear the connector from cache + self.connector_manager.clear_cache(account_name, connector_name) def add_account(self, account_name: str): """ @@ -518,26 +547,36 @@ async def get_account_current_state(self, account_name: str) -> Dict[str, List[D # Fallback to in-memory state return self.accounts_state.get(account_name, {}) - async def get_account_state_history(self, - account_name: str, + async def get_account_state_history(self, + account_name: str, limit: Optional[int] = None, cursor: Optional[str] = None, start_time: Optional[datetime] = None, - end_time: 
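
The refactored `update_account_state` above replaces the sequential per-connector loop with a single `asyncio.gather`. A stripped-down sketch of that fan-out pattern, with illustrative names standing in for the service internals:

```python
# Minimal sketch of the fan-out/gather pattern; names are illustrative stand-ins.
import asyncio
from typing import Dict, List, Tuple

async def fetch_balances(account: str, connector: str) -> List[dict]:
    # Stand-in for the real per-connector balance call.
    await asyncio.sleep(0)
    return [{"token": "BTC", "units": 0.1}]

async def update_all(pairs: List[Tuple[str, str]]) -> Dict[Tuple[str, str], List[dict]]:
    tasks = [fetch_balances(a, c) for a, c in pairs]
    results = await asyncio.gather(*tasks, return_exceptions=True)
    state: Dict[Tuple[str, str], List[dict]] = {}
    for key, result in zip(pairs, results):
        # One failing connector degrades only its own entry, as in the patch.
        state[key] = [] if isinstance(result, Exception) else result
    return state

print(asyncio.run(update_all([("master_account", "binance")])))
```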
Optional[datetime] = None): + end_time: Optional[datetime] = None, + interval: str = "5m"): """ - Get historical state for a specific account with pagination. + Get historical state for a specific account with pagination and interval sampling. + + Args: + account_name: Account name to filter by + limit: Maximum number of records to return + cursor: Cursor for pagination + start_time: Start time filter + end_time: End time filter + interval: Sampling interval (5m, 15m, 30m, 1h, 4h, 12h, 1d) """ await self.ensure_db_initialized() - + try: async with self.db_manager.get_session_context() as session: repository = AccountRepository(session) return await repository.get_account_state_history( - account_name=account_name, + account_name=account_name, limit=limit, cursor=cursor, start_time=start_time, - end_time=end_time + end_time=end_time, + interval=interval ) except Exception as e: logger.error(f"Error getting account state history: {e}") @@ -1355,6 +1394,18 @@ async def _update_gateway_balances(self): wallets = await self.gateway_client.get_wallets() if not wallets: logger.debug("No Gateway wallets found") + # Clear any stale gateway balances from master_account when no wallets exist + if "master_account" in self.accounts_state: + chains_result = await self.gateway_client.get_chains() + if chains_result and "chains" in chains_result: + known_chains = {c["chain"] for c in chains_result["chains"]} + stale_keys = [ + key for key in list(self.accounts_state["master_account"].keys()) + if "-" in key and key.split("-")[0] in known_chains + ] + for key in stale_keys: + logger.info(f"Removing stale Gateway balance data for {key} (no wallets exist)") + del self.accounts_state["master_account"][key] return # Get all available chains and networks @@ -1400,6 +1451,10 @@ async def _update_gateway_balances(self): t_zero = time.time() results = await asyncio.gather(*balance_tasks, return_exceptions=True) duration = time.time() - t_zero + + # Build set of active chain-network keys from current wallets + active_chain_networks = {f"{chain}-{network}" for chain, network, _ in task_metadata} + # Process results for idx, (result, (chain, network, address)) in enumerate(zip(results, task_metadata)): chain_network = f"{chain}-{network}" @@ -1415,6 +1470,21 @@ async def _update_gateway_balances(self): # Store empty list to indicate we checked this network self.accounts_state["master_account"][chain_network] = [] + # Remove stale gateway chain-network keys (wallets that were deleted) + # Gateway keys follow pattern: chain-network (e.g., "solana-mainnet-beta", "ethereum-mainnet") + stale_keys = [] + for key in self.accounts_state["master_account"]: + # Check if key looks like a gateway chain-network (contains hyphen and matches chain pattern) + if "-" in key and key not in active_chain_networks: + # Verify it's a gateway key by checking if chain part matches known chains + chain_part = key.split("-")[0] + if chain_part in chain_networks_map: + stale_keys.append(key) + + for key in stale_keys: + logger.info(f"Removing stale Gateway balance data for {key} (wallet no longer exists)") + del self.accounts_state["master_account"][key] + except Exception as e: logger.error(f"Error updating Gateway balances: {e}") @@ -1538,36 +1608,63 @@ async def get_gateway_balances(self, chain: str, address: str, network: Optional connector_name = f"gateway_{chain}-{network}" # Try to get cached prices first + # Try USDT first (more common in CEX like Binance), then USDC (common in DEX) prices_from_cache = {} tokens_need_update = [] + 
token_to_trading_pair = {} # Maps token -> trading_pair that has the price if self.market_data_feed_manager: for token in unique_tokens: try: token_unwrapped = self.get_unwrapped_token(token) - trading_pair = f"{token_unwrapped}-USDT" - cached_price = self.market_data_feed_manager.market_data_provider.get_rate(trading_pair) - if cached_price > 0: - prices_from_cache[trading_pair] = cached_price - else: + # Try USDT first (Binance, etc.), then USDC as fallback (DEX) + found_price = False + for quote in ["USDT", "USDC"]: + trading_pair = f"{token_unwrapped}-{quote}" + try: + cached_price = self.market_data_feed_manager.market_data_provider.get_rate(trading_pair) + if cached_price > 0: + prices_from_cache[token] = cached_price + token_to_trading_pair[token] = trading_pair + found_price = True + break + except Exception: + continue + if not found_price: tokens_need_update.append(token) except Exception: tokens_need_update.append(token) else: tokens_need_update = unique_tokens - # Initialize rate sources for Gateway (using "gateway" as connector for AMM pairs) - if tokens_need_update: - pricing_connector = self.gateway_default_pricing_connector[chain] + # Initialize rate sources for Gateway using the old format: "gateway_{chain}-{network}" + # The MarketDataProvider.update_rates_task() will detect this format and resolve + # the correct pricing connector (jupiter/router for solana, uniswap/router for ethereum, etc.) + if tokens_need_update and self.market_data_feed_manager: + # Use the format that MarketDataProvider expects for gateway connectors + gateway_connector_key = f"gateway_{chain}-{network}" trading_pairs_need_update = [f"{token}-USDC" for token in tokens_need_update] - connector_pairs = [ConnectorPair(connector_name=pricing_connector, trading_pair=tp) for tp in trading_pairs_need_update] + connector_pairs = [ConnectorPair(connector_name=gateway_connector_key, trading_pair=tp) for tp in trading_pairs_need_update] + for pair in connector_pairs: self.market_data_feed_manager.market_data_provider._rates_required.add_or_update( - f"gateway_{chain}-{network}", pair + gateway_connector_key, pair + ) + logger.info(f"Added {len(trading_pairs_need_update)} Gateway trading pairs to market data provider for {gateway_connector_key}: {trading_pairs_need_update}") + + # Trigger immediate price fetch for the new tokens + try: + fetched_prices = await self._fetch_gateway_prices_immediate( + chain, network, tokens_need_update ) - logger.info(f"Added {len(trading_pairs_need_update)} Gateway trading pairs to market data provider: {trading_pairs_need_update}") + for token, price in fetched_prices.items(): + if price > 0: + prices_from_cache[token] = price + token_to_trading_pair[token] = f"{token}-USDC" + except Exception as e: + logger.warning(f"Error fetching immediate gateway prices: {e}") - # Use cached prices (rate sources will update in background) + # Use cached prices all_prices = prices_from_cache # Format final result with prices @@ -1577,8 +1674,8 @@ async def get_gateway_balances(self, chain: str, address: str, network: Optional if "USD" in token: price = Decimal("1") else: - market = self.get_default_market(token, connector_name) - price = Decimal(str(all_prices.get(market, 0))) + # all_prices is now keyed by token name directly + price = Decimal(str(all_prices.get(token, 0))) formatted_balances.append({ "token": token, @@ -1596,6 +1693,77 @@ async def get_gateway_balances(self, chain: str, address: str, network: Optional logger.error(f"Error getting Gateway balances: {e}") raise 
HTTPException(status_code=500, detail=f"Failed to get balances: {str(e)}") + async def _fetch_gateway_prices_immediate(self, chain: str, network: str, + tokens: List[str]) -> Dict[str, Decimal]: + """ + Fetch prices immediately from Gateway for the given tokens. + This is used to get prices right away instead of waiting for the background update task. + + Uses the same pricing connector resolution as MarketDataProvider.update_rates_task(): + - solana -> jupiter/router + - ethereum -> uniswap/router + + Args: + chain: Blockchain chain (e.g., 'solana', 'ethereum') + network: Network name (e.g., 'mainnet-beta', 'mainnet') + tokens: List of token symbols to get prices for + + Returns: + Dictionary mapping token symbol to price in USDC + """ + from hummingbot.core.gateway.gateway_http_client import GatewayHttpClient + from hummingbot.core.rate_oracle.rate_oracle import RateOracle + from hummingbot.core.data_type.common import TradeType + + gateway_client = GatewayHttpClient.get_instance() + rate_oracle = RateOracle.get_instance() + prices = {} + + # Resolve pricing connector based on chain (same logic as MarketDataProvider) + pricing_connector = self.gateway_default_pricing_connector.get(chain) + if not pricing_connector: + logger.warning(f"No pricing connector configured for chain '{chain}', skipping immediate price fetch") + return prices + + # Create tasks for all tokens in parallel + tasks = [] + task_tokens = [] + + for token in tokens: + try: + task = gateway_client.get_price( + chain=chain, + network=network, + connector=pricing_connector, + base_asset=token, + quote_asset="USDC", + amount=Decimal("1"), + side=TradeType.BUY + ) + tasks.append(task) + task_tokens.append(token) + except Exception as e: + logger.warning(f"Error preparing price request for {token}: {e}") + continue + + if tasks: + try: + results = await asyncio.gather(*tasks, return_exceptions=True) + for token, result in zip(task_tokens, results): + if isinstance(result, Exception): + logger.warning(f"Error fetching price for {token}: {result}") + elif result and "price" in result: + price = Decimal(str(result["price"])) + prices[token] = price + # Also update the rate oracle so future lookups can find it + trading_pair = f"{token}-USDC" + rate_oracle.set_price(trading_pair, price) + logger.debug(f"Fetched immediate price for {token}: {price} USDC") + except Exception as e: + logger.error(f"Error fetching gateway prices: {e}", exc_info=True) + + return prices + def get_unwrapped_token(self, token: str) -> str: """Get the unwrapped version of a wrapped token symbol.""" for pw in self.potential_wrapped_tokens: diff --git a/services/gateway_client.py b/services/gateway_client.py index 6f788a3b..9cfac128 100644 --- a/services/gateway_client.py +++ b/services/gateway_client.py @@ -59,19 +59,25 @@ async def _request(self, method: str, path: str, params: Dict = None, json: Dict try: if method == "GET": async with session.get(url, params=params) as response: - response.raise_for_status() + if not response.ok: + error_body = await self._get_error_body(response) + logger.warning(f"Gateway request failed: {method} {url} - {response.status} - {error_body}") + return {"error": error_body, "status": response.status} return await response.json() elif method == "POST": async with session.post(url, json=json) as response: - response.raise_for_status() + if not response.ok: + error_body = await self._get_error_body(response) + logger.warning(f"Gateway request failed: {method} {url} - {response.status} - {error_body}") + return {"error": 
error_body, "status": response.status} return await response.json() elif method == "DELETE": async with session.delete(url, params=params, json=json) as response: - response.raise_for_status() + if not response.ok: + error_body = await self._get_error_body(response) + logger.warning(f"Gateway request failed: {method} {url} - {response.status} - {error_body}") + return {"error": error_body, "status": response.status} return await response.json() - except aiohttp.ClientResponseError as e: - logger.warning(f"Gateway request failed with status {e.status}: {method} {url} - {e.message}") - return None except aiohttp.ClientError as e: logger.debug(f"Gateway request error: {method} {url} - {e}") return None @@ -79,6 +85,19 @@ async def _request(self, method: str, path: str, params: Dict = None, json: Dict logger.debug(f"Gateway request failed: {method} {url} - {e}") raise + async def _get_error_body(self, response: aiohttp.ClientResponse) -> str: + """Extract error message from response body""" + try: + data = await response.json() + if isinstance(data, dict): + return data.get("message") or data.get("error") or str(data) + return str(data) + except Exception: + try: + return await response.text() + except Exception: + return f"HTTP {response.status}" + async def ping(self) -> bool: """Check if Gateway is online""" try: @@ -185,15 +204,39 @@ async def get_pools(self, connector: str, network: str) -> List[Dict]: "network": network }) - async def add_pool(self, connector: str, pool_type: str, network: str, base_symbol: str, quote_symbol: str, address: str) -> Dict: + async def add_pool( + self, + connector: str, + pool_type: str, + network: str, + address: str, + base_symbol: str, + quote_symbol: str, + base_token_address: str, + quote_token_address: str, + fee_pct: Optional[float] = None + ) -> Dict: """Add a new pool""" - return await self._request("POST", "pools", json={ + payload = { "connector": connector, - "type": pool_type, + "type": pool_type.lower(), # Gateway expects lowercase (amm, clmm) "network": network, + "address": address, "baseSymbol": base_symbol, "quoteSymbol": quote_symbol, - "address": address + "baseTokenAddress": base_token_address, + "quoteTokenAddress": quote_token_address + } + if fee_pct is not None: + payload["feePct"] = fee_pct + return await self._request("POST", "pools", json=payload) + + async def delete_pool(self, connector: str, network: str, pool_type: str, address: str) -> Dict: + """Delete a pool from Gateway's pool list""" + return await self._request("DELETE", f"pools/{address}", params={ + "connector": connector, + "network": network, + "type": pool_type.lower() # Gateway expects lowercase (amm, clmm) }) async def pool_info(self, connector: str, network: str, pool_address: str) -> Dict: @@ -375,13 +418,13 @@ async def clmm_position_info( wallet_address: str, position_address: str ) -> Dict: - """Get CLMM position information""" - return await self._request("POST", "clmm/liquidity/position", json={ - "connector": connector, + """Get CLMM position information including pending fees""" + params = { "network": network, - "address": wallet_address, + "walletAddress": wallet_address, "positionAddress": position_address - }) + } + return await self._request("GET", f"connectors/{connector}/clmm/position-info", params=params) async def clmm_positions_owned( self, diff --git a/services/gateway_transaction_poller.py b/services/gateway_transaction_poller.py index 938283ef..9a0b1090 100644 --- a/services/gateway_transaction_poller.py +++ 
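
With the `gateway_client` changes above, `_request` now returns an `{"error": ..., "status": ...}` dict for non-2xx responses instead of `None`, so callers have three cases to distinguish. A hedged sketch of caller-side handling; only the dict keys and the method signature come from the patch, and the 404-means-closed convention mirrors the poller change further down (the function name is illustrative):

```python
# Illustrative consumer of the new error-dict contract.
async def fetch_position(gateway_client, connector, network, wallet, position_address):
    result = await gateway_client.clmm_position_info(
        connector=connector,
        network=network,
        wallet_address=wallet,
        position_address=position_address,
    )
    if result is None:
        return None                      # transport-level failure (aiohttp.ClientError path)
    if isinstance(result, dict) and "error" in result:
        if result.get("status") == 404:
            return {"closed": True}      # position no longer exists on-chain
        return None                      # other Gateway errors: skip, don't assume closed
    return result                        # normal payload: result["address"], amounts, fees
```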
b/services/gateway_transaction_poller.py @@ -12,6 +12,9 @@ from datetime import datetime, timedelta, timezone from decimal import Decimal +from sqlalchemy import select +from sqlalchemy.orm import selectinload + from database import AsyncDatabaseManager from database.repositories import GatewaySwapRepository, GatewayCLMMRepository from database.models import GatewayCLMMEvent, GatewayCLMMPosition @@ -190,14 +193,11 @@ async def _poll_swap_transaction(self, swap, swap_repo: GatewaySwapRepository): async def _poll_clmm_event_transaction(self, event, clmm_repo: GatewayCLMMRepository): """Poll a specific CLMM event transaction status.""" try: - # Get the position to access network info - position = await clmm_repo.get_position_by_address( - position_address=(await self.db_manager.get_session_context().__aenter__()) - .query(GatewayCLMMEvent) - .filter(GatewayCLMMEvent.id == event.id) - .first() - .position.position_address + # Get the position by ID from the event's position_id foreign key + result = await clmm_repo.session.execute( + select(GatewayCLMMPosition).where(GatewayCLMMPosition.id == event.position_id) ) + position = result.scalar_one_or_none() if not position: logger.error(f"Position not found for CLMM event {event.transaction_hash}") @@ -245,35 +245,32 @@ async def _poll_clmm_event_transaction(self, event, clmm_repo: GatewayCLMMReposi async def _update_position_from_event(self, event, clmm_repo: GatewayCLMMRepository): """Update CLMM position state based on confirmed event.""" try: - # Get position through session - async with self.db_manager.get_session_context() as session: - result = await session.execute( - session.query(GatewayCLMMEvent).filter(GatewayCLMMEvent.id == event.id) - ) - event_with_position = result.scalar_one_or_none() - - if not event_with_position or not event_with_position.position: - logger.error(f"Position not found for event {event.id}") - return - - position = event_with_position.position + # Get position by ID using the existing clmm_repo session + result = await clmm_repo.session.execute( + select(GatewayCLMMPosition).where(GatewayCLMMPosition.id == event.position_id) + ) + position = result.scalar_one_or_none() - if event.event_type == "CLOSE": - await clmm_repo.close_position(position.position_address) + if not position: + logger.error(f"Position not found for event {event.id}") + return - elif event.event_type == "COLLECT_FEES": - # Add collected fees to cumulative total - if event.base_fee_collected or event.quote_fee_collected: - new_base_collected = float(position.base_fee_collected or 0) + float(event.base_fee_collected or 0) - new_quote_collected = float(position.quote_fee_collected or 0) + float(event.quote_fee_collected or 0) + if event.event_type == "CLOSE": + await clmm_repo.close_position(position.position_address) - await clmm_repo.update_position_fees( - position_address=position.position_address, - base_fee_collected=Decimal(str(new_base_collected)), - quote_fee_collected=Decimal(str(new_quote_collected)), - base_fee_pending=Decimal("0"), - quote_fee_pending=Decimal("0") - ) + elif event.event_type == "COLLECT_FEES": + # Add collected fees to cumulative total + if event.base_fee_collected or event.quote_fee_collected: + new_base_collected = float(position.base_fee_collected or 0) + float(event.base_fee_collected or 0) + new_quote_collected = float(position.quote_fee_collected or 0) + float(event.quote_fee_collected or 0) + + await clmm_repo.update_position_fees( + position_address=position.position_address, + 
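
The poller changes above drop the broken sync-style `.query(...)` chain in favour of an async `select()` keyed on the event's `position_id` foreign key. A self-contained sketch of that lookup pattern; the model import matches the modules this file already uses, and the helper name is illustrative:

```python
# Sketch of the async FK lookup the poller now performs.
from sqlalchemy import select

from database.models import GatewayCLMMPosition


async def load_position_for_event(session, event):
    result = await session.execute(
        select(GatewayCLMMPosition).where(GatewayCLMMPosition.id == event.position_id)
    )
    return result.scalar_one_or_none()  # None when the event's FK has no matching row
```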
base_fee_collected=Decimal(str(new_base_collected)), + quote_fee_collected=Decimal(str(new_quote_collected)), + base_fee_pending=Decimal("0"), + quote_fee_pending=Decimal("0") + ) except Exception as e: logger.error(f"Error updating position from event: {e}", exc_info=True) @@ -463,27 +460,39 @@ async def _refresh_position_state(self, position: GatewayCLMMPosition, clmm_repo chain, network = parts - # Get all positions for this pool from Gateway + # Get individual position info from Gateway (includes pending fees) try: - positions_list = await self.gateway_client.clmm_positions_owned( + result = await self.gateway_client.clmm_position_info( connector=position.connector, network=network, wallet_address=position.wallet_address, - pool_address=position.pool_address + position_address=position.position_address ) - # Find our specific position in the list - result = None - if isinstance(positions_list, list): - for pos in positions_list: - if pos.get("address") == position.position_address: - result = pos - break - - # If position not found, it was closed externally + # Check for Gateway errors if result is None: - logger.info(f"Position {position.position_address} not found on Gateway, marking as CLOSED") - await clmm_repo.close_position(position.position_address) + logger.debug(f"Gateway connection error for position {position.position_address}, skipping update") + return + + if not isinstance(result, dict): + logger.warning(f"Unexpected response type for position {position.position_address}: {type(result)}") + return + + # Check if Gateway returned an error response + if "error" in result: + status_code = result.get("status") + # 404 means position doesn't exist on-chain → close it + if status_code == 404: + logger.info(f"Position {position.position_address} not found on Gateway (404), marking as CLOSED") + await clmm_repo.close_position(position.position_address) + return + # Other errors (500, timeout, etc.) 
→ skip update, don't close + logger.debug(f"Gateway error for position {position.position_address}: {result.get('error')} (status: {status_code})") + return + + # Validate response has required fields + if "address" not in result: + logger.warning(f"Invalid response for position {position.position_address}, missing 'address' field") return except Exception as e: @@ -503,37 +512,46 @@ async def _refresh_position_state(self, position: GatewayCLMMPosition, clmm_repo else: in_range = "OUT_OF_RANGE" - # Extract token amounts - base_token_amount = Decimal(str(result.get("baseTokenAmount", 0))) - quote_token_amount = Decimal(str(result.get("quoteTokenAmount", 0))) + # Extract token amounts - validate they exist in response + base_amount_raw = result.get("baseTokenAmount") + quote_amount_raw = result.get("quoteTokenAmount") - # Check if position has been closed (zero liquidity) + # If amounts are missing or None, skip update (don't assume zero) + if base_amount_raw is None or quote_amount_raw is None: + logger.warning(f"Position {position.position_address} missing token amounts in response, skipping update") + return + + base_token_amount = Decimal(str(base_amount_raw)) + quote_token_amount = Decimal(str(quote_amount_raw)) + + # If Gateway confirms zero liquidity, position was closed externally if base_token_amount == 0 and quote_token_amount == 0: logger.info(f"Position {position.position_address} has zero liquidity, marking as CLOSED") await clmm_repo.close_position(position.position_address) return - # Update liquidity amounts and in_range status + # Update liquidity amounts, in_range status, and current price await clmm_repo.update_position_liquidity( position_address=position.position_address, base_token_amount=base_token_amount, quote_token_amount=quote_token_amount, - in_range=in_range + in_range=in_range, + current_price=current_price ) - # Update pending fees if available + # Update pending fees (always update to keep in sync with on-chain state) base_fee_pending = Decimal(str(result.get("baseFeeAmount", 0))) quote_fee_pending = Decimal(str(result.get("quoteFeeAmount", 0))) - if base_fee_pending or quote_fee_pending: - await clmm_repo.update_position_fees( - position_address=position.position_address, - base_fee_pending=base_fee_pending, - quote_fee_pending=quote_fee_pending - ) + await clmm_repo.update_position_fees( + position_address=position.position_address, + base_fee_pending=base_fee_pending, + quote_fee_pending=quote_fee_pending + ) - logger.debug(f"Refreshed position {position.position_address}: in_range={in_range}, " - f"base={base_token_amount}, quote={quote_token_amount}") + logger.debug(f"Refreshed position {position.position_address}: price={current_price}, in_range={in_range}, " + f"base={base_token_amount}, quote={quote_token_amount}, " + f"base_fee={base_fee_pending}, quote_fee={quote_fee_pending}") except Exception as e: logger.error(f"Error refreshing position state {position.position_address}: {e}", exc_info=True) diff --git a/services/orders_recorder.py b/services/orders_recorder.py index caa6d759..021a4512 100644 --- a/services/orders_recorder.py +++ b/services/orders_recorder.py @@ -161,9 +161,9 @@ async def _handle_order_created(self, event: Union[BuyOrderCreatedEvent, SellOrd logger.info(f"OrdersRecorder: Updated exchange_order_id to {exchange_order_id} for order {event.order_id}") # Update status if it's still in PENDING_CREATE or similar early state - if existing_order.status in ["PENDING_CREATE", "PENDING"]: - existing_order.status = "SUBMITTED" - 
logger.info(f"OrdersRecorder: Updated status from {existing_order.status} to SUBMITTED for order {event.order_id}") + if existing_order.status in ["PENDING_CREATE", "PENDING", "SUBMITTED"]: + existing_order.status = "OPEN" + logger.info(f"OrdersRecorder: Updated status to OPEN for order {event.order_id}") await session.flush() return @@ -177,7 +177,7 @@ async def _handle_order_created(self, event: Union[BuyOrderCreatedEvent, SellOrd "order_type": event.type.name if hasattr(event, 'type') else 'UNKNOWN', "amount": float(event.amount), "price": float(event.price) if event.price else None, - "status": "SUBMITTED", + "status": "OPEN", "exchange_order_id": getattr(event, 'exchange_order_id', None) } await order_repo.create_order(order_data) @@ -237,10 +237,18 @@ async def _handle_order_filled(self, event: OrderFilledEvent): # Validate all values before creating trade record validated_timestamp = event.timestamp if event.timestamp and not math.isnan(event.timestamp) else time.time() validated_fee = trade_fee_paid if trade_fee_paid and not math.isnan(trade_fee_paid) else 0 - + + # Use exchange_trade_id if available (unique per fill), fallback to generated id + exchange_trade_id = getattr(event, 'exchange_trade_id', None) + if exchange_trade_id: + trade_id = f"{event.order_id}_{exchange_trade_id}" + else: + # Fallback: include amount to differentiate partial fills at same timestamp + trade_id = f"{event.order_id}_{validated_timestamp}_{float(filled_amount)}" + trade_data = { "order_id": order.id, - "trade_id": f"{event.order_id}_{validated_timestamp}", + "trade_id": trade_id, "timestamp": datetime.fromtimestamp(validated_timestamp), "trading_pair": event.trading_pair, "trade_type": event.trade_type.name,
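
The `trade_id` change above is what de-duplicates fills: an exchange-assigned fill id makes re-delivered `OrderFilledEvent`s idempotent, while the fallback keeps two partial fills at the same timestamp distinct. A small self-contained sketch of that scheme (the function name is illustrative, not part of the patch):

```python
def build_trade_id(order_id: str, exchange_trade_id, timestamp: float, filled_amount: float) -> str:
    # Prefer the exchange-assigned fill id: repeated events for the same fill collapse to one id.
    if exchange_trade_id:
        return f"{order_id}_{exchange_trade_id}"
    # Fallback: timestamp plus amount keeps simultaneous partial fills distinct.
    return f"{order_id}_{timestamp}_{float(filled_amount)}"

assert build_trade_id("oid-1", "fill-7", 1700000000.0, 0.5) == "oid-1_fill-7"
assert build_trade_id("oid-1", None, 1700000000.0, 0.5) != build_trade_id("oid-1", None, 1700000000.0, 0.7)
```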