From e662e246f565938c6f0ac832809b3f32941c4734 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 20 Jan 2026 20:25:34 -0300 Subject: [PATCH 01/20] (feat) add executors repository --- database/models.py | 82 +++++++++++++++++++++++++++---- database/repositories/__init__.py | 6 ++- 2 files changed, 76 insertions(+), 12 deletions(-) diff --git a/database/models.py b/database/models.py index bd2b5ae7..830e40d9 100644 --- a/database/models.py +++ b/database/models.py @@ -1,13 +1,4 @@ -from sqlalchemy import ( - TIMESTAMP, - Column, - ForeignKey, - Integer, - Numeric, - String, - Text, - func, -) +from sqlalchemy import TIMESTAMP, Column, ForeignKey, Integer, Numeric, String, Text, func from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import relationship @@ -353,3 +344,74 @@ class GatewayCLMMEvent(Base): position = relationship("GatewayCLMMPosition", back_populates="events") +class ExecutorRecord(Base): + """Database model for executor state persistence.""" + __tablename__ = "executors" + + id = Column(Integer, primary_key=True, index=True) + + # Executor identification + executor_id = Column(String, nullable=False, unique=True, index=True) + executor_type = Column(String, nullable=False, index=True) + + # Account and connector info + account_name = Column(String, nullable=False, index=True) + connector_name = Column(String, nullable=False, index=True) + trading_pair = Column(String, nullable=False, index=True) + + # Timestamps + created_at = Column(TIMESTAMP(timezone=True), server_default=func.now(), nullable=False, index=True) + closed_at = Column(TIMESTAMP(timezone=True), nullable=True, index=True) + + # Status + status = Column(String, nullable=False, default="RUNNING", index=True) + close_type = Column(String, nullable=True) + + # Performance metrics + net_pnl_quote = Column(Numeric(precision=30, scale=18), nullable=False, default=0) + net_pnl_pct = Column(Numeric(precision=10, scale=6), nullable=False, default=0) + cum_fees_quote = 
Column(Numeric(precision=30, scale=18), nullable=False, default=0) + filled_amount_quote = Column(Numeric(precision=30, scale=18), nullable=False, default=0) + + # Configuration (JSON) + config = Column(Text, nullable=True) + + # Final state (JSON) + final_state = Column(Text, nullable=True) + + # Relationships + orders = relationship("ExecutorOrder", back_populates="executor", cascade="all, delete-orphan") + + +class ExecutorOrder(Base): + """Database model for orders created by executors.""" + __tablename__ = "executor_orders" + + id = Column(Integer, primary_key=True, index=True) + + # Executor reference + executor_id = Column(String, ForeignKey("executors.executor_id"), nullable=False, index=True) + + # Order identification + client_order_id = Column(String, nullable=False, index=True) + exchange_order_id = Column(String, nullable=True) + + # Order details + order_type = Column(String, nullable=False) # open, close, take_profit, stop_loss + trade_type = Column(String, nullable=False) # BUY, SELL + amount = Column(Numeric(precision=30, scale=18), nullable=False) + price = Column(Numeric(precision=30, scale=18), nullable=True) + + # Execution + status = Column(String, nullable=False, default="SUBMITTED") + filled_amount = Column(Numeric(precision=30, scale=18), nullable=False, default=0) + average_fill_price = Column(Numeric(precision=30, scale=18), nullable=True) + + # Timestamps + created_at = Column(TIMESTAMP(timezone=True), server_default=func.now(), nullable=False) + updated_at = Column(TIMESTAMP(timezone=True), onupdate=func.now(), nullable=True) + + # Relationship + executor = relationship("ExecutorRecord", back_populates="orders") + + diff --git a/database/repositories/__init__.py b/database/repositories/__init__.py index fa49c47e..bc8dd107 100644 --- a/database/repositories/__init__.py +++ b/database/repositories/__init__.py @@ -1,14 +1,16 @@ from .account_repository import AccountRepository from .bot_run_repository import BotRunRepository +from 
.executor_repository import ExecutorRepository from .funding_repository import FundingRepository +from .gateway_clmm_repository import GatewayCLMMRepository +from .gateway_swap_repository import GatewaySwapRepository from .order_repository import OrderRepository from .trade_repository import TradeRepository -from .gateway_swap_repository import GatewaySwapRepository -from .gateway_clmm_repository import GatewayCLMMRepository __all__ = [ "AccountRepository", "BotRunRepository", + "ExecutorRepository", "FundingRepository", "OrderRepository", "TradeRepository", From 03cee634b504de49cac3fe0e364747e313b0fe53 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 20 Jan 2026 20:25:57 -0300 Subject: [PATCH 02/20] (feat) add order book tracking registry and removal --- models/__init__.py | 31 +++++++++++++++++++++++++++++++ models/market_data.py | 27 ++++++++++++++++++++++++++- 2 files changed, 57 insertions(+), 1 deletion(-) diff --git a/models/__init__.py b/models/__init__.py index b0d04fb4..b2575017 100644 --- a/models/__init__.py +++ b/models/__init__.py @@ -88,6 +88,10 @@ PriceForQuoteVolumeRequest, VWAPForVolumeRequest, OrderBookQueryResult, + # Trading pair management models + AddTradingPairRequest, + RemoveTradingPairRequest, + TradingPairResponse, ) # Account models @@ -205,6 +209,19 @@ SingleRateResponse, ) +# Executor models +from .executors import ( + CreateExecutorRequest, + CreateExecutorResponse, + StopExecutorRequest, + StopExecutorResponse, + DeleteExecutorResponse, + ExecutorFilterRequest, + ExecutorResponse, + ExecutorDetailResponse, + ExecutorsSummaryResponse, +) + __all__ = [ # Bot orchestration models "BotAction", @@ -275,6 +292,10 @@ "PriceForQuoteVolumeRequest", "VWAPForVolumeRequest", "OrderBookQueryResult", + # Trading pair management models + "AddTradingPairRequest", + "RemoveTradingPairRequest", + "TradingPairResponse", # Account models "LeverageRequest", "PositionModeRequest", @@ -363,4 +384,14 @@ "RateRequest", "RateResponse", 
"SingleRateResponse", + # Executor models + "CreateExecutorRequest", + "CreateExecutorResponse", + "StopExecutorRequest", + "StopExecutorResponse", + "DeleteExecutorResponse", + "ExecutorFilterRequest", + "ExecutorResponse", + "ExecutorDetailResponse", + "ExecutorsSummaryResponse", ] \ No newline at end of file diff --git a/models/market_data.py b/models/market_data.py index 1f618ec2..047f6b6f 100644 --- a/models/market_data.py +++ b/models/market_data.py @@ -165,4 +165,29 @@ class OrderBookQueryResult(BaseModel): result_volume: Optional[float] = Field(default=None, description="Resulting volume") result_quote_volume: Optional[float] = Field(default=None, description="Resulting quote volume") average_price: Optional[float] = Field(default=None, description="Average/VWAP price") - timestamp: float = Field(description="Query timestamp") \ No newline at end of file + timestamp: float = Field(description="Query timestamp") + + +# Trading Pair Management Models + +class AddTradingPairRequest(BaseModel): + """Request model for adding a trading pair to order book tracking""" + connector_name: str = Field(description="Name of the connector (e.g., 'binance', 'binance_perpetual')") + trading_pair: str = Field(description="Trading pair to add (e.g., 'BTC-USDT')") + account_name: Optional[str] = Field(default=None, description="Optional account name for trading connector preference") + timeout: float = Field(default=30.0, ge=1.0, le=120.0, description="Timeout in seconds for order book initialization") + + +class RemoveTradingPairRequest(BaseModel): + """Request model for removing a trading pair from order book tracking""" + connector_name: str = Field(description="Name of the connector") + trading_pair: str = Field(description="Trading pair to remove") + account_name: Optional[str] = Field(default=None, description="Optional account name for trading connector preference") + + +class TradingPairResponse(BaseModel): + """Response model for trading pair management operations""" 
+ success: bool = Field(description="Whether the operation succeeded") + connector_name: str = Field(description="Name of the connector") + trading_pair: str = Field(description="Trading pair that was added/removed") + message: str = Field(description="Status message") \ No newline at end of file From 5c2670bc88087aca443f9d3bea1bff07f7d15214 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 20 Jan 2026 20:26:12 -0300 Subject: [PATCH 03/20] (feat) add executors model --- models/executors.py | 211 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 211 insertions(+) create mode 100644 models/executors.py diff --git a/models/executors.py b/models/executors.py new file mode 100644 index 00000000..7d892e30 --- /dev/null +++ b/models/executors.py @@ -0,0 +1,211 @@ +""" +Pydantic models for executor API endpoints. + +These models wrap Hummingbot's executor configuration types and provide +validation for the REST API. +""" +from datetime import datetime +from typing import Any, Dict, List, Literal, Optional + +from pydantic import BaseModel, ConfigDict, Field + +from .pagination import PaginationParams + + +# ======================================== +# Executor Type Definitions +# ======================================== + +EXECUTOR_TYPES = Literal[ + "position_executor", + "grid_executor", + "dca_executor", + "arbitrage_executor", + "twap_executor", + "xemm_executor", + "order_executor" +] + + +# ======================================== +# API Request Models +# ======================================== + +class CreateExecutorRequest(BaseModel): + """Request to create a new executor.""" + model_config = ConfigDict( + json_schema_extra={ + "example": { + "account_name": "master_account", + "executor_config": { + "type": "position_executor", + "connector_name": "binance_perpetual", + "trading_pair": "BTC-USDT", + "side": "BUY", + "amount": "0.01", + "leverage": 10, + "triple_barrier_config": { + "stop_loss": "0.02", + "take_profit": "0.04", + "time_limit": 3600 + 
} + } + } + } + ) + + account_name: Optional[str] = Field( + None, + description="Account name to use (defaults to master_account)" + ) + executor_config: Dict[str, Any] = Field( + ..., + description="Executor configuration. Must include 'type' field and executor-specific parameters." + ) + + +class StopExecutorRequest(BaseModel): + """Request to stop an executor.""" + keep_position: bool = Field( + default=False, + description="Whether to keep the position open (for position executors)" + ) + + +class ExecutorFilterRequest(PaginationParams): + """Request to filter and list executors.""" + account_names: Optional[List[str]] = Field( + None, + description="Filter by account names" + ) + connector_names: Optional[List[str]] = Field( + None, + description="Filter by connector names" + ) + trading_pairs: Optional[List[str]] = Field( + None, + description="Filter by trading pairs" + ) + executor_types: Optional[List[EXECUTOR_TYPES]] = Field( + None, + description="Filter by executor types" + ) + status: Optional[str] = Field( + None, + description="Filter by status (RUNNING, TERMINATED, etc.)" + ) + include_completed: bool = Field( + default=False, + description="Include recently completed executors" + ) + + +# ======================================== +# API Response Models +# ======================================== + +class ExecutorResponse(BaseModel): + """Response for a single executor (summary view).""" + model_config = ConfigDict( + json_schema_extra={ + "example": { + "executor_id": "abc123...", + "executor_type": "position_executor", + "account_name": "master_account", + "connector_name": "binance_perpetual", + "trading_pair": "BTC-USDT", + "side": "BUY", + "status": "RUNNING", + "is_active": True, + "is_trading": True, + "timestamp": 1705315800.0, + "created_at": "2024-01-15T10:30:00Z", + "close_type": None, + "close_timestamp": None, + "controller_id": None, + "net_pnl_quote": 125.50, + "net_pnl_pct": 2.5, + "cum_fees_quote": 1.25, + "filled_amount_quote": 
5000.0 + } + } + ) + + executor_id: str = Field(description="Unique executor identifier") + executor_type: Optional[str] = Field(description="Type of executor") + account_name: Optional[str] = Field(description="Account name") + connector_name: Optional[str] = Field(description="Connector name") + trading_pair: Optional[str] = Field(description="Trading pair") + side: Optional[str] = Field(None, description="Trade side (BUY/SELL) if applicable") + status: str = Field(description="Current status (RUNNING, TERMINATED, etc.)") + is_active: bool = Field(description="Whether the executor is active") + is_trading: bool = Field(description="Whether the executor has open trades") + timestamp: Optional[float] = Field(None, description="Creation timestamp (Unix)") + created_at: Optional[str] = Field(None, description="Creation timestamp (ISO format)") + close_type: Optional[str] = Field(None, description="How the executor was closed (if applicable)") + close_timestamp: Optional[float] = Field(None, description="Close timestamp (Unix)") + controller_id: Optional[str] = Field(None, description="ID of the controller that spawned this executor") + net_pnl_quote: float = Field(description="Net PnL in quote currency") + net_pnl_pct: float = Field(description="Net PnL percentage") + cum_fees_quote: float = Field(description="Cumulative fees in quote currency") + filled_amount_quote: float = Field(description="Total filled amount in quote currency") + + +class ExecutorDetailResponse(ExecutorResponse): + """Detailed response for a single executor.""" + config: Optional[Dict[str, Any]] = Field( + None, + description="Full executor configuration" + ) + custom_info: Optional[Dict[str, Any]] = Field( + None, + description="Executor-specific custom information" + ) + + +class CreateExecutorResponse(BaseModel): + """Response after creating an executor.""" + executor_id: str = Field(description="Unique executor identifier") + executor_type: str = Field(description="Type of executor 
created") + connector_name: str = Field(description="Connector name") + trading_pair: str = Field(description="Trading pair") + status: str = Field(description="Initial status") + created_at: str = Field(description="Creation timestamp (ISO format)") + + +class StopExecutorResponse(BaseModel): + """Response after stopping an executor.""" + executor_id: str = Field(description="Executor identifier") + status: str = Field(description="New status (usually 'stopping')") + keep_position: bool = Field(description="Whether position was kept open") + + +class ExecutorsSummaryResponse(BaseModel): + """Summary of all executors.""" + model_config = ConfigDict( + json_schema_extra={ + "example": { + "total_active": 5, + "total_completed": 23, + "total_pnl_quote": 1234.56, + "total_volume_quote": 50000.00, + "by_type": {"position_executor": 3, "grid_executor": 2}, + "by_connector": {"binance_perpetual": 4, "binance": 1}, + "by_status": {"RUNNING": 5, "TERMINATED": 23} + } + } + ) + + total_active: int = Field(description="Number of active executors") + total_completed: int = Field(description="Number of completed executors") + total_pnl_quote: float = Field(description="Total PnL across all executors") + total_volume_quote: float = Field(description="Total volume across all executors") + by_type: Dict[str, int] = Field(description="Executor count by type") + by_connector: Dict[str, int] = Field(description="Executor count by connector") + by_status: Dict[str, int] = Field(description="Executor count by status") + + +class DeleteExecutorResponse(BaseModel): + """Response after deleting an executor from tracking.""" + message: str = Field(description="Success message") + executor_id: str = Field(description="Executor identifier that was removed") From 29389d7426a5eacc4086c873602788f57626ca68 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 20 Jan 2026 20:26:17 -0300 Subject: [PATCH 04/20] (feat) add executors repositor --- database/repositories/executor_repository.py | 283 
+++++++++++++++++++ 1 file changed, 283 insertions(+) create mode 100644 database/repositories/executor_repository.py diff --git a/database/repositories/executor_repository.py b/database/repositories/executor_repository.py new file mode 100644 index 00000000..72d034aa --- /dev/null +++ b/database/repositories/executor_repository.py @@ -0,0 +1,283 @@ +""" +Repository for executor database operations. +""" +from datetime import datetime, timezone +from decimal import Decimal +from typing import Any, Dict, List, Optional + +from sqlalchemy import and_, desc, func, select +from sqlalchemy.ext.asyncio import AsyncSession + +from database.models import ExecutorOrder, ExecutorRecord + + +class ExecutorRepository: + """Repository for ExecutorRecord and ExecutorOrder database operations.""" + + def __init__(self, session: AsyncSession): + self.session = session + + # ======================================== + # ExecutorRecord Operations + # ======================================== + + async def create_executor( + self, + executor_id: str, + executor_type: str, + account_name: str, + connector_name: str, + trading_pair: str, + config: Optional[str] = None, + status: str = "RUNNING" + ) -> ExecutorRecord: + """Create a new executor record.""" + executor = ExecutorRecord( + executor_id=executor_id, + executor_type=executor_type, + account_name=account_name, + connector_name=connector_name, + trading_pair=trading_pair, + config=config, + status=status + ) + + self.session.add(executor) + await self.session.flush() + await self.session.refresh(executor) + return executor + + async def update_executor( + self, + executor_id: str, + status: Optional[str] = None, + close_type: Optional[str] = None, + net_pnl_quote: Optional[Decimal] = None, + net_pnl_pct: Optional[Decimal] = None, + cum_fees_quote: Optional[Decimal] = None, + filled_amount_quote: Optional[Decimal] = None, + final_state: Optional[str] = None + ) -> Optional[ExecutorRecord]: + """Update an executor record.""" + stmt 
= select(ExecutorRecord).where(ExecutorRecord.executor_id == executor_id) + result = await self.session.execute(stmt) + executor = result.scalar_one_or_none() + + if executor: + if status is not None: + executor.status = status + if close_type is not None: + executor.close_type = close_type + executor.closed_at = datetime.now(timezone.utc) + if net_pnl_quote is not None: + executor.net_pnl_quote = net_pnl_quote + if net_pnl_pct is not None: + executor.net_pnl_pct = net_pnl_pct + if cum_fees_quote is not None: + executor.cum_fees_quote = cum_fees_quote + if filled_amount_quote is not None: + executor.filled_amount_quote = filled_amount_quote + if final_state is not None: + executor.final_state = final_state + + await self.session.flush() + await self.session.refresh(executor) + + return executor + + async def get_executor_by_id(self, executor_id: str) -> Optional[ExecutorRecord]: + """Get an executor by ID.""" + stmt = select(ExecutorRecord).where(ExecutorRecord.executor_id == executor_id) + result = await self.session.execute(stmt) + return result.scalar_one_or_none() + + async def get_executors( + self, + account_name: Optional[str] = None, + connector_name: Optional[str] = None, + trading_pair: Optional[str] = None, + executor_type: Optional[str] = None, + status: Optional[str] = None, + limit: int = 100, + offset: int = 0 + ) -> List[ExecutorRecord]: + """Get executors with optional filters.""" + stmt = select(ExecutorRecord) + + conditions = [] + if account_name: + conditions.append(ExecutorRecord.account_name == account_name) + if connector_name: + conditions.append(ExecutorRecord.connector_name == connector_name) + if trading_pair: + conditions.append(ExecutorRecord.trading_pair == trading_pair) + if executor_type: + conditions.append(ExecutorRecord.executor_type == executor_type) + if status: + conditions.append(ExecutorRecord.status == status) + + if conditions: + stmt = stmt.where(and_(*conditions)) + + stmt = 
stmt.order_by(desc(ExecutorRecord.created_at)).limit(limit).offset(offset) + + result = await self.session.execute(stmt) + return list(result.scalars().all()) + + async def get_active_executors( + self, + account_name: Optional[str] = None, + connector_name: Optional[str] = None + ) -> List[ExecutorRecord]: + """Get all active (running) executors.""" + stmt = select(ExecutorRecord).where(ExecutorRecord.status == "RUNNING") + + if account_name: + stmt = stmt.where(ExecutorRecord.account_name == account_name) + if connector_name: + stmt = stmt.where(ExecutorRecord.connector_name == connector_name) + + stmt = stmt.order_by(desc(ExecutorRecord.created_at)) + + result = await self.session.execute(stmt) + return list(result.scalars().all()) + + async def get_executor_stats(self) -> Dict[str, Any]: + """Get statistics about executors.""" + # Total executors + total_stmt = select(func.count(ExecutorRecord.id)) + total_result = await self.session.execute(total_stmt) + total_executors = total_result.scalar() or 0 + + # Active executors + active_stmt = select(func.count(ExecutorRecord.id)).where( + ExecutorRecord.status == "RUNNING" + ) + active_result = await self.session.execute(active_stmt) + active_executors = active_result.scalar() or 0 + + # Total PnL + pnl_stmt = select(func.sum(ExecutorRecord.net_pnl_quote)) + pnl_result = await self.session.execute(pnl_stmt) + total_pnl = pnl_result.scalar() or Decimal("0") + + # Total volume + volume_stmt = select(func.sum(ExecutorRecord.filled_amount_quote)) + volume_result = await self.session.execute(volume_stmt) + total_volume = volume_result.scalar() or Decimal("0") + + # Executors by type + type_stmt = select( + ExecutorRecord.executor_type, + func.count(ExecutorRecord.id).label('count') + ).group_by(ExecutorRecord.executor_type) + type_result = await self.session.execute(type_stmt) + type_counts = {row.executor_type: row.count for row in type_result} + + # Executors by status + status_stmt = select( + ExecutorRecord.status, + 
func.count(ExecutorRecord.id).label('count') + ).group_by(ExecutorRecord.status) + status_result = await self.session.execute(status_stmt) + status_counts = {row.status: row.count for row in status_result} + + # Executors by connector + connector_stmt = select( + ExecutorRecord.connector_name, + func.count(ExecutorRecord.id).label('count') + ).group_by(ExecutorRecord.connector_name) + connector_result = await self.session.execute(connector_stmt) + connector_counts = {row.connector_name: row.count for row in connector_result} + + return { + "total_executors": total_executors, + "active_executors": active_executors, + "total_pnl_quote": float(total_pnl), + "total_volume_quote": float(total_volume), + "type_counts": type_counts, + "status_counts": status_counts, + "connector_counts": connector_counts + } + + # ======================================== + # ExecutorOrder Operations + # ======================================== + + async def create_executor_order( + self, + executor_id: str, + client_order_id: str, + order_type: str, + trade_type: str, + amount: Decimal, + price: Optional[Decimal] = None, + exchange_order_id: Optional[str] = None, + status: str = "SUBMITTED" + ) -> ExecutorOrder: + """Create a new executor order record.""" + order = ExecutorOrder( + executor_id=executor_id, + client_order_id=client_order_id, + order_type=order_type, + trade_type=trade_type, + amount=amount, + price=price, + exchange_order_id=exchange_order_id, + status=status + ) + + self.session.add(order) + await self.session.flush() + await self.session.refresh(order) + return order + + async def update_executor_order( + self, + client_order_id: str, + status: Optional[str] = None, + filled_amount: Optional[Decimal] = None, + average_fill_price: Optional[Decimal] = None, + exchange_order_id: Optional[str] = None + ) -> Optional[ExecutorOrder]: + """Update an executor order record.""" + stmt = select(ExecutorOrder).where(ExecutorOrder.client_order_id == client_order_id) + result = await 
self.session.execute(stmt) + order = result.scalar_one_or_none() + + if order: + if status is not None: + order.status = status + if filled_amount is not None: + order.filled_amount = filled_amount + if average_fill_price is not None: + order.average_fill_price = average_fill_price + if exchange_order_id is not None: + order.exchange_order_id = exchange_order_id + + await self.session.flush() + await self.session.refresh(order) + + return order + + async def get_executor_orders( + self, + executor_id: str, + status: Optional[str] = None + ) -> List[ExecutorOrder]: + """Get orders for an executor.""" + stmt = select(ExecutorOrder).where(ExecutorOrder.executor_id == executor_id) + + if status: + stmt = stmt.where(ExecutorOrder.status == status) + + stmt = stmt.order_by(desc(ExecutorOrder.created_at)) + + result = await self.session.execute(stmt) + return list(result.scalars().all()) + + async def get_order_by_client_id(self, client_order_id: str) -> Optional[ExecutorOrder]: + """Get an order by client order ID.""" + stmt = select(ExecutorOrder).where(ExecutorOrder.client_order_id == client_order_id) + result = await self.session.execute(stmt) + return result.scalar_one_or_none() From 0af64f5734234f9e002ce811f3fc4fb5e3c19eb1 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 20 Jan 2026 20:26:42 -0300 Subject: [PATCH 05/20] (feat) remove filter --- routers/portfolio.py | 60 +++----------------------------------------- 1 file changed, 3 insertions(+), 57 deletions(-) diff --git a/routers/portfolio.py b/routers/portfolio.py index 65bec864..671bd07f 100644 --- a/routers/portfolio.py +++ b/routers/portfolio.py @@ -7,7 +7,6 @@ PortfolioStateFilterRequest, PortfolioHistoryFilterRequest, PortfolioDistributionFilterRequest, - AccountsDistributionFilterRequest ) from services.accounts_service import AccountsService from deps import get_accounts_service @@ -291,67 +290,14 @@ async def get_portfolio_distribution( return distribution -@router.post("/accounts-distribution") 
+@router.get("/accounts-distribution") async def get_accounts_distribution( - filter_request: AccountsDistributionFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service) ): """ Get portfolio distribution by accounts with percentages. - - Args: - filter_request: JSON payload with filtering criteria - + Returns: Dictionary with account distribution including percentages, values, and breakdown by connectors """ - all_distribution = accounts_service.get_account_distribution() - - # If no filter, return all accounts - if not filter_request.account_names: - return all_distribution - - # Filter the distribution by requested accounts - filtered_distribution = { - "accounts": {}, - "total_value": 0, - "account_count": 0 - } - - for account_name in filter_request.account_names: - if account_name in all_distribution.get("accounts", {}): - filtered_distribution["accounts"][account_name] = all_distribution["accounts"][account_name] - filtered_distribution["total_value"] += all_distribution["accounts"][account_name].get("total_value", 0) - - # Apply connector filter if specified - if filter_request.connector_names: - for account_name, account_data in filtered_distribution["accounts"].items(): - if "connectors" in account_data: - filtered_connectors = {} - for connector_name in filter_request.connector_names: - if connector_name in account_data["connectors"]: - filtered_connectors[connector_name] = account_data["connectors"][connector_name] - account_data["connectors"] = filtered_connectors - - # Recalculate account total after connector filtering - new_total = sum( - conn_data.get("total_balance_in_usd", 0) - for conn_data in filtered_connectors.values() - ) - account_data["total_value"] = new_total - - # Recalculate total_value after connector filtering - filtered_distribution["total_value"] = sum( - acc_data.get("total_value", 0) - for acc_data in filtered_distribution["accounts"].values() - ) - - # Recalculate percentages - total_value = 
filtered_distribution["total_value"] - if total_value > 0: - for account_data in filtered_distribution["accounts"].values(): - account_data["percentage"] = (account_data.get("total_value", 0) / total_value) * 100 - - filtered_distribution["account_count"] = len(filtered_distribution["accounts"]) - - return filtered_distribution + return accounts_service.get_account_distribution() From d1b8f8f9836c2b4bd0b4178ccedd70defb1025b5 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 20 Jan 2026 20:27:07 -0300 Subject: [PATCH 06/20] (feat) re-engineer market data servivce --- routers/connectors.py | 27 +- routers/market_data.py | 142 +++++- routers/rate_oracle.py | 4 +- routers/trading.py | 40 +- services/market_data_feed_manager.py | 601 ----------------------- services/market_data_service.py | 701 +++++++++++++++++++++++++++ 6 files changed, 865 insertions(+), 650 deletions(-) delete mode 100644 services/market_data_feed_manager.py create mode 100644 services/market_data_service.py diff --git a/routers/connectors.py b/routers/connectors.py index 3c59b87f..85d7af86 100644 --- a/routers/connectors.py +++ b/routers/connectors.py @@ -4,9 +4,8 @@ from hummingbot.client.settings import AllConnectorSettings from deps import get_accounts_service -from models import AddTokenRequest from services.accounts_service import AccountsService -from services.market_data_feed_manager import MarketDataFeedManager +from services.market_data_service import MarketDataService router = APIRouter(tags=["Connectors"], prefix="/connectors") @@ -48,25 +47,25 @@ async def get_trading_rules( """ Get trading rules for a connector, optionally filtered by trading pairs. - This endpoint uses the MarketDataFeedManager to access non-trading connector instances, + This endpoint uses the MarketDataService to access non-trading connector instances, which means no authentication or account setup is required. 
- + Args: request: FastAPI request object connector_name: Name of the connector (e.g., 'binance', 'binance_perpetual') trading_pairs: Optional list of trading pairs to filter by (e.g., ['BTC-USDT', 'ETH-USDT']) - + Returns: Dictionary mapping trading pairs to their trading rules - + Raises: HTTPException: 404 if connector not found, 500 for other errors """ try: - market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager - + market_data_service: MarketDataService = request.app.state.market_data_service + # Get trading rules (filtered by trading pairs if provided) - rules = await market_data_feed_manager.get_trading_rules(connector_name, trading_pairs) + rules = await market_data_service.get_trading_rules(connector_name, trading_pairs) if "error" in rules: raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found or error: {rules['error']}") @@ -84,7 +83,7 @@ async def get_supported_order_types(request: Request, connector_name: str): """ Get order types supported by a specific connector. - This endpoint uses the MarketDataFeedManager to access non-trading connector instances, + This endpoint uses the MarketDataService to access non-trading connector instances, which means no authentication or account setup is required. 
Args: @@ -98,12 +97,12 @@ async def get_supported_order_types(request: Request, connector_name: str): HTTPException: 404 if connector not found, 500 for other errors """ try: - market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager + market_data_service: MarketDataService = request.app.state.market_data_service - # Access connector through MarketDataProvider's _non_trading_connectors LazyDict - # This lazily creates the connector if it doesn't exist + # Access connector through UnifiedConnectorService + # This creates a data connector if it doesn't exist try: - connector_instance = market_data_feed_manager.market_data_provider._non_trading_connectors[connector_name] + connector_instance = market_data_service.connector_service.get_data_connector(connector_name) except (KeyError, ValueError) as e: raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found: {str(e)}") diff --git a/routers/market_data.py b/routers/market_data.py index 214cb636..20771813 100644 --- a/routers/market_data.py +++ b/routers/market_data.py @@ -6,14 +6,15 @@ from hummingbot.data_feed.candles_feed.candles_factory import CandlesFactory from models.market_data import CandlesConfigRequest -from services.market_data_feed_manager import MarketDataFeedManager +from services.market_data_service import MarketDataService from models import ( PriceRequest, PricesResponse, FundingInfoRequest, FundingInfoResponse, OrderBookRequest, OrderBookResponse, OrderBookLevel, VolumeForPriceRequest, PriceForVolumeRequest, QuoteVolumeForPriceRequest, - PriceForQuoteVolumeRequest, VWAPForVolumeRequest, OrderBookQueryResult + PriceForQuoteVolumeRequest, VWAPForVolumeRequest, OrderBookQueryResult, + AddTradingPairRequest, RemoveTradingPairRequest, TradingPairResponse ) -from deps import get_market_data_feed_manager +from deps import get_market_data_service router = APIRouter(tags=["Market Data"], prefix="/market-data") @@ -35,13 +36,13 @@ async def 
get_candles(request: Request, candles_config: CandlesConfigRequest): Real-time candles data or error message """ try: - market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager + market_data_service: MarketDataService = request.app.state.market_data_service # Get or create the candles feed (this will start it automatically and track access time) candles_cfg = CandlesConfig( connector=candles_config.connector_name, trading_pair=candles_config.trading_pair, interval=candles_config.interval, max_records=candles_config.max_records) - candles_feed = market_data_feed_manager.get_candles_feed(candles_cfg) + candles_feed = market_data_service.get_candles_feed(candles_cfg) # Wait for the candles feed to be ready while not candles_feed.ready: @@ -75,7 +76,7 @@ async def get_historical_candles(request: Request, config: HistoricalCandlesConf Historical candles data or error message """ try: - market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager + market_data_service: MarketDataService = request.app.state.market_data_service # Create candles config from historical config candles_config = CandlesConfig( @@ -85,7 +86,7 @@ async def get_historical_candles(request: Request, config: HistoricalCandlesConf ) # Get or create the candles feed (this will track access time) - candles = market_data_feed_manager.get_candles_feed(candles_config) + candles = market_data_service.get_candles_feed(candles_config) # Fetch historical candles historical_data = await candles.get_historical_candles(config=config) @@ -112,8 +113,8 @@ async def get_active_feeds(request: Request): Dictionary with active feeds information including last access times and expiration """ try: - market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager - return market_data_feed_manager.get_active_feeds_info() + market_data_service: MarketDataService = request.app.state.market_data_service + return 
market_data_service.get_active_feeds_info() except Exception as e: return {"error": str(e)} @@ -150,7 +151,7 @@ async def get_available_candle_connectors(): @router.post("/prices", response_model=PricesResponse) async def get_prices( request: PriceRequest, - market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) + market_data_manager: MarketDataService = Depends(get_market_data_service) ): """ Get current prices for specified trading pairs from a connector. @@ -188,7 +189,7 @@ async def get_prices( @router.post("/funding-info", response_model=FundingInfoResponse) async def get_funding_info( request: FundingInfoRequest, - market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) + market_data_manager: MarketDataService = Depends(get_market_data_service) ): """ Get funding information for a perpetual trading pair. @@ -227,7 +228,7 @@ async def get_funding_info( @router.post("/order-book", response_model=OrderBookResponse) async def get_order_book( request: OrderBookRequest, - market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) + market_data_manager: MarketDataService = Depends(get_market_data_service) ): """ Get order book snapshot with specified depth. @@ -273,7 +274,7 @@ async def get_order_book( @router.post("/order-book/price-for-volume", response_model=OrderBookQueryResult) async def get_price_for_volume( request: PriceForVolumeRequest, - market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) + market_data_manager: MarketDataService = Depends(get_market_data_service) ): """ Get the price required to fill a specific volume on the order book. 
@@ -306,7 +307,7 @@ async def get_price_for_volume( @router.post("/order-book/volume-for-price", response_model=OrderBookQueryResult) async def get_volume_for_price( request: VolumeForPriceRequest, - market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) + market_data_manager: MarketDataService = Depends(get_market_data_service) ): """ Get the volume available at a specific price level on the order book. @@ -339,7 +340,7 @@ async def get_volume_for_price( @router.post("/order-book/price-for-quote-volume", response_model=OrderBookQueryResult) async def get_price_for_quote_volume( request: PriceForQuoteVolumeRequest, - market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) + market_data_manager: MarketDataService = Depends(get_market_data_service) ): """ Get the price required to fill a specific quote volume on the order book. @@ -372,7 +373,7 @@ async def get_price_for_quote_volume( @router.post("/order-book/quote-volume-for-price", response_model=OrderBookQueryResult) async def get_quote_volume_for_price( request: QuoteVolumeForPriceRequest, - market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) + market_data_manager: MarketDataService = Depends(get_market_data_service) ): """ Get the quote volume available at a specific price level on the order book. @@ -405,7 +406,7 @@ async def get_quote_volume_for_price( @router.post("/order-book/vwap-for-volume", response_model=OrderBookQueryResult) async def get_vwap_for_volume( request: VWAPForVolumeRequest, - market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) + market_data_manager: MarketDataService = Depends(get_market_data_service) ): """ Get the VWAP (Volume Weighted Average Price) for a specific volume on the order book. 
@@ -424,10 +425,10 @@ async def get_vwap_for_volume( request.is_buy, vwap_volume=request.volume ) - + if "error" in result: raise HTTPException(status_code=500, detail=result["error"]) - + return OrderBookQueryResult(**result) except HTTPException: raise @@ -435,3 +436,106 @@ async def get_vwap_for_volume( raise HTTPException(status_code=500, detail=f"Error in order book query: {str(e)}") +# Trading Pair Management Endpoints + +@router.post("/trading-pair/add", response_model=TradingPairResponse) +async def add_trading_pair( + request: AddTradingPairRequest, + market_data_service: MarketDataService = Depends(get_market_data_service) +): + """ + Initialize order book for a trading pair. + + This endpoint dynamically adds a trading pair to a connector's order book tracker. + It uses the best available connector (trading connectors are preferred over data connectors). + + Args: + request: Request with connector name, trading pair, optional account name, and timeout + + Returns: + TradingPairResponse with success status and message + + Raises: + HTTPException: 500 if initialization fails + """ + try: + success = await market_data_service.initialize_order_book( + connector_name=request.connector_name, + trading_pair=request.trading_pair, + account_name=request.account_name, + timeout=request.timeout + ) + + if success: + return TradingPairResponse( + success=True, + connector_name=request.connector_name, + trading_pair=request.trading_pair, + message=f"Order book initialized for {request.trading_pair}" + ) + else: + raise HTTPException( + status_code=500, + detail=f"Failed to initialize order book for {request.trading_pair}" + ) + + except HTTPException: + raise + except Exception as e: + raise HTTPException( + status_code=500, + detail=f"Error initializing order book: {str(e)}" + ) + + +@router.post("/trading-pair/remove", response_model=TradingPairResponse) +async def remove_trading_pair( + request: RemoveTradingPairRequest, + market_data_service: MarketDataService = 
Depends(get_market_data_service) +): + """ + Remove a trading pair from order book tracking. + + This endpoint removes a trading pair from a connector's order book tracker, + cleaning up resources for pairs that are no longer needed. + + Args: + request: Request with connector name, trading pair, and optional account name + + Returns: + TradingPairResponse with success status and message + + Raises: + HTTPException: 500 if removal fails + """ + try: + success = await market_data_service.remove_trading_pair( + connector_name=request.connector_name, + trading_pair=request.trading_pair, + account_name=request.account_name + ) + + if success: + return TradingPairResponse( + success=True, + connector_name=request.connector_name, + trading_pair=request.trading_pair, + message=f"Trading pair {request.trading_pair} removed" + ) + else: + return TradingPairResponse( + success=False, + connector_name=request.connector_name, + trading_pair=request.trading_pair, + message=f"Trading pair {request.trading_pair} not found or already removed" + ) + + except HTTPException: + raise + except Exception as e: + raise HTTPException( + status_code=500, + detail=f"Error removing trading pair: {str(e)}" + ) + + diff --git a/routers/rate_oracle.py b/routers/rate_oracle.py index 64df979d..36838a3d 100644 --- a/routers/rate_oracle.py +++ b/routers/rate_oracle.py @@ -31,8 +31,8 @@ def get_rate_oracle(request: Request) -> RateOracle: - """Get RateOracle instance from the market data feed manager.""" - return request.app.state.market_data_feed_manager.rate_oracle + """Get RateOracle instance from the market data service.""" + return request.app.state.market_data_service.rate_oracle def get_file_system_util() -> FileSystemUtil: diff --git a/routers/trading.py b/routers/trading.py index 8dc6220f..a7f25d34 100644 --- a/routers/trading.py +++ b/routers/trading.py @@ -11,7 +11,7 @@ from pydantic import BaseModel from starlette import status -from deps import get_accounts_service, 
get_market_data_feed_manager +from deps import get_accounts_service, get_connector_service from models import ( ActiveOrderFilterRequest, FundingPaymentFilterRequest, @@ -33,7 +33,6 @@ async def place_trade( trade_request: TradeRequest, accounts_service: AccountsService = Depends(get_accounts_service), - market_data_manager=Depends(get_market_data_feed_manager), ): """ Place a buy or sell order using a specific account and connector. @@ -41,7 +40,6 @@ async def place_trade( Args: trade_request: Trading request with account, connector, trading pair, type, amount, etc. accounts_service: Injected accounts service - market_data_manager: Market data manager for price fetching Returns: TradeResponse with order ID and trading details @@ -64,7 +62,6 @@ async def place_trade( order_type=order_type_enum, price=trade_request.price, position_action=position_action_enum, - market_data_manager=market_data_manager, ) return TradeResponse( @@ -119,7 +116,11 @@ async def cancel_order( @router.post("/positions", response_model=PaginatedResponse) -async def get_positions(filter_request: PositionFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service)): +async def get_positions( + filter_request: PositionFilterRequest, + accounts_service: AccountsService = Depends(get_accounts_service), + connector_service = Depends(get_connector_service) +): """ Get current positions across all or filtered perpetual connectors. 
@@ -137,7 +138,7 @@ async def get_positions(filter_request: PositionFilterRequest, accounts_service: """ try: all_positions = [] - all_connectors = accounts_service.connector_manager.get_all_connectors() + all_connectors = connector_service.get_all_trading_connectors() # Filter accounts accounts_to_check = filter_request.account_names if filter_request.account_names else list(all_connectors.keys()) @@ -207,7 +208,8 @@ async def get_positions(filter_request: PositionFilterRequest, accounts_service: # Active Orders Management - Real-time from connectors @router.post("/orders/active", response_model=PaginatedResponse) async def get_active_orders( - filter_request: ActiveOrderFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service) + filter_request: ActiveOrderFilterRequest, + connector_service = Depends(get_connector_service) ): """ Get active (in-flight) orders across all or filtered accounts and connectors. @@ -226,7 +228,7 @@ async def get_active_orders( """ try: all_active_orders = [] - all_connectors = accounts_service.connector_manager.get_all_connectors() + all_connectors = connector_service.get_all_trading_connectors() # Use filter request values accounts_to_check = filter_request.account_names if filter_request.account_names else list(all_connectors.keys()) @@ -303,7 +305,11 @@ async def get_active_orders( # Historical Order Management - From registry/database @router.post("/orders/search", response_model=PaginatedResponse) -async def get_orders(filter_request: OrderFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service)): +async def get_orders( + filter_request: OrderFilterRequest, + accounts_service: AccountsService = Depends(get_accounts_service), + connector_service = Depends(get_connector_service) +): """ Get historical order data across all or filtered accounts from the database/registry. 
@@ -321,7 +327,7 @@ async def get_orders(filter_request: OrderFilterRequest, accounts_service: Accou accounts_to_check = filter_request.account_names else: # Get all accounts - all_connectors = accounts_service.connector_manager.get_all_connectors() + all_connectors = connector_service.get_all_trading_connectors() accounts_to_check = list(all_connectors.keys()) # Collect orders from all specified accounts @@ -400,7 +406,11 @@ async def get_orders(filter_request: OrderFilterRequest, accounts_service: Accou # Trade History @router.post("/trades", response_model=PaginatedResponse) -async def get_trades(filter_request: TradeFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service)): +async def get_trades( + filter_request: TradeFilterRequest, + accounts_service: AccountsService = Depends(get_accounts_service), + connector_service = Depends(get_connector_service) +): """ Get trade history across all or filtered accounts with complex filtering. @@ -418,7 +428,7 @@ async def get_trades(filter_request: TradeFilterRequest, accounts_service: Accou accounts_to_check = filter_request.account_names else: # Get all accounts - all_connectors = accounts_service.connector_manager.get_all_connectors() + all_connectors = connector_service.get_all_trading_connectors() accounts_to_check = list(all_connectors.keys()) # Collect trades from all specified accounts @@ -598,7 +608,9 @@ async def set_leverage( @router.post("/funding-payments", response_model=PaginatedResponse) async def get_funding_payments( - filter_request: FundingPaymentFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service) + filter_request: FundingPaymentFilterRequest, + accounts_service: AccountsService = Depends(get_accounts_service), + connector_service = Depends(get_connector_service) ): """ Get funding payment history across all or filtered perpetual connectors. 
@@ -617,7 +629,7 @@ async def get_funding_payments( """ try: all_funding_payments = [] - all_connectors = accounts_service.connector_manager.get_all_connectors() + all_connectors = connector_service.get_all_trading_connectors() # Filter accounts accounts_to_check = filter_request.account_names if filter_request.account_names else list(all_connectors.keys()) diff --git a/services/market_data_feed_manager.py b/services/market_data_feed_manager.py deleted file mode 100644 index 317b3e02..00000000 --- a/services/market_data_feed_manager.py +++ /dev/null @@ -1,601 +0,0 @@ -import asyncio -import time -from typing import Dict, Optional, Callable, List -import logging -from enum import Enum - -from hummingbot.core.rate_oracle.rate_oracle import RateOracle -from hummingbot.data_feed.candles_feed.data_types import CandlesConfig -from hummingbot.data_feed.market_data_provider import MarketDataProvider - - -class FeedType(Enum): - """Types of market data feeds that can be managed.""" - CANDLES = "candles" - ORDER_BOOK = "order_book" - TRADES = "trades" - TICKER = "ticker" - - -class MarketDataFeedManager: - """ - Generic manager for market data feeds lifecycle with automatic cleanup. - - This service wraps the MarketDataProvider and tracks when any type of market data feed - is last accessed. Feeds that haven't been accessed within the specified timeout period - are automatically stopped and cleaned up. - """ - - def __init__(self, market_data_provider: MarketDataProvider, rate_oracle: RateOracle, cleanup_interval: int = 300, feed_timeout: int = 600): - """ - Initialize the MarketDataFeedManager. 
- - Args: - market_data_provider: The underlying MarketDataProvider instance - cleanup_interval: How often to run cleanup (seconds, default: 5 minutes) - feed_timeout: How long to keep unused feeds alive (seconds, default: 10 minutes) - """ - self.market_data_provider = market_data_provider - self.rate_oracle = rate_oracle - self.cleanup_interval = cleanup_interval - self.feed_timeout = feed_timeout - self.last_access_times: Dict[str, float] = {} - self.feed_configs: Dict[str, tuple] = {} # Store feed configs for cleanup - self._cleanup_task: Optional[asyncio.Task] = None - self._is_running = False - self.logger = logging.getLogger(__name__) - - # Registry of cleanup functions for different feed types - self._cleanup_functions: Dict[FeedType, Callable] = { - FeedType.CANDLES: self._cleanup_candle_feed, - FeedType.ORDER_BOOK: self._cleanup_order_book_feed, - # Add more feed types as needed - } - - def start(self): - """Start the cleanup background task.""" - if not self._is_running: - self._is_running = True - self._cleanup_task = asyncio.create_task(self._cleanup_loop()) - self.rate_oracle.start() - self.logger.info(f"MarketDataFeedManager started with cleanup_interval={self.cleanup_interval}s, feed_timeout={self.feed_timeout}s") - - def stop(self): - """Stop the cleanup background task and all feeds.""" - self._is_running = False - if self._cleanup_task: - self._cleanup_task.cancel() - self._cleanup_task = None - - # Stop all feeds managed by the MarketDataProvider - self.market_data_provider.stop() - self.last_access_times.clear() - self.feed_configs.clear() - self.logger.info("MarketDataFeedManager stopped") - - def get_candles_feed(self, config: CandlesConfig): - """ - Get a candles feed and update its last access time. 
- - Args: - config: CandlesConfig for the desired feed - - Returns: - Candle feed instance - """ - feed_key = self._generate_feed_key(FeedType.CANDLES, config.connector, config.trading_pair, config.interval) - - # Update last access time and store config for cleanup - self.last_access_times[feed_key] = time.time() - self.feed_configs[feed_key] = (FeedType.CANDLES, config) - - # Get the feed from MarketDataProvider - feed = self.market_data_provider.get_candles_feed(config) - - self.logger.debug(f"Accessed candle feed: {feed_key}") - return feed - - def get_candles_df(self, connector_name: str, trading_pair: str, interval: str, max_records: int = 500): - """ - Get candles dataframe and update access time. - - Args: - connector_name: The connector name - trading_pair: The trading pair - interval: The candle interval - max_records: Maximum number of records - - Returns: - Candles dataframe - """ - config = CandlesConfig( - connector=connector_name, - trading_pair=trading_pair, - interval=interval, - max_records=max_records - ) - - feed_key = self._generate_feed_key(FeedType.CANDLES, connector_name, trading_pair, interval) - self.last_access_times[feed_key] = time.time() - self.feed_configs[feed_key] = (FeedType.CANDLES, config) - - # Use MarketDataProvider's convenience method - df = self.market_data_provider.get_candles_df(connector_name, trading_pair, interval, max_records) - - self.logger.debug(f"Accessed candle data: {feed_key}") - return df - - def get_order_book(self, connector_name: str, trading_pair: str): - """ - Get order book and update access time. 
- - Args: - connector_name: The connector name - trading_pair: The trading pair - - Returns: - Order book instance - """ - feed_key = self._generate_feed_key(FeedType.ORDER_BOOK, connector_name, trading_pair) - - # Update last access time - self.last_access_times[feed_key] = time.time() - self.feed_configs[feed_key] = (FeedType.ORDER_BOOK, (connector_name, trading_pair)) - - # Get order book from MarketDataProvider - order_book = self.market_data_provider.get_order_book(connector_name, trading_pair) - - self.logger.debug(f"Accessed order book: {feed_key}") - return order_book - - def get_order_book_snapshot(self, connector_name: str, trading_pair: str): - """ - Get order book snapshot and update access time. - - Args: - connector_name: The connector name - trading_pair: The trading pair - - Returns: - Tuple of bid and ask DataFrames - """ - feed_key = self._generate_feed_key(FeedType.ORDER_BOOK, connector_name, trading_pair) - - # Update last access time - self.last_access_times[feed_key] = time.time() - self.feed_configs[feed_key] = (FeedType.ORDER_BOOK, (connector_name, trading_pair)) - - # Get order book snapshot from MarketDataProvider - snapshot = self.market_data_provider.get_order_book_snapshot(connector_name, trading_pair) - - self.logger.debug(f"Accessed order book snapshot: {feed_key}") - return snapshot - - async def get_trading_rules(self, connector_name: str, trading_pairs: Optional[List[str]] = None) -> Dict[str, Dict]: - """ - Get trading rules for specified trading pairs from a connector. - - Args: - connector_name: Name of the connector - trading_pairs: List of trading pairs to get rules for. If None, get all available. 
- - Returns: - Dictionary mapping trading pairs to their trading rules - """ - try: - # Access connector through MarketDataProvider's _non_trading_connectors LazyDict - connector = self.market_data_provider._non_trading_connectors[connector_name] - - # Check if trading rules are initialized, if not update them - if not connector.trading_rules or len(connector.trading_rules) == 0: - await connector._update_trading_rules() - - # Get trading rules - if trading_pairs: - # Get rules for specific trading pairs - result = {} - for trading_pair in trading_pairs: - if trading_pair in connector.trading_rules: - rule = connector.trading_rules[trading_pair] - result[trading_pair] = { - "min_order_size": float(rule.min_order_size), - "max_order_size": float(rule.max_order_size) if rule.max_order_size else None, - "min_price_increment": float(rule.min_price_increment), - "min_base_amount_increment": float(rule.min_base_amount_increment), - "min_quote_amount_increment": float(rule.min_quote_amount_increment), - "min_notional_size": float(rule.min_notional_size), - "min_order_value": float(rule.min_order_value), - "max_price_significant_digits": float(rule.max_price_significant_digits), - "supports_limit_orders": rule.supports_limit_orders, - "supports_market_orders": rule.supports_market_orders, - "buy_order_collateral_token": rule.buy_order_collateral_token, - "sell_order_collateral_token": rule.sell_order_collateral_token, - } - else: - result[trading_pair] = {"error": f"Trading pair {trading_pair} not found"} - else: - # Get all trading rules - result = {} - for trading_pair, rule in connector.trading_rules.items(): - result[trading_pair] = { - "min_order_size": float(rule.min_order_size), - "max_order_size": float(rule.max_order_size) if rule.max_order_size else None, - "min_price_increment": float(rule.min_price_increment), - "min_base_amount_increment": float(rule.min_base_amount_increment), - "min_quote_amount_increment": float(rule.min_quote_amount_increment), - 
"min_notional_size": float(rule.min_notional_size), - "min_order_value": float(rule.min_order_value), - "max_price_significant_digits": float(rule.max_price_significant_digits), - "supports_limit_orders": rule.supports_limit_orders, - "supports_market_orders": rule.supports_market_orders, - "buy_order_collateral_token": rule.buy_order_collateral_token, - "sell_order_collateral_token": rule.sell_order_collateral_token, - } - - self.logger.debug(f"Retrieved trading rules for {connector_name}: {len(result)} pairs") - return result - - except Exception as e: - self.logger.error(f"Error getting trading rules for {connector_name}: {e}") - return {"error": str(e)} - - async def get_prices(self, connector_name: str, trading_pairs: List[str]) -> Dict[str, float]: - """ - Get current prices for specified trading pairs. - - Args: - connector_name: Name of the connector - trading_pairs: List of trading pairs to get prices for - - Returns: - Dictionary mapping trading pairs to their current prices - """ - try: - # Access connector through MarketDataProvider's _non_trading_connectors LazyDict - connector = self.market_data_provider._non_trading_connectors[connector_name] - - # Get last traded prices - prices = await connector.get_last_traded_prices(trading_pairs) - - # Convert Decimal to float for JSON serialization - result = {pair: float(price) for pair, price in prices.items()} - - self.logger.debug(f"Retrieved prices for {connector_name}: {len(result)} pairs") - return result - - except Exception as e: - self.logger.error(f"Error getting prices for {connector_name}: {e}") - return {"error": str(e)} - - async def get_funding_info(self, connector_name: str, trading_pair: str) -> Dict: - """ - Get funding information for a perpetual trading pair. 
- - Args: - connector_name: Name of the connector - trading_pair: Trading pair to get funding info for - - Returns: - Dictionary containing funding information - """ - try: - # Access connector through MarketDataProvider's _non_trading_connectors LazyDict - connector = self.market_data_provider._non_trading_connectors[connector_name] - - # Check if this is a perpetual connector and has funding info support - if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: - orderbook_ds = connector._orderbook_ds - - # Get funding info from the order book data source - funding_info = await orderbook_ds.get_funding_info(trading_pair) - - if funding_info: - result = { - "trading_pair": trading_pair, - "funding_rate": float(funding_info.rate) if funding_info.rate else None, - "next_funding_time": float(funding_info.next_funding_utc_timestamp) if funding_info.next_funding_utc_timestamp else None, - "mark_price": float(funding_info.mark_price) if funding_info.mark_price else None, - "index_price": float(funding_info.index_price) if funding_info.index_price else None, - } - - self.logger.debug(f"Retrieved funding info for {connector_name}/{trading_pair}") - return result - else: - return {"error": f"No funding info available for {trading_pair}"} - else: - return {"error": f"Funding info not supported for {connector_name}"} - - except Exception as e: - self.logger.error(f"Error getting funding info for {connector_name}/{trading_pair}: {e}") - return {"error": str(e)} - - async def get_order_book_data(self, connector_name: str, trading_pair: str, depth: int = 10) -> Dict: - """ - Get order book data using the connector's order book data source. 
- - Args: - connector_name: Name of the connector - trading_pair: Trading pair to get order book for - depth: Number of bid/ask levels to return - - Returns: - Dictionary containing bid and ask data - """ - try: - # Access connector through MarketDataProvider's _non_trading_connectors LazyDict - connector = self.market_data_provider._non_trading_connectors[connector_name] - - # Access the order book data source - if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: - orderbook_ds = connector._orderbook_ds - - # Get new order book using the data source method - order_book = await orderbook_ds.get_new_order_book(trading_pair) - snapshot = order_book.snapshot - - result = { - "trading_pair": trading_pair, - "bids": snapshot[0].loc[:(depth - 1), ["price", "amount"]].values.tolist(), - "asks": snapshot[1].loc[:(depth - 1), ["price", "amount"]].values.tolist(), - "timestamp": time.time() - } - - self.logger.debug(f"Retrieved order book for {connector_name}/{trading_pair}") - return result - else: - return {"error": f"Order book data source not available for {connector_name}"} - - except Exception as e: - self.logger.error(f"Error getting order book for {connector_name}/{trading_pair}: {e}") - return {"error": str(e)} - - async def get_order_book_query_result(self, connector_name: str, trading_pair: str, is_buy: bool, **kwargs) -> Dict: - """ - Generic method for order book queries using fresh OrderBook from data source. 
- - Args: - connector_name: Name of the connector - trading_pair: Trading pair - is_buy: True for buy side, False for sell side - **kwargs: Additional parameters for specific query types - - Returns: - Dictionary containing query results - """ - try: - current_time = time.time() - - # Access connector through MarketDataProvider's _non_trading_connectors LazyDict - connector = self.market_data_provider._non_trading_connectors[connector_name] - - # Access the order book data source - if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: - orderbook_ds = connector._orderbook_ds - - # Get fresh order book using the data source method - order_book = await orderbook_ds.get_new_order_book(trading_pair) - - if 'volume' in kwargs: - # Get price for volume - result = order_book.get_price_for_volume(is_buy, kwargs['volume']) - return { - "trading_pair": trading_pair, - "is_buy": is_buy, - "query_volume": kwargs['volume'], - "result_price": float(result.result_price) if result.result_price else None, - "result_volume": float(result.result_volume) if result.result_volume else None, - "timestamp": current_time - } - - elif 'price' in kwargs: - # Get volume for price - result = order_book.get_volume_for_price(is_buy, kwargs['price']) - return { - "trading_pair": trading_pair, - "is_buy": is_buy, - "query_price": kwargs['price'], - "result_volume": float(result.result_volume) if result.result_volume else None, - "result_price": float(result.result_price) if result.result_price else None, - "timestamp": current_time - } - - elif 'quote_volume' in kwargs: - # Get price for quote volume - result = order_book.get_price_for_quote_volume(is_buy, kwargs['quote_volume']) - return { - "trading_pair": trading_pair, - "is_buy": is_buy, - "query_quote_volume": kwargs['quote_volume'], - "result_price": float(result.result_price) if result.result_price else None, - "result_volume": float(result.result_volume) if result.result_volume else None, - "timestamp": current_time - } - - 
elif 'quote_price' in kwargs: - # Get quote volume for price - result = order_book.get_quote_volume_for_price(is_buy, kwargs['quote_price']) - - # Check if quote crosses the book (no available volume at this price) - if result.result_volume is None or result.result_price is None: - # Get current market prices for comparison - snapshot = order_book.snapshot - best_bid = float(snapshot[0].iloc[0]["price"]) if not snapshot[0].empty else None - best_ask = float(snapshot[1].iloc[0]["price"]) if not snapshot[1].empty else None - mid_price = (best_bid + best_ask) / 2 if best_bid and best_ask else None - - # Determine if quote crosses the book - query_price = float(kwargs['quote_price']) - crossed_reason = None - suggested_price = None - - if is_buy: - # For buy orders, crossing occurs when price > best_ask - if best_ask and query_price > best_ask: - crossed_reason = f"Buy price {query_price} exceeds best ask {best_ask}" - suggested_price = best_ask - elif best_bid and query_price < best_bid: - crossed_reason = f"Buy price {query_price} below best bid {best_bid} - no liquidity available" - suggested_price = best_bid - else: - # For sell orders, crossing occurs when price < best_bid - if best_bid and query_price < best_bid: - crossed_reason = f"Sell price {query_price} below best bid {best_bid}" - suggested_price = best_bid - elif best_ask and query_price > best_ask: - crossed_reason = f"Sell price {query_price} above best ask {best_ask} - no liquidity available" - suggested_price = best_ask - - return { - "trading_pair": trading_pair, - "is_buy": is_buy, - "query_price": query_price, - "result_volume": None, - "result_quote_volume": None, - "crossed_book": True, - "crossed_reason": crossed_reason, - "best_bid": best_bid, - "best_ask": best_ask, - "mid_price": mid_price, - "suggested_price": suggested_price, - "timestamp": current_time - } - - return { - "trading_pair": trading_pair, - "is_buy": is_buy, - "query_price": kwargs['quote_price'], - "result_quote_volume": 
float(result.result_volume) if result.result_volume else None, - "crossed_book": False, - "timestamp": current_time - } - - elif 'vwap_volume' in kwargs: - # Get VWAP for volume - result = order_book.get_vwap_for_volume(is_buy, kwargs['vwap_volume']) - return { - "trading_pair": trading_pair, - "is_buy": is_buy, - "query_volume": kwargs['vwap_volume'], - "average_price": float(result.result_price) if result.result_price else None, - "result_volume": float(result.result_volume) if result.result_volume else None, - "timestamp": current_time - } - else: - return {"error": "Invalid query parameters"} - else: - return {"error": f"Order book data source not available for {connector_name}"} - - except Exception as e: - self.logger.error(f"Error in order book query for {connector_name}/{trading_pair}: {e}") - return {"error": str(e)} - - async def _cleanup_loop(self): - """Background task that periodically cleans up unused feeds.""" - while self._is_running: - try: - await self._cleanup_unused_feeds() - await asyncio.sleep(self.cleanup_interval) - except asyncio.CancelledError: - break - except Exception as e: - self.logger.error(f"Error in cleanup loop: {e}", exc_info=True) - await asyncio.sleep(self.cleanup_interval) - - async def _cleanup_unused_feeds(self): - """Clean up feeds that haven't been accessed within the timeout period.""" - current_time = time.time() - feeds_to_remove = [] - - for feed_key, last_access_time in self.last_access_times.items(): - if current_time - last_access_time > self.feed_timeout: - feeds_to_remove.append(feed_key) - - for feed_key in feeds_to_remove: - try: - # Get feed type and config - feed_type, config = self.feed_configs[feed_key] - - # Use appropriate cleanup function - cleanup_func = self._cleanup_functions.get(feed_type) - if cleanup_func: - cleanup_func(config) - - # Remove from tracking - del self.last_access_times[feed_key] - del self.feed_configs[feed_key] - - self.logger.info(f"Cleaned up unused {feed_type.value} feed: 
{feed_key}") - - except Exception as e: - self.logger.error(f"Error cleaning up feed {feed_key}: {e}", exc_info=True) - - if feeds_to_remove: - self.logger.info(f"Cleaned up {len(feeds_to_remove)} unused market data feeds") - - def _cleanup_candle_feed(self, config: CandlesConfig): - """Clean up a candle feed.""" - self.market_data_provider.stop_candle_feed(config) - - def _cleanup_order_book_feed(self, config: tuple): - """Clean up an order book feed.""" - # Order books are typically managed by connectors, so we might not need explicit cleanup - # This is a placeholder for future implementation if needed - pass - - def _generate_feed_key(self, feed_type: FeedType, connector: str, trading_pair: str, interval: str = None) -> str: - """Generate a unique key for a market data feed.""" - if interval: - return f"{feed_type.value}_{connector}_{trading_pair}_{interval}" - else: - return f"{feed_type.value}_{connector}_{trading_pair}" - - def get_active_feeds_info(self) -> Dict[str, dict]: - """ - Get information about currently active feeds. - - Returns: - Dictionary with feed information including last access times and feed types - """ - current_time = time.time() - result = {} - - for feed_key, last_access in self.last_access_times.items(): - feed_type, config = self.feed_configs.get(feed_key, (None, None)) - result[feed_key] = { - "feed_type": feed_type.value if feed_type else "unknown", - "last_access_time": last_access, - "seconds_since_access": current_time - last_access, - "will_expire_in": max(0, self.feed_timeout - (current_time - last_access)), - "config": str(config) # String representation of config - } - - return result - - def manually_cleanup_feed(self, feed_type: FeedType, connector: str, trading_pair: str, interval: str = None): - """ - Manually cleanup a specific feed. 
- - Args: - feed_type: Type of feed to cleanup - connector: Connector name - trading_pair: Trading pair - interval: Interval (for candles only) - """ - feed_key = self._generate_feed_key(feed_type, connector, trading_pair, interval) - - if feed_key in self.feed_configs: - feed_type_obj, config = self.feed_configs[feed_key] - cleanup_func = self._cleanup_functions.get(feed_type_obj) - - if cleanup_func: - try: - cleanup_func(config) - del self.last_access_times[feed_key] - del self.feed_configs[feed_key] - self.logger.info(f"Manually cleaned up feed: {feed_key}") - except Exception as e: - self.logger.error(f"Error manually cleaning up feed {feed_key}: {e}", exc_info=True) - else: - self.logger.warning(f"No cleanup function for feed type: {feed_type}") - else: - self.logger.warning(f"Feed not found for cleanup: {feed_key}") \ No newline at end of file diff --git a/services/market_data_service.py b/services/market_data_service.py new file mode 100644 index 00000000..e24bd39f --- /dev/null +++ b/services/market_data_service.py @@ -0,0 +1,701 @@ +""" +Market Data Service - Centralized market data access with proper connector integration. + +This service provides access to market data (candles, order books, prices, trading rules) +using the UnifiedConnectorService to ensure proper connector usage. 
+""" +import asyncio +import time +import logging +from typing import Dict, Optional, List, Any, Tuple +from decimal import Decimal +from enum import Enum + +from hummingbot.core.rate_oracle.rate_oracle import RateOracle +from hummingbot.data_feed.candles_feed.data_types import CandlesConfig +from hummingbot.data_feed.candles_feed.candles_factory import CandlesFactory + + +logger = logging.getLogger(__name__) + + +class FeedType(Enum): + """Types of market data feeds that can be managed.""" + CANDLES = "candles" + ORDER_BOOK = "order_book" + TRADES = "trades" + TICKER = "ticker" + + +class MarketDataService: + """ + Centralized market data service using UnifiedConnectorService. + + This service manages: + - Candles feeds with automatic lifecycle management + - Order book access via UnifiedConnectorService + - Price and trading rules queries + - Feed cleanup for unused data streams + """ + + def __init__( + self, + connector_service: "UnifiedConnectorService", + rate_oracle: RateOracle, + cleanup_interval: int = 300, + feed_timeout: int = 600 + ): + """ + Initialize the MarketDataService. 
+ + Args: + connector_service: UnifiedConnectorService for connector access + rate_oracle: RateOracle instance for price conversions + cleanup_interval: How often to run cleanup (seconds, default: 5 minutes) + feed_timeout: How long to keep unused feeds alive (seconds, default: 10 minutes) + """ + self._connector_service = connector_service + self._rate_oracle = rate_oracle + self._cleanup_interval = cleanup_interval + self._feed_timeout = feed_timeout + + # Candle feeds management + self._candle_feeds: Dict[str, Any] = {} + self._last_access_times: Dict[str, float] = {} + self._feed_configs: Dict[str, Tuple[FeedType, Any]] = {} + + # Background tasks + self._cleanup_task: Optional[asyncio.Task] = None + self._is_running = False + + logger.info("MarketDataService initialized") + + # ==================== Lifecycle ==================== + + def start(self): + """Start the market data service.""" + if not self._is_running: + self._is_running = True + self._cleanup_task = asyncio.create_task(self._cleanup_loop()) + self._rate_oracle.start() + logger.info( + f"MarketDataService started with cleanup_interval={self._cleanup_interval}s, " + f"feed_timeout={self._feed_timeout}s" + ) + + def stop(self): + """Stop the market data service and cleanup all feeds.""" + self._is_running = False + + if self._cleanup_task: + self._cleanup_task.cancel() + self._cleanup_task = None + + # Stop all candle feeds + for feed_key, feed in self._candle_feeds.items(): + try: + feed.stop() + except Exception as e: + logger.error(f"Error stopping candle feed {feed_key}: {e}") + + self._candle_feeds.clear() + self._last_access_times.clear() + self._feed_configs.clear() + + logger.info("MarketDataService stopped") + + # ==================== Order Book Access ==================== + + async def initialize_order_book( + self, + connector_name: str, + trading_pair: str, + account_name: Optional[str] = None, + timeout: float = 30.0 + ) -> bool: + """ + Initialize an order book for a trading pair. 
+ + Uses the UnifiedConnectorService to get the best available connector + (prefers trading connectors which already have order book trackers running). + + Args: + connector_name: Exchange connector name + trading_pair: Trading pair (e.g., "SOL-FDUSD") + account_name: Optional account name for trading connector preference + timeout: Timeout for waiting for order book to be ready + + Returns: + True if order book is ready, False otherwise + """ + return await self._connector_service.initialize_order_book( + connector_name=connector_name, + trading_pair=trading_pair, + account_name=account_name, + timeout=timeout + ) + + async def remove_trading_pair( + self, + connector_name: str, + trading_pair: str, + account_name: Optional[str] = None + ) -> bool: + """ + Remove a trading pair from order book tracking. + + Cleans up order book resources for a trading pair that is no longer needed. + + Args: + connector_name: Exchange connector name + trading_pair: Trading pair to remove + account_name: Optional account name for trading connector preference + + Returns: + True if successfully removed, False otherwise + """ + # Clean up our local tracking for this feed + feed_key = self._generate_feed_key(FeedType.ORDER_BOOK, connector_name, trading_pair) + self._last_access_times.pop(feed_key, None) + self._feed_configs.pop(feed_key, None) + + return await self._connector_service.remove_trading_pair( + connector_name=connector_name, + trading_pair=trading_pair, + account_name=account_name + ) + + def get_order_book(self, connector_name: str, trading_pair: str, account_name: Optional[str] = None): + """ + Get order book for a trading pair. 
+ + Args: + connector_name: Exchange connector name + trading_pair: Trading pair + account_name: Optional account name for trading connector preference + + Returns: + OrderBook instance or None + """ + feed_key = self._generate_feed_key(FeedType.ORDER_BOOK, connector_name, trading_pair) + self._last_access_times[feed_key] = time.time() + self._feed_configs[feed_key] = (FeedType.ORDER_BOOK, (connector_name, trading_pair)) + + connector = self._connector_service.get_best_connector_for_market( + connector_name, account_name + ) + + if connector and hasattr(connector, 'order_book_tracker'): + tracker = connector.order_book_tracker + if tracker and trading_pair in tracker.order_books: + return tracker.order_books[trading_pair] + + logger.warning(f"No order book found for {connector_name}/{trading_pair}") + return None + + def get_order_book_snapshot( + self, + connector_name: str, + trading_pair: str, + account_name: Optional[str] = None + ) -> Optional[Tuple]: + """ + Get order book snapshot (bids, asks DataFrames). + + Args: + connector_name: Exchange connector name + trading_pair: Trading pair + account_name: Optional account name for trading connector preference + + Returns: + Tuple of (bids_df, asks_df) or None + """ + order_book = self.get_order_book(connector_name, trading_pair, account_name) + if order_book: + try: + return order_book.snapshot + except Exception as e: + logger.error(f"Error getting order book snapshot: {e}") + return None + + async def get_order_book_data( + self, + connector_name: str, + trading_pair: str, + depth: int = 10, + account_name: Optional[str] = None + ) -> Dict: + """ + Get order book data as a dictionary. 
+ + Args: + connector_name: Exchange connector name + trading_pair: Trading pair + depth: Number of bid/ask levels to return + account_name: Optional account name for trading connector preference + + Returns: + Dictionary with bids, asks, and metadata + """ + try: + connector = self._connector_service.get_best_connector_for_market( + connector_name, account_name + ) + + if not connector: + return {"error": f"No connector available for {connector_name}"} + + # Try to get from existing order book tracker + if hasattr(connector, 'order_book_tracker') and connector.order_book_tracker: + tracker = connector.order_book_tracker + if trading_pair in tracker.order_books: + order_book = tracker.order_books[trading_pair] + snapshot = order_book.snapshot + + return { + "trading_pair": trading_pair, + "bids": snapshot[0].head(depth)[["price", "amount"]].values.tolist(), + "asks": snapshot[1].head(depth)[["price", "amount"]].values.tolist(), + "timestamp": time.time() + } + + # Fallback to getting fresh order book from data source + if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: + orderbook_ds = connector._orderbook_ds + order_book = await orderbook_ds.get_new_order_book(trading_pair) + snapshot = order_book.snapshot + + return { + "trading_pair": trading_pair, + "bids": snapshot[0].head(depth)[["price", "amount"]].values.tolist(), + "asks": snapshot[1].head(depth)[["price", "amount"]].values.tolist(), + "timestamp": time.time() + } + + return {"error": f"Order book not available for {connector_name}/{trading_pair}"} + + except Exception as e: + logger.error(f"Error getting order book data for {connector_name}/{trading_pair}: {e}") + return {"error": str(e)} + + async def get_order_book_query_result( + self, + connector_name: str, + trading_pair: str, + is_buy: bool, + account_name: Optional[str] = None, + **kwargs + ) -> Dict: + """ + Query order book for price/volume calculations. 
+ + Args: + connector_name: Exchange connector name + trading_pair: Trading pair + is_buy: True for buy side, False for sell side + account_name: Optional account name + **kwargs: Query parameters (volume, price, quote_volume, etc.) + + Returns: + Query result dictionary + """ + try: + current_time = time.time() + connector = self._connector_service.get_best_connector_for_market( + connector_name, account_name + ) + + if not connector: + return {"error": f"No connector available for {connector_name}"} + + # Get order book + order_book = None + if hasattr(connector, 'order_book_tracker') and connector.order_book_tracker: + tracker = connector.order_book_tracker + if trading_pair in tracker.order_books: + order_book = tracker.order_books[trading_pair] + + if not order_book and hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: + order_book = await connector._orderbook_ds.get_new_order_book(trading_pair) + + if not order_book: + return {"error": f"No order book available for {connector_name}/{trading_pair}"} + + # Process query + if 'volume' in kwargs: + result = order_book.get_price_for_volume(is_buy, kwargs['volume']) + return { + "trading_pair": trading_pair, + "is_buy": is_buy, + "query_volume": kwargs['volume'], + "result_price": float(result.result_price) if result.result_price else None, + "result_volume": float(result.result_volume) if result.result_volume else None, + "timestamp": current_time + } + + elif 'price' in kwargs: + result = order_book.get_volume_for_price(is_buy, kwargs['price']) + return { + "trading_pair": trading_pair, + "is_buy": is_buy, + "query_price": kwargs['price'], + "result_volume": float(result.result_volume) if result.result_volume else None, + "result_price": float(result.result_price) if result.result_price else None, + "timestamp": current_time + } + + elif 'vwap_volume' in kwargs: + result = order_book.get_vwap_for_volume(is_buy, kwargs['vwap_volume']) + return { + "trading_pair": trading_pair, + "is_buy": is_buy, + 
"query_volume": kwargs['vwap_volume'], + "average_price": float(result.result_price) if result.result_price else None, + "result_volume": float(result.result_volume) if result.result_volume else None, + "timestamp": current_time + } + + else: + return {"error": "Invalid query parameters"} + + except Exception as e: + logger.error(f"Error in order book query for {connector_name}/{trading_pair}: {e}") + return {"error": str(e)} + + # ==================== Candles ==================== + + def get_candles_feed(self, config: CandlesConfig): + """ + Get or create a candles feed. + + Args: + config: CandlesConfig for the desired feed + + Returns: + Candle feed instance + """ + feed_key = self._generate_feed_key( + FeedType.CANDLES, config.connector, config.trading_pair, config.interval + ) + + self._last_access_times[feed_key] = time.time() + self._feed_configs[feed_key] = (FeedType.CANDLES, config) + + if feed_key not in self._candle_feeds: + feed = CandlesFactory.get_candle(config) + feed.start() + self._candle_feeds[feed_key] = feed + logger.info(f"Created candle feed: {feed_key}") + + return self._candle_feeds[feed_key] + + def get_candles_df( + self, + connector_name: str, + trading_pair: str, + interval: str, + max_records: int = 500 + ): + """ + Get candles dataframe. 
+ + Args: + connector_name: Exchange connector name + trading_pair: Trading pair + interval: Candle interval + max_records: Maximum number of records + + Returns: + Pandas DataFrame with candle data + """ + config = CandlesConfig( + connector=connector_name, + trading_pair=trading_pair, + interval=interval, + max_records=max_records + ) + + feed = self.get_candles_feed(config) + return feed.candles_df + + def stop_candle_feed(self, config: CandlesConfig): + """Stop a specific candle feed.""" + feed_key = self._generate_feed_key( + FeedType.CANDLES, config.connector, config.trading_pair, config.interval + ) + + if feed_key in self._candle_feeds: + try: + self._candle_feeds[feed_key].stop() + del self._candle_feeds[feed_key] + logger.info(f"Stopped candle feed: {feed_key}") + except Exception as e: + logger.error(f"Error stopping candle feed {feed_key}: {e}") + + # ==================== Prices ==================== + + async def get_prices( + self, + connector_name: str, + trading_pairs: List[str], + account_name: Optional[str] = None + ) -> Dict[str, float]: + """ + Get current prices for trading pairs. + + Args: + connector_name: Exchange connector name + trading_pairs: List of trading pairs + account_name: Optional account name for trading connector preference + + Returns: + Dictionary mapping trading pairs to prices + """ + try: + connector = self._connector_service.get_best_connector_for_market( + connector_name, account_name + ) + + if not connector: + return {"error": f"No connector available for {connector_name}"} + + prices = await connector.get_last_traded_prices(trading_pairs) + return {pair: float(price) for pair, price in prices.items()} + + except Exception as e: + logger.error(f"Error getting prices for {connector_name}: {e}") + return {"error": str(e)} + + def get_rate(self, base: str, quote: str = "USD") -> Optional[Decimal]: + """ + Get exchange rate from rate oracle. 
+ + Args: + base: Base currency + quote: Quote currency (default: USD) + + Returns: + Exchange rate or None + """ + try: + return self._rate_oracle.get_pair_rate(f"{base}-{quote}") + except Exception as e: + logger.debug(f"Rate not available for {base}-{quote}: {e}") + return None + + # ==================== Trading Rules ==================== + + async def get_trading_rules( + self, + connector_name: str, + trading_pairs: Optional[List[str]] = None, + account_name: Optional[str] = None + ) -> Dict[str, Dict]: + """ + Get trading rules for trading pairs. + + Args: + connector_name: Exchange connector name + trading_pairs: List of trading pairs (None for all) + account_name: Optional account name + + Returns: + Dictionary mapping trading pairs to their rules + """ + try: + connector = self._connector_service.get_best_connector_for_market( + connector_name, account_name + ) + + if not connector: + return {"error": f"No connector available for {connector_name}"} + + # Ensure trading rules are loaded + if not connector.trading_rules or len(connector.trading_rules) == 0: + await connector._update_trading_rules() + + result = {} + rules_to_process = trading_pairs if trading_pairs else connector.trading_rules.keys() + + for trading_pair in rules_to_process: + if trading_pair in connector.trading_rules: + rule = connector.trading_rules[trading_pair] + result[trading_pair] = { + "min_order_size": float(rule.min_order_size), + "max_order_size": float(rule.max_order_size) if rule.max_order_size else None, + "min_price_increment": float(rule.min_price_increment), + "min_base_amount_increment": float(rule.min_base_amount_increment), + "min_quote_amount_increment": float(rule.min_quote_amount_increment), + "min_notional_size": float(rule.min_notional_size), + "min_order_value": float(rule.min_order_value), + "max_price_significant_digits": float(rule.max_price_significant_digits), + "supports_limit_orders": rule.supports_limit_orders, + "supports_market_orders": 
rule.supports_market_orders, + "buy_order_collateral_token": rule.buy_order_collateral_token, + "sell_order_collateral_token": rule.sell_order_collateral_token, + } + elif trading_pairs: + result[trading_pair] = {"error": f"Trading pair {trading_pair} not found"} + + return result + + except Exception as e: + logger.error(f"Error getting trading rules for {connector_name}: {e}") + return {"error": str(e)} + + # ==================== Funding Info ==================== + + async def get_funding_info( + self, + connector_name: str, + trading_pair: str, + account_name: Optional[str] = None + ) -> Dict: + """ + Get funding information for perpetual trading pairs. + + Args: + connector_name: Exchange connector name + trading_pair: Trading pair + account_name: Optional account name + + Returns: + Dictionary with funding information + """ + try: + connector = self._connector_service.get_best_connector_for_market( + connector_name, account_name + ) + + if not connector: + return {"error": f"No connector available for {connector_name}"} + + if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: + orderbook_ds = connector._orderbook_ds + funding_info = await orderbook_ds.get_funding_info(trading_pair) + + if funding_info: + return { + "trading_pair": trading_pair, + "funding_rate": float(funding_info.rate) if funding_info.rate else None, + "next_funding_time": float(funding_info.next_funding_utc_timestamp) if funding_info.next_funding_utc_timestamp else None, + "mark_price": float(funding_info.mark_price) if funding_info.mark_price else None, + "index_price": float(funding_info.index_price) if funding_info.index_price else None, + } + else: + return {"error": f"No funding info available for {trading_pair}"} + else: + return {"error": f"Funding info not supported for {connector_name}"} + + except Exception as e: + logger.error(f"Error getting funding info for {connector_name}/{trading_pair}: {e}") + return {"error": str(e)} + + # ==================== Feed Management 
==================== + + def get_active_feeds_info(self) -> Dict[str, dict]: + """Get information about active feeds.""" + current_time = time.time() + result = {} + + for feed_key, last_access in self._last_access_times.items(): + feed_type, config = self._feed_configs.get(feed_key, (None, None)) + result[feed_key] = { + "feed_type": feed_type.value if feed_type else "unknown", + "last_access_time": last_access, + "seconds_since_access": current_time - last_access, + "will_expire_in": max(0, self._feed_timeout - (current_time - last_access)), + "config": str(config) + } + + return result + + def manually_cleanup_feed( + self, + feed_type: FeedType, + connector: str, + trading_pair: str, + interval: str = None + ): + """Manually cleanup a specific feed.""" + feed_key = self._generate_feed_key(feed_type, connector, trading_pair, interval) + + if feed_key in self._feed_configs: + try: + if feed_type == FeedType.CANDLES and feed_key in self._candle_feeds: + self._candle_feeds[feed_key].stop() + del self._candle_feeds[feed_key] + + del self._last_access_times[feed_key] + del self._feed_configs[feed_key] + logger.info(f"Manually cleaned up feed: {feed_key}") + except Exception as e: + logger.error(f"Error manually cleaning up feed {feed_key}: {e}") + else: + logger.warning(f"Feed not found for cleanup: {feed_key}") + + # ==================== Internal ==================== + + async def _cleanup_loop(self): + """Background task to cleanup unused feeds.""" + while self._is_running: + try: + await self._cleanup_unused_feeds() + await asyncio.sleep(self._cleanup_interval) + except asyncio.CancelledError: + break + except Exception as e: + logger.error(f"Error in cleanup loop: {e}", exc_info=True) + await asyncio.sleep(self._cleanup_interval) + + async def _cleanup_unused_feeds(self): + """Clean up feeds that haven't been accessed within timeout.""" + current_time = time.time() + feeds_to_remove = [] + + for feed_key, last_access_time in self._last_access_times.items(): + if 
current_time - last_access_time > self._feed_timeout: + feeds_to_remove.append(feed_key) + + for feed_key in feeds_to_remove: + try: + feed_type, config = self._feed_configs[feed_key] + + if feed_type == FeedType.CANDLES and feed_key in self._candle_feeds: + self._candle_feeds[feed_key].stop() + del self._candle_feeds[feed_key] + + del self._last_access_times[feed_key] + del self._feed_configs[feed_key] + + logger.info(f"Cleaned up unused {feed_type.value} feed: {feed_key}") + + except Exception as e: + logger.error(f"Error cleaning up feed {feed_key}: {e}", exc_info=True) + + if feeds_to_remove: + logger.info(f"Cleaned up {len(feeds_to_remove)} unused market data feeds") + + def _generate_feed_key( + self, + feed_type: FeedType, + connector: str, + trading_pair: str, + interval: str = None + ) -> str: + """Generate a unique key for a market data feed.""" + if interval: + return f"{feed_type.value}_{connector}_{trading_pair}_{interval}" + return f"{feed_type.value}_{connector}_{trading_pair}" + + # ==================== Properties ==================== + + @property + def rate_oracle(self) -> RateOracle: + """Get the rate oracle instance.""" + return self._rate_oracle + + @property + def connector_service(self) -> "UnifiedConnectorService": + """Get the connector service instance.""" + return self._connector_service From 78d68ecd4f72daea0aacdc751f69b61fe8c62cee Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 20 Jan 2026 20:27:17 -0300 Subject: [PATCH 07/20] (feat) simplify accounts service --- services/accounts_service.py | 742 ++++++++++++++++++++++++++--------- 1 file changed, 551 insertions(+), 191 deletions(-) diff --git a/services/accounts_service.py b/services/accounts_service.py index 82a1e6ae..9a7a883e 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -1,26 +1,471 @@ import asyncio import logging +import time from datetime import datetime, timezone from decimal import Decimal -from typing import Dict, List, Optional +from 
typing import TYPE_CHECKING, Dict, List, Optional, Set from fastapi import HTTPException from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger +from hummingbot.connector.connector_base import ConnectorBase from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction, PositionMode -from hummingbot.strategy_v2.executors.data_types import ConnectorPair from config import settings from database import AsyncDatabaseManager, AccountRepository, OrderRepository, TradeRepository, FundingRepository -from services.market_data_feed_manager import MarketDataFeedManager from services.gateway_client import GatewayClient from services.gateway_transaction_poller import GatewayTransactionPoller -from utils.connector_manager import ConnectorManager from utils.file_system import fs_util # Create module-specific logger logger = logging.getLogger(__name__) +class AccountTradingInterface: + """ + ScriptStrategyBase-compatible interface for executor trading. + + This class provides the exact interface that Hummingbot executors expect + from a strategy object, backed by AccountsService resources. + + IMPORTANT: This class does NOT maintain its own connector cache. Instead, it + uses the shared ConnectorManager via AccountsService which is the single source + of truth for all connector instances. + + Executors use the following interface from strategy: + - current_timestamp: float property + - buy(connector_name, trading_pair, amount, order_type, price, position_action) -> str + - sell(connector_name, trading_pair, amount, order_type, price, position_action) -> str + - cancel(connector_name, trading_pair, order_id) -> str + - get_active_orders(connector_name) -> List + + ExecutorBase also accesses: + - connectors: Dict[str, ConnectorBase] (accessed directly in ExecutorBase.__init__) + """ + + def __init__( + self, + accounts_service: 'AccountsService', + account_name: str + ): + """ + Initialize AccountTradingInterface. 
+ + Args: + accounts_service: AccountsService instance for connector access + account_name: Account to use for connectors + """ + self._accounts_service = accounts_service + self._account_name = account_name + + # Track active markets (connector_name -> set of trading_pairs) + self._markets: Dict[str, Set[str]] = {} + + # Timestamp tracking + self._current_timestamp: float = time.time() + + # Lock for async operations + self._lock = asyncio.Lock() + + @property + def account_name(self) -> str: + """Return the account name for this trading interface.""" + return self._account_name + + @property + def connectors(self) -> Dict[str, ConnectorBase]: + """ + Return connectors for this account from the connector service. + + This returns the actual connectors that are already initialized and running, + avoiding any duplicate caching or connector management. + """ + if not self._accounts_service._connector_service: + return {} + all_connectors = self._accounts_service._connector_service.get_all_trading_connectors() + return all_connectors.get(self._account_name, {}) + + @property + def markets(self) -> Dict[str, Set[str]]: + """Return active markets configuration.""" + return self._markets + + @property + def current_timestamp(self) -> float: + """Return current timestamp (updated by control loop).""" + return self._current_timestamp + + def update_timestamp(self): + """Update the current timestamp. Called by ExecutorService control loop.""" + self._current_timestamp = time.time() + + async def ensure_connector(self, connector_name: str) -> ConnectorBase: + """ + Ensure connector is loaded and available. + + This method uses the connector service which already caches connectors. + It also ensures the MarketDataProvider has access to the connector for + order book initialization. 
+ + Args: + connector_name: Name of the connector + + Returns: + The connector instance + """ + # Get connector from connector service (already cached there) + connector = await self._accounts_service._connector_service.get_trading_connector( + self._account_name, + connector_name + ) + return connector + + async def add_market( + self, + connector_name: str, + trading_pair: str, + order_book_timeout: float = 10.0 + ): + """ + Add a trading pair to active markets with full order book support. + + This method ensures: + 1. Connector is loaded + 2. Order book is initialized and has valid data + 3. Rate sources are initialized for price feeds + + Args: + connector_name: Name of the connector + trading_pair: Trading pair to add + order_book_timeout: Timeout in seconds to wait for order book data + """ + await self.ensure_connector(connector_name) + + if connector_name not in self._markets: + self._markets[connector_name] = set() + + # Check if already tracking this pair + if trading_pair in self._markets[connector_name]: + logger.debug(f"Market {connector_name}/{trading_pair} already active") + return + + self._markets[connector_name].add(trading_pair) + + # Get connector and its order book tracker + connector = self.connectors.get(connector_name) + if not connector: + raise ValueError(f"Connector {connector_name} not available. 
Check credentials.") + tracker = connector.order_book_tracker + + # Check if order book already exists, if not initialize it dynamically + if trading_pair in tracker.order_books: + logger.debug(f"Order book already exists for {connector_name}/{trading_pair}") + else: + logger.debug(f"Order book not found for {connector_name}/{trading_pair}, initializing dynamically") + market_data_service = self._accounts_service._market_data_service + if market_data_service: + try: + success = await market_data_service.initialize_order_book( + connector_name, trading_pair, + account_name=self._account_name, + timeout=order_book_timeout + ) + if not success: + logger.warning(f"Order book for {connector_name}/{trading_pair} not ready after timeout") + except Exception as e: + logger.warning(f"Exception initializing order book: {e}") + + # Register the trading pair with the connector + self._register_trading_pair_with_connector(connector, trading_pair) + + async def _wait_for_order_book_ready( + self, + tracker, + trading_pair: str, + timeout: float = 30.0 + ) -> bool: + """ + Wait for an order book to have valid data. 
+ + Args: + tracker: Order book tracker instance + trading_pair: Trading pair to wait for + timeout: Maximum time to wait in seconds + + Returns: + True if order book is ready, False if timeout + """ + import asyncio + waited = 0 + interval = 0.5 + while waited < timeout: + if trading_pair in tracker.order_books: + ob = tracker.order_books[trading_pair] + try: + bids, asks = ob.snapshot + if len(bids) > 0 and len(asks) > 0: + logger.info(f"Order book for {trading_pair} is ready with {len(bids)} bids and {len(asks)} asks") + return True + except Exception: + pass + await asyncio.sleep(interval) + waited += interval + logger.warning(f"Timeout waiting for {trading_pair} order book to be ready") + return False + + def _register_trading_pair_with_connector( + self, + connector: ConnectorBase, + trading_pair: str + ): + """ + Register a trading pair with the connector's internal structures. + + This is needed for methods like get_order_book() to work properly. + Different connector types may store trading pairs differently. 
+ + Args: + connector: The connector instance + trading_pair: Trading pair to register + """ + logger.info(f"Registering {trading_pair} with connector {type(connector).__name__}") + + # Add to connector's _trading_pairs if it exists + if hasattr(connector, '_trading_pairs'): + tp_type = type(connector._trading_pairs).__name__ + logger.info(f"Connector has _trading_pairs of type: {tp_type}") + + if isinstance(connector._trading_pairs, set): + connector._trading_pairs.add(trading_pair) + logger.info(f"Added {trading_pair} to connector._trading_pairs (set)") + elif isinstance(connector._trading_pairs, list): + if trading_pair not in connector._trading_pairs: + connector._trading_pairs.append(trading_pair) + logger.info(f"Added {trading_pair} to connector._trading_pairs (list)") + elif isinstance(connector._trading_pairs, dict): + # For paper trade or similar connectors that use a dict + if trading_pair not in connector._trading_pairs: + base, quote = trading_pair.split("-") + # Import TradingPair if needed for paper trade + try: + from hummingbot.connector.exchange.paper_trade.trading_pair import TradingPair + connector._trading_pairs[trading_pair] = TradingPair( + trading_pair=f"{base}{quote}", + base_asset=base, + quote_asset=quote + ) + logger.info(f"Added {trading_pair} to connector._trading_pairs (dict)") + except ImportError: + connector._trading_pairs[trading_pair] = trading_pair + logger.info(f"Added {trading_pair} to connector._trading_pairs (dict, simple)") + else: + logger.warning(f"Connector {type(connector).__name__} does not have _trading_pairs attribute") + + # Also check if order_book_tracker has the pair + if hasattr(connector, 'order_book_tracker'): + tracker = connector.order_book_tracker + has_ob = trading_pair in tracker.order_books if hasattr(tracker, 'order_books') else False + logger.info(f"Order book tracker has {trading_pair}: {has_ob}") + + async def remove_market( + self, + connector_name: str, + trading_pair: str, + remove_order_book: bool 
= True + ): + """ + Remove a trading pair from active markets and optionally cleanup order book. + + Args: + connector_name: Name of the connector + trading_pair: Trading pair to remove + remove_order_book: Whether to remove the order book (default True) + """ + if connector_name not in self._markets: + return + + self._markets[connector_name].discard(trading_pair) + if not self._markets[connector_name]: + del self._markets[connector_name] + + # Remove order book if requested + if remove_order_book: + market_data_service = self._accounts_service._market_data_service + if market_data_service: + try: + success = await market_data_service.remove_trading_pair( + connector_name, + trading_pair, + account_name=self._account_name + ) + if success: + logger.info(f"Removed order book for {connector_name}/{trading_pair}") + else: + logger.debug(f"Order book for {trading_pair} was not being tracked") + except Exception as e: + logger.warning(f"Failed to remove order book for {trading_pair}: {e}") + + # ======================================== + # ScriptStrategyBase-compatible methods + # These are called by executors via self._strategy.method() + # ======================================== + + def buy( + self, + connector_name: str, + trading_pair: str, + amount: Decimal, + order_type: OrderType, + price: Decimal = Decimal("NaN"), + position_action: PositionAction = PositionAction.NIL + ) -> str: + """ + Place a buy order. + + Args: + connector_name: Name of the connector + trading_pair: Trading pair + amount: Order amount in base currency + order_type: Type of order (LIMIT, MARKET, etc.) + price: Order price (for limit orders) + position_action: Position action for perpetuals + + Returns: + Client order ID + """ + connector = self.connectors.get(connector_name) + if not connector: + raise ValueError(f"Connector {connector_name} not loaded. 
Call ensure_connector first.") + + return connector.buy( + trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price, + position_action=position_action + ) + + def sell( + self, + connector_name: str, + trading_pair: str, + amount: Decimal, + order_type: OrderType, + price: Decimal = Decimal("NaN"), + position_action: PositionAction = PositionAction.NIL + ) -> str: + """ + Place a sell order. + + Args: + connector_name: Name of the connector + trading_pair: Trading pair + amount: Order amount in base currency + order_type: Type of order (LIMIT, MARKET, etc.) + price: Order price (for limit orders) + position_action: Position action for perpetuals + + Returns: + Client order ID + """ + connector = self.connectors.get(connector_name) + if not connector: + raise ValueError(f"Connector {connector_name} not loaded. Call ensure_connector first.") + + return connector.sell( + trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price, + position_action=position_action + ) + + def cancel( + self, + connector_name: str, + trading_pair: str, + order_id: str + ) -> str: + """ + Cancel an order. + + Args: + connector_name: Name of the connector + trading_pair: Trading pair + order_id: Client order ID to cancel + + Returns: + Client order ID that was cancelled + """ + connector = self.connectors.get(connector_name) + if not connector: + raise ValueError(f"Connector {connector_name} not loaded. Call ensure_connector first.") + + return connector.cancel(trading_pair=trading_pair, client_order_id=order_id) + + def get_active_orders(self, connector_name: str) -> List: + """ + Get active orders for a connector. 
+ + Args: + connector_name: Name of the connector + + Returns: + List of active in-flight orders + """ + connector = self.connectors.get(connector_name) + if not connector: + return [] + return list(connector.in_flight_orders.values()) + + # ======================================== + # Additional helper methods + # ======================================== + + def get_connector(self, connector_name: str) -> Optional[ConnectorBase]: + """ + Get a connector by name from the shared ConnectorManager. + + Args: + connector_name: Name of the connector + + Returns: + The connector instance or None if not loaded + """ + return self.connectors.get(connector_name) + + def is_connector_loaded(self, connector_name: str) -> bool: + """ + Check if a connector is loaded in the shared ConnectorManager. + + Args: + connector_name: Name of the connector + + Returns: + True if connector is loaded + """ + return connector_name in self.connectors + + def get_all_trading_pairs(self) -> Dict[str, Set[str]]: + """ + Get all active trading pairs by connector. + + Returns: + Dictionary mapping connector names to sets of trading pairs + """ + return {k: v.copy() for k, v in self._markets.items()} + + async def cleanup(self): + """ + Cleanup resources. Called when shutting down. + + Note: This does NOT clean up connectors since they are managed by the + shared ConnectorManager, not by AccountTradingInterface. + """ + # Clear only local state (markets tracking) + self._markets.clear() + logger.info(f"AccountTradingInterface cleanup completed for account {self._account_name}") + + class AccountsService: """ This class is responsible for managing all the accounts that are connected to the trading system. It is responsible @@ -45,7 +490,6 @@ class AccountsService: def __init__(self, account_update_interval: int = 5, default_quote: str = "USDT", - market_data_feed_manager: Optional[MarketDataFeedManager] = None, gateway_url: str = "http://localhost:15888"): """ Initialize the AccountsService. 
@@ -53,7 +497,6 @@ def __init__(self, Args: account_update_interval: How often to update account states in minutes (default: 5) default_quote: Default quote currency for trading pairs (default: "USDT") - market_data_feed_manager: Market data feed manager for price caching (optional) gateway_url: URL for Gateway service (default: "http://localhost:15888") """ self.secrets_manager = ETHKeyFileSecretManger(settings.security.config_password) @@ -61,7 +504,6 @@ def __init__(self, self.update_account_state_interval = account_update_interval * 60 self.order_status_poll_interval = 60 # Poll order status every 1 minute self.default_quote = default_quote - self.market_data_feed_manager = market_data_feed_manager self._update_account_state_task: Optional[asyncio.Task] = None self._order_status_polling_task: Optional[asyncio.Task] = None @@ -69,8 +511,10 @@ def __init__(self, self.db_manager = AsyncDatabaseManager(settings.database.url) self._db_initialized = False - # Initialize connector manager with db_manager - self.connector_manager = ConnectorManager(self.secrets_manager, self.db_manager) + # Services injected from main.py + self._connector_service = None # UnifiedConnectorService + self._market_data_service = None # MarketDataService + self._trading_service = None # TradingService # Initialize Gateway client self.gateway_client = GatewayClient(gateway_url) @@ -85,6 +529,29 @@ def __init__(self, ) self._gateway_poller_started = False + # Trading interfaces per account (for executor use) + self._trading_interfaces: Dict[str, AccountTradingInterface] = {} + + def get_trading_interface(self, account_name: str) -> AccountTradingInterface: + """ + Get or create a trading interface for the specified account. + + This interface provides ScriptStrategyBase-compatible methods + that executors can use for trading operations. 
+ + Args: + account_name: Account to get trading interface for + + Returns: + AccountTradingInterface instance for the account + """ + if account_name not in self._trading_interfaces: + self._trading_interfaces[account_name] = AccountTradingInterface( + accounts_service=self, + account_name=account_name + ) + return self._trading_interfaces[account_name] + async def ensure_db_initialized(self): """Ensure database is initialized before using it.""" if not self._db_initialized: @@ -156,8 +623,15 @@ async def stop(self): except Exception as e: logger.error(f"Error stopping Gateway transaction poller: {e}", exc_info=True) - # Stop all connectors through the ConnectorManager - await self.connector_manager.stop_all_connectors() + # Cleanup trading interfaces + for interface in self._trading_interfaces.values(): + await interface.cleanup() + self._trading_interfaces.clear() + logger.info("Cleaned up trading interfaces") + + # Stop all connectors through the connector service + if self._connector_service: + await self._connector_service.stop_all() logger.info("AccountsService stopped successfully") @@ -171,7 +645,8 @@ async def update_account_state_loop(self): try: await self.check_all_connectors() # Update all connector states (balances, orders, positions, trading rules) - await self.connector_manager.update_all_connector_states() + if self._connector_service: + await self._connector_service.update_all_trading_connector_states() await self.update_account_state() await self.dump_account_state() except Exception as e: @@ -188,7 +663,8 @@ async def order_status_polling_loop(self): """ while True: try: - await self.connector_manager.sync_order_state_to_database_for_all_connectors() + if self._connector_service: + await self._connector_service.sync_all_orders_to_database() except Exception as e: logger.error(f"Error syncing order state to database: {e}") finally: @@ -266,64 +742,23 @@ async def check_all_connectors(self): async def _ensure_account_connectors_initialized(self, 
account_name: str): """ Ensure all connectors for a specific account are initialized. - This delegates to ConnectorManager for actual initialization. - + This delegates to the connector service for actual initialization. + :param account_name: The name of the account to initialize connectors for. """ + if not self._connector_service: + return + # Initialize missing connectors - for connector_name in self.connector_manager.list_available_credentials(account_name): + for connector_name in self._connector_service.list_available_credentials(account_name): try: # Only initialize if connector doesn't exist - if not self.connector_manager.is_connector_initialized(account_name, connector_name): + if not self._connector_service.is_trading_connector_initialized(account_name, connector_name): # Get connector will now handle all initialization - await self.connector_manager.get_connector(account_name, connector_name) + await self._connector_service.get_trading_connector(account_name, connector_name) except Exception as e: logger.error(f"Error initializing connector {connector_name} for account {account_name}: {e}") - def _initialize_rate_sources_for_pairs(self, connector_name: str, trading_pairs: List[str]): - """ - Helper method to initialize rate sources for trading pairs. - - :param connector_name: The name of the connector. - :param trading_pairs: List of trading pairs to initialize. 
- """ - if not trading_pairs or not self.market_data_feed_manager: - return - - try: - connector_pairs = [ConnectorPair(connector_name=connector_name, trading_pair=trading_pair) - for trading_pair in trading_pairs] - self.market_data_feed_manager.market_data_provider.initialize_rate_sources(connector_pairs) - logger.info(f"Initialized rate sources for {len(trading_pairs)} trading pairs in {connector_name}") - except Exception as e: - logger.error(f"Error initializing rate sources for {connector_name}: {e}") - - async def _initialize_price_tracking(self, account_name: str, connector_name: str, connector): - """ - Initialize price tracking for a connector's tokens using MarketDataProvider. - - :param account_name: The name of the account. - :param connector_name: The name of the connector. - :param connector: The connector instance. - """ - try: - # Get current balances to determine which tokens need price tracking - balances = connector.get_all_balances() - unique_tokens = [token for token, value in balances.items() if - value != Decimal("0") and token not in settings.banned_tokens and "USD" not in token] - - if unique_tokens: - # Create trading pairs for price tracking - trading_pairs = [self.get_default_market(token, connector_name) for token in unique_tokens] - - # Initialize rate sources using helper method - self._initialize_rate_sources_for_pairs(connector_name, trading_pairs) - - logger.info(f"Initialized price tracking for {len(trading_pairs)} trading pairs in {connector_name} (Account: {account_name})") - - except Exception as e: - logger.error(f"Error initializing price tracking for {connector_name} in account {account_name}: {e}") - async def update_account_state( self, skip_gateway: bool = False, @@ -338,7 +773,7 @@ async def update_account_state( connector_names: If provided, only update these connectors. If None, update all connectors. For Gateway, this filters by chain-network (e.g., 'solana-mainnet-beta'). 
""" - all_connectors = self.connector_manager.get_all_connectors() + all_connectors = self._connector_service.get_all_trading_connectors() if self._connector_service else {} # Prepare parallel tasks tasks = [] @@ -387,35 +822,10 @@ async def _get_connector_tokens_info(self, connector, connector_name: str) -> Li unique_tokens = [balance["token"] for balance in balances] trading_pairs = [self.get_default_market(token, connector_name) for token in unique_tokens if "USD" not in token] - # Try to get cached prices first, fallback to live prices if needed - prices_from_cache = {} - trading_pairs_need_update = [] - - if self.market_data_feed_manager: - for trading_pair in trading_pairs: - try: - cached_price = self.market_data_feed_manager.market_data_provider.get_rate(trading_pair) - if cached_price > 0: - prices_from_cache[trading_pair] = cached_price - else: - trading_pairs_need_update.append(trading_pair) - except Exception: - trading_pairs_need_update.append(trading_pair) - else: - trading_pairs_need_update = trading_pairs - - # Add new trading pairs to market data provider if they need updates - if trading_pairs_need_update: - self._initialize_rate_sources_for_pairs(connector_name, trading_pairs_need_update) - logger.info(f"Added {len(trading_pairs_need_update)} new trading pairs to market data provider: {trading_pairs_need_update}") - - # Get fresh prices for pairs not in cache or with stale/zero prices - fresh_prices = {} - if trading_pairs_need_update: - fresh_prices = await self._safe_get_last_traded_prices(connector, trading_pairs_need_update) - - # Combine cached and fresh prices - all_prices = {**prices_from_cache, **fresh_prices} + # Get fresh prices for all trading pairs + all_prices = {} + if trading_pairs: + all_prices = await self._safe_get_last_traded_prices(connector, trading_pairs) tokens_info = [] for balance in balances: @@ -471,25 +881,25 @@ def get_connector_config_map(self, connector_name: str): :param connector_name: The name of the connector. 
:return: The connector config map. """ - return self.connector_manager.get_connector_config_map(connector_name) + from services.unified_connector_service import UnifiedConnectorService + return UnifiedConnectorService.get_connector_config_map(connector_name) async def add_credentials(self, account_name: str, connector_name: str, credentials: dict): """ Add or update connector credentials and initialize the connector with validation. - + :param account_name: The name of the account. :param connector_name: The name of the connector. :param credentials: Dictionary containing the connector credentials. :raises Exception: If credentials are invalid or connector cannot be initialized. """ + if not self._connector_service: + raise HTTPException(status_code=500, detail="Connector service not initialized") + try: # Update the connector keys (this saves the credentials to file and validates them) - connector = await self.connector_manager.update_connector_keys(account_name, connector_name, credentials) - - # Initialize price tracking for this connector's tokens if market data manager is available - if self.market_data_feed_manager: - await self._initialize_price_tracking(account_name, connector_name, connector) - + connector = await self._connector_service.update_connector_keys(account_name, connector_name, credentials) + await self.update_account_state() except Exception as e: logger.error(f"Error adding connector credentials for account {account_name}: {e}") @@ -529,16 +939,16 @@ async def delete_credentials(self, account_name: str, connector_name: str): fs_util.delete_file(directory=f"credentials/{account_name}/connectors", file_name=f"{connector_name}.yml") # Always perform cleanup regardless of file existence - # Stop the connector if it's running - await self.connector_manager.stop_connector(account_name, connector_name) + if self._connector_service: + # Stop the connector if it's running + await self._connector_service.stop_trading_connector(account_name, 
connector_name) + # Clear the connector from cache + self._connector_service.clear_trading_connector(account_name, connector_name) # Remove from account state if account_name in self.accounts_state and connector_name in self.accounts_state[account_name]: self.accounts_state[account_name].pop(connector_name) - # Clear the connector from cache - self.connector_manager.clear_cache(account_name, connector_name) - def add_account(self, account_name: str): """ Add a new account. @@ -565,18 +975,18 @@ async def delete_account(self, account_name: str): :return: """ # Stop all connectors for this account - for connector_name in self.connector_manager.list_account_connectors(account_name): - await self.connector_manager.stop_connector(account_name, connector_name) - + if self._connector_service: + for connector_name in self._connector_service.list_account_connectors(account_name): + await self._connector_service.stop_trading_connector(account_name, connector_name) + # Clear all connectors for this account from cache + self._connector_service.clear_trading_connector(account_name) + # Delete account folder fs_util.delete_folder('credentials', account_name) - + # Remove from account state if account_name in self.accounts_state: self.accounts_state.pop(account_name) - - # Clear all connectors for this account from cache - self.connector_manager.clear_cache(account_name) async def get_account_current_state(self, account_name: str) -> Dict[str, List[Dict]]: """ @@ -901,13 +1311,12 @@ def get_account_distribution(self) -> Dict[str, any]: "error": str(e) } - async def place_trade(self, account_name: str, connector_name: str, trading_pair: str, - trade_type: TradeType, amount: Decimal, order_type: OrderType = OrderType.LIMIT, - price: Optional[Decimal] = None, position_action: PositionAction = PositionAction.OPEN, - market_data_manager: Optional[MarketDataFeedManager] = None) -> str: + async def place_trade(self, account_name: str, connector_name: str, trading_pair: str, + 
trade_type: TradeType, amount: Decimal, order_type: OrderType = OrderType.LIMIT, + price: Optional[Decimal] = None, position_action: PositionAction = PositionAction.OPEN) -> str: """ Place a trade using the specified account and connector. - + Args: account_name: Name of the account to trade with connector_name: Name of the connector/exchange @@ -917,24 +1326,21 @@ async def place_trade(self, account_name: str, connector_name: str, trading_pair order_type: "LIMIT", "MARKET", or "LIMIT_MAKER" price: Price for limit orders (required for LIMIT and LIMIT_MAKER) position_action: Position action for perpetual contracts (OPEN/CLOSE) - market_data_manager: Market data manager for price fetching - + Returns: Client order ID assigned by the connector - + Raises: HTTPException: If account, connector not found, or trade fails """ # Validate account exists if account_name not in self.list_accounts(): raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - - # Validate connector exists for account - if not self.connector_manager.is_connector_initialized(account_name, connector_name): - raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found for account '{account_name}'") - - # Get the connector instance - connector = await self.connector_manager.get_connector(account_name, connector_name) + + if not self._connector_service: + raise HTTPException(status_code=500, detail="Connector service not initialized") + + connector = await self._connector_service.get_trading_connector(account_name, connector_name) # Validate price for limit orders if order_type in [OrderType.LIMIT, OrderType.LIMIT_MAKER] and price is None: @@ -980,14 +1386,14 @@ async def place_trade(self, account_name: str, connector_name: str, trading_pair notional_size = quantized_price * quantized_amount else: # For market orders without price, get current market price for validation - if market_data_manager: + if self._market_data_service: try: - prices = await 
market_data_manager.get_prices(connector_name, [trading_pair]) + prices = await self._market_data_service.get_prices(connector_name, [trading_pair]) if trading_pair in prices and "error" not in prices: price = Decimal(str(prices[trading_pair])) except Exception as e: logger.error(f"Error getting market price for {trading_pair}: {e}") - notional_size = price * quantized_amount + notional_size = price * quantized_amount if price else Decimal("0") if notional_size < trading_rule.min_notional_size: raise HTTPException( @@ -1031,26 +1437,24 @@ async def place_trade(self, account_name: str, connector_name: str, trading_pair async def get_connector_instance(self, account_name: str, connector_name: str): """ Get a connector instance for direct access. - + Args: account_name: Name of the account connector_name: Name of the connector - + Returns: Connector instance - + Raises: HTTPException: If account or connector not found """ if account_name not in self.list_accounts(): raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - - # Check if connector credentials exist - available_credentials = self.connector_manager.list_available_credentials(account_name) - if connector_name not in available_credentials: - raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found for account '{account_name}'") - - return await self.connector_manager.get_connector(account_name, connector_name) + + if not self._connector_service: + raise HTTPException(status_code=500, detail="Connector service not initialized") + + return await self._connector_service.get_trading_connector(account_name, connector_name) async def _get_perpetual_connector(self, account_name: str, connector_name: str): """ @@ -1662,65 +2066,21 @@ async def get_gateway_balances(self, chain: str, address: str, network: Optional "units": Decimal(str(balance)) }) - # Get prices using rate sources (similar to _get_connector_tokens_info) + # Get prices for tokens unique_tokens = 
[b["token"] for b in balances_list] + all_prices = {} - # Try to get cached prices first - # Try USDT first (more common in CEX like Binance), then USDC (common in DEX) - prices_from_cache = {} - tokens_need_update = [] - - if self.market_data_feed_manager: - for token in unique_tokens: - try: - token_unwrapped = self.get_unwrapped_token(token) - # Try USDT first (Binance, etc.), then USDC as fallback (DEX) - found_price = False - for quote in ["USDT", "USDC"]: - trading_pair = f"{token_unwrapped}-{quote}" - try: - cached_price = self.market_data_feed_manager.market_data_provider.get_rate(trading_pair) - if cached_price > 0: - prices_from_cache[token] = cached_price - found_price = True - break - except Exception: - continue - if not found_price: - tokens_need_update.append(token) - except Exception: - tokens_need_update.append(token) - else: - tokens_need_update = unique_tokens - - # Initialize rate sources for Gateway using the old format: "gateway_{chain}-{network}" - # The MarketDataProvider.update_rates_task() will detect this format and resolve - # the correct pricing connector (jupiter/router for solana, uniswap/router for ethereum, etc.) 
- if tokens_need_update and self.market_data_feed_manager: - # Use the format that MarketDataProvider expects for gateway connectors - gateway_connector_key = f"gateway_{chain}-{network}" - trading_pairs_need_update = [f"{token}-USDC" for token in tokens_need_update] - connector_pairs = [ConnectorPair(connector_name=gateway_connector_key, trading_pair=tp) for tp in trading_pairs_need_update] - - for pair in connector_pairs: - self.market_data_feed_manager.market_data_provider._rates_required.add_or_update( - gateway_connector_key, pair - ) - logger.info(f"Added {len(trading_pairs_need_update)} Gateway trading pairs to market data provider for {gateway_connector_key}: {trading_pairs_need_update}") - - # Trigger immediate price fetch for the new tokens + # Fetch prices for Gateway tokens + if unique_tokens: try: fetched_prices = await self._fetch_gateway_prices_immediate( - chain, network, tokens_need_update + chain, network, unique_tokens ) for token, price in fetched_prices.items(): if price > 0: - prices_from_cache[token] = price + all_prices[token] = price except Exception as e: - logger.warning(f"Error fetching immediate gateway prices: {e}") - - # Use cached prices - all_prices = prices_from_cache + logger.warning(f"Error fetching gateway prices: {e}") # Format final result with prices formatted_balances = [] From c533ed16a910af93edab7cdfc6e348d39f7b2491 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 20 Jan 2026 20:27:35 -0300 Subject: [PATCH 08/20] (feat) crete unified connector service replacing utils --- deps.py | 28 +- services/unified_connector_service.py | 1252 +++++++++++++++++++++++++ utils/connector_manager.py | 686 -------------- 3 files changed, 1275 insertions(+), 691 deletions(-) create mode 100644 services/unified_connector_service.py delete mode 100644 utils/connector_manager.py diff --git a/deps.py b/deps.py index 3c1fa8c1..99e0f776 100644 --- a/deps.py +++ b/deps.py @@ -3,7 +3,10 @@ from services.accounts_service import AccountsService 
from services.docker_service import DockerService from services.gateway_service import GatewayService -from services.market_data_feed_manager import MarketDataFeedManager +from services.unified_connector_service import UnifiedConnectorService +from services.market_data_service import MarketDataService +from services.trading_service import TradingService +from services.executor_service import ExecutorService from utils.bot_archiver import BotArchiver from database import AsyncDatabaseManager @@ -28,9 +31,24 @@ def get_gateway_service(request: Request) -> GatewayService: return request.app.state.gateway_service -def get_market_data_feed_manager(request: Request) -> MarketDataFeedManager: - """Get MarketDataFeedManager from app state.""" - return request.app.state.market_data_feed_manager +def get_connector_service(request: Request) -> UnifiedConnectorService: + """Get UnifiedConnectorService from app state.""" + return request.app.state.connector_service + + +def get_market_data_service(request: Request) -> MarketDataService: + """Get MarketDataService from app state.""" + return request.app.state.market_data_service + + +def get_trading_service(request: Request) -> TradingService: + """Get TradingService from app state.""" + return request.app.state.trading_service + + +def get_executor_service(request: Request) -> ExecutorService: + """Get ExecutorService from app state.""" + return request.app.state.executor_service def get_bot_archiver(request: Request) -> BotArchiver: @@ -40,4 +58,4 @@ def get_bot_archiver(request: Request) -> BotArchiver: def get_database_manager(request: Request) -> AsyncDatabaseManager: """Get AsyncDatabaseManager from app state.""" - return request.app.state.accounts_service.db_manager \ No newline at end of file + return request.app.state.db_manager diff --git a/services/unified_connector_service.py b/services/unified_connector_service.py new file mode 100644 index 00000000..43904ae2 --- /dev/null +++ b/services/unified_connector_service.py 
@@ -0,0 +1,1252 @@ +""" +UnifiedConnectorService - Single source of truth for all connector instances. + +This service consolidates connector management from: +- ConnectorManager (trading connectors) +- MarketDataProvider._non_trading_connectors (data-only connectors) + +Key features: +- Trading connectors: authenticated, per-account, with order tracking +- Data connectors: non-authenticated, shared, for public market data +- get_best_connector_for_market(): prefers trading connector (has order book tracker) +""" +import asyncio +import logging +import time +from decimal import Decimal +from typing import Dict, List, Optional + +from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger +from hummingbot.client.config.config_helpers import ( + api_keys_from_connector_config_map, + get_connector_class, + ClientConfigAdapter, +) +from hummingbot.client.settings import AllConnectorSettings +from hummingbot.connector.connector_base import ConnectorBase +from hummingbot.connector.connector_metrics_collector import TradeVolumeMetricCollector +from hummingbot.connector.perpetual_derivative_py_base import PerpetualDerivativePyBase +from hummingbot.core.data_type.common import OrderType, PositionAction, PositionMode, TradeType +from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState +from hummingbot.core.rate_oracle.rate_oracle import RateOracle +from hummingbot.core.utils.async_utils import safe_ensure_future + +from utils.file_system import fs_util +from utils.hummingbot_api_config_adapter import HummingbotAPIConfigAdapter +from utils.security import BackendAPISecurity + +logger = logging.getLogger(__name__) + + +class UnifiedConnectorService: + """ + Single source of truth for ALL connector instances. + + Manages two types of connectors: + 1. Trading connectors: authenticated, per-account, with full trading capabilities + 2. 
Data connectors: non-authenticated, shared, for public market data only + + The key method `get_best_connector_for_market()` ensures that order book + operations use the trading connector when available (which already has + order_book_tracker running), falling back to data connector otherwise. + """ + + METRICS_ACTIVATION_INTERVAL = Decimal("900") # 15 minutes + METRICS_VALUATION_TOKEN = "USDT" + + def __init__(self, secrets_manager: ETHKeyFileSecretManger, db_manager=None): + self.secrets_manager = secrets_manager + self.db_manager = db_manager + + # Trading connectors: account_name -> connector_name -> ConnectorBase + self._trading_connectors: Dict[str, Dict[str, ConnectorBase]] = {} + + # Data-only connectors: connector_name -> ConnectorBase (shared, non-authenticated) + self._data_connectors: Dict[str, ConnectorBase] = {} + self._data_connectors_started: Dict[str, bool] = {} + + # Order and funding recorders (for trading connectors) + self._orders_recorders: Dict[str, any] = {} + self._funding_recorders: Dict[str, any] = {} + self._metrics_collectors: Dict[str, TradeVolumeMetricCollector] = {} + + # Connector settings cache + self._conn_settings = AllConnectorSettings.get_connector_settings() + + # ========================================================================= + # Trading Pairs Type-Normalizer Helpers + # ========================================================================= + + def _add_to_trading_pairs(self, trading_pairs_attr, trading_pair: str) -> bool: + """Add trading pair to a _trading_pairs attribute regardless of its type. 
+ + Args: + trading_pairs_attr: The _trading_pairs attribute (set, list, or dict) + trading_pair: The trading pair to add + + Returns: + True if added, False if already present or unsupported type + """ + if isinstance(trading_pairs_attr, set): + if trading_pair not in trading_pairs_attr: + trading_pairs_attr.add(trading_pair) + return True + return False + elif isinstance(trading_pairs_attr, list): + if trading_pair not in trading_pairs_attr: + trading_pairs_attr.append(trading_pair) + return True + return False + return False + + def _remove_from_trading_pairs(self, trading_pairs_attr, trading_pair: str) -> bool: + """Remove trading pair from a _trading_pairs attribute regardless of its type. + + Args: + trading_pairs_attr: The _trading_pairs attribute (set, list, or dict) + trading_pair: The trading pair to remove + + Returns: + True if removed, False if not present or unsupported type + """ + if isinstance(trading_pairs_attr, set): + if trading_pair in trading_pairs_attr: + trading_pairs_attr.discard(trading_pair) + return True + return False + elif isinstance(trading_pairs_attr, list): + if trading_pair in trading_pairs_attr: + trading_pairs_attr.remove(trading_pair) + return True + return False + return False + + def _is_perpetual_connector(self, connector: ConnectorBase) -> bool: + """Check if connector is a perpetual derivative connector. + + Args: + connector: The connector instance to check + + Returns: + True if perpetual connector, False otherwise + """ + return isinstance(connector, PerpetualDerivativePyBase) + + # ========================================================================= + # Trading Connector Management (authenticated, per-account) + # ========================================================================= + + async def get_trading_connector( + self, + account_name: str, + connector_name: str + ) -> ConnectorBase: + """ + Get or create an authenticated trading connector for a specific account. 
+ + Trading connectors have: + - API key authentication + - Order tracking (OrdersRecorder) + - Funding tracking for perpetuals (FundingRecorder) + - Metrics collection + - Full trading capabilities + + Args: + account_name: The account name + connector_name: The connector name (e.g., "binance", "binance_perpetual") + + Returns: + Initialized trading connector + """ + if account_name not in self._trading_connectors: + self._trading_connectors[account_name] = {} + + if connector_name not in self._trading_connectors[account_name]: + connector = await self._create_and_initialize_trading_connector( + account_name, connector_name + ) + self._trading_connectors[account_name][connector_name] = connector + + return self._trading_connectors[account_name][connector_name] + + def get_all_trading_connectors(self) -> Dict[str, Dict[str, ConnectorBase]]: + """ + Get all trading connectors organized by account. + + Returns: + Dict mapping account_name -> connector_name -> ConnectorBase + """ + return self._trading_connectors + + def get_account_connectors(self, account_name: str) -> Dict[str, ConnectorBase]: + """ + Get all connectors for a specific account. + + Args: + account_name: Account name + + Returns: + Dict mapping connector_name -> ConnectorBase for this account + """ + return self._trading_connectors.get(account_name, {}) + + def is_trading_connector_initialized( + self, + account_name: str, + connector_name: str + ) -> bool: + """Check if a trading connector is already initialized.""" + return ( + account_name in self._trading_connectors and + connector_name in self._trading_connectors[account_name] + ) + + # ========================================================================= + # Data Connector Management (non-authenticated, shared) + # ========================================================================= + + def get_data_connector(self, connector_name: str) -> ConnectorBase: + """ + Get or create a non-authenticated data connector for public market data. 
+ + Data connectors: + - No API keys required (public endpoints only) + - Shared across accounts + - Used for: trading rules, prices, order books, candles + - NOT used for: trading, balance queries + + Args: + connector_name: The connector name + + Returns: + Non-authenticated connector instance + """ + if connector_name not in self._data_connectors: + self._data_connectors[connector_name] = self._create_data_connector( + connector_name + ) + return self._data_connectors[connector_name] + + async def ensure_data_connector_started( + self, + connector_name: str, + trading_pair: str + ) -> bool: + """ + Ensure a data connector's network is started with at least one trading pair. + + This is needed because exchanges close WebSocket connections without subscriptions. + + Args: + connector_name: The connector name + trading_pair: Initial trading pair to subscribe to + + Returns: + True if started successfully + """ + if self._data_connectors_started.get(connector_name, False): + return True + + connector = self.get_data_connector(connector_name) + + try: + # Add trading pair before starting network + if hasattr(connector, '_trading_pairs'): + self._add_to_trading_pairs(connector._trading_pairs, trading_pair) + + # Start network + await connector.start_network() + self._data_connectors_started[connector_name] = True + logger.info(f"Started data connector: {connector_name} with pair {trading_pair}") + + # Wait for order book tracker to be ready + max_wait = 30 + waited = 0 + tracker = connector.order_book_tracker + while waited < max_wait: + if tracker._order_book_stream_listener_task is not None: + await asyncio.sleep(2.0) + break + await asyncio.sleep(0.5) + waited += 0.5 + + return True + + except Exception as e: + logger.error(f"Error starting data connector {connector_name}: {e}") + return False + + # ========================================================================= + # Best Connector Selection (THE KEY FIX) + # 
========================================================================= + + def get_best_connector_for_market( + self, + connector_name: str, + account_name: Optional[str] = None + ) -> Optional[ConnectorBase]: + """ + Get the best available connector for market operations (order books, prices). + + CRITICAL: This method ensures order book initialization uses the correct + connector. It prefers trading connectors because they already have + order_book_tracker running with WebSocket connections. + + Priority: + 1. Specific account's trading connector (if account_name provided) + 2. Any trading connector for this connector_name + 3. Data connector (creates new if needed) + + Args: + connector_name: The connector name + account_name: Optional account to prefer + + Returns: + Best available connector for market operations + """ + # 1. Try specific account's trading connector + if account_name: + trading = self._trading_connectors.get(account_name, {}).get(connector_name) + if trading: + logger.debug( + f"Using trading connector for {connector_name} " + f"(account: {account_name})" + ) + return trading + + # 2. Try ANY trading connector for this connector_name + for acc_name, acc_connectors in self._trading_connectors.items(): + if connector_name in acc_connectors: + logger.debug( + f"Using trading connector for {connector_name} " + f"(found in account: {acc_name})" + ) + return acc_connectors[connector_name] + + # 3. 
Fall back to data connector + logger.debug(f"Using data connector for {connector_name} (no trading connector)") + return self.get_data_connector(connector_name) + + # ========================================================================= + # Order Book Initialization + # ========================================================================= + + async def initialize_order_book( + self, + connector_name: str, + trading_pair: str, + account_name: Optional[str] = None, + timeout: float = 30.0 + ) -> bool: + """ + Initialize order book for a trading pair using the best available connector. + + This method: + 1. Gets the best connector (prefers trading over data) + 2. Adds trading pair to order book tracker + 3. Waits for order book to have valid data + + Args: + connector_name: The connector name + trading_pair: The trading pair + account_name: Optional account to prefer + timeout: Timeout in seconds + + Returns: + True if order book initialized successfully + """ + connector = self.get_best_connector_for_market(connector_name, account_name) + + if not connector: + logger.error(f"No connector available for {connector_name}") + return False + + if not hasattr(connector, 'order_book_tracker'): + logger.warning(f"Connector {connector_name} has no order_book_tracker") + return False + + tracker = connector.order_book_tracker + + # Check if already initialized + if trading_pair in tracker.order_books: + ob = tracker.order_books[trading_pair] + try: + bids, asks = ob.snapshot + if len(bids) > 0 and len(asks) > 0: + logger.info(f"Order book for {trading_pair} already initialized") + return True + except Exception: + pass + + # For data connectors, ensure network is started + if connector_name in self._data_connectors: + if not self._data_connectors_started.get(connector_name, False): + success = await self.ensure_data_connector_started( + connector_name, trading_pair + ) + if not success: + return False + # Wait for order book after starting + return await 
self._wait_for_order_book(tracker, trading_pair, timeout) + else: + # Connector started, dynamically add trading pair + success = await self._add_trading_pair_to_tracker( + connector, trading_pair + ) + if not success: + return False + + # For trading connectors, dynamically add trading pair + else: + success = await self._add_trading_pair_to_tracker(connector, trading_pair) + if not success: + return False + + # Wait for order book to have data + return await self._wait_for_order_book(tracker, trading_pair, timeout) + + def _ensure_order_book_tracker_started(self, connector: ConnectorBase) -> bool: + """Ensure the order book tracker is started for a connector. + + This is called lazily when the first trading pair is added, not at + connector initialization. Exchanges like Binance disconnect WebSocket + connections that have no subscriptions, so we must wait until we have + at least one trading pair before starting the tracker. + + Returns: + True if tracker is running (or was started), False if no tracker available + """ + if not hasattr(connector, 'order_book_tracker') or not connector.order_book_tracker: + return False + + tracker = connector.order_book_tracker + + # Check if already running + if hasattr(tracker, '_order_book_stream_listener_task') and tracker._order_book_stream_listener_task: + return True + + # Start the tracker + if hasattr(tracker, 'start'): + try: + tracker.start() + logger.info(f"Started order book tracker for {type(connector).__name__}") + return True + except Exception as e: + logger.error(f"Failed to start order book tracker: {e}") + return False + + return False + + async def _add_trading_pair_to_tracker( + self, + connector: ConnectorBase, + trading_pair: str + ) -> bool: + """Add a trading pair to connector's order book tracker. + + Uses the connector's add_trading_pair method which is now available on + ExchangePyBase (and PerpetualDerivativePyBase). 
This method handles: + - Order book initialization via order_book_tracker + - Funding info initialization for perpetual connectors + + IMPORTANT: This method ensures trading pairs are added BEFORE starting + the order book tracker. Exchanges like Binance disconnect WebSockets + that have no subscriptions, so we must register the trading pair first. + """ + try: + # CRITICAL: First register the trading pair with the tracker's internal set + # This must happen BEFORE starting the tracker so it knows what to subscribe to + if hasattr(connector, 'order_book_tracker') and connector.order_book_tracker: + tracker = connector.order_book_tracker + if hasattr(tracker, '_trading_pairs'): + was_added = self._add_to_trading_pairs(tracker._trading_pairs, trading_pair) + if was_added: + logger.debug(f"Registered {trading_pair} with order book tracker's _trading_pairs") + + # Now ensure order book tracker is started (lazy initialization) + # The tracker will use _trading_pairs to know what to subscribe to + tracker_started = self._ensure_order_book_tracker_started(connector) + if not tracker_started: + logger.warning(f"Could not start order book tracker for {type(connector).__name__}") + + # FIX: Check if order book was initialized during tracker startup. + # This happens for the FIRST trading pair when tracker.start() calls _init_order_books(). + # Without this check, the code would continue to connector.add_trading_pair() which + # returns False (pair already exists), then fall through to the data source fallback + # which OVERWRITES the order book unnecessarily. 
+ tracker = None + if hasattr(connector, 'order_book_tracker') and connector.order_book_tracker: + tracker = connector.order_book_tracker + if hasattr(tracker, 'order_books') and trading_pair in tracker.order_books: + try: + ob = tracker.order_books[trading_pair] + bids, asks = ob.snapshot + if len(bids) > 0 or len(asks) > 0: + logger.info(f"Order book for {trading_pair} initialized during tracker startup") + return True + except Exception: + pass # Order book exists but may not have data yet, continue + + # Try connector.add_trading_pair() first - available on ExchangePyBase + # and PerpetualDerivativePyBase (handles funding info automatically) + if hasattr(connector, 'add_trading_pair'): + try: + result = await connector.add_trading_pair(trading_pair) + if result: + logger.info(f"Added trading pair {trading_pair} via connector.add_trading_pair()") + return True + except Exception as e: + logger.debug(f"connector.add_trading_pair failed: {e}") + + # Fallback: Try order_book_tracker.add_trading_pair directly + # (for older connectors that don't have the base class method) + if hasattr(connector, 'order_book_tracker') and hasattr(connector.order_book_tracker, 'add_trading_pair'): + try: + result = await connector.order_book_tracker.add_trading_pair(trading_pair) + if result: + logger.info(f"Added trading pair {trading_pair} via order_book_tracker.add_trading_pair()") + return True + except Exception as e: + logger.debug(f"order_book_tracker.add_trading_pair failed: {e}") + + # Last resort fallback: Use orderbook data source to initialize order book directly + if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: + try: + orderbook_ds = connector._orderbook_ds + tracker = connector.order_book_tracker + + # Get initial order book from data source + order_book = await orderbook_ds.get_new_order_book(trading_pair) + + # Add to tracker's order_books dict + if hasattr(tracker, 'order_books'): + tracker.order_books[trading_pair] = order_book + + # Also add to 
trading pairs tracking + if hasattr(tracker, '_trading_pairs'): + self._add_to_trading_pairs(tracker._trading_pairs, trading_pair) + + logger.info(f"Initialized order book for {trading_pair} via data source fallback") + return True + + except Exception as e: + logger.error(f"Failed to initialize order book via data source: {e}") + + logger.warning( + f"Connector {type(connector).__name__} doesn't support " + f"dynamic trading pair addition" + ) + return False + + except Exception as e: + logger.error(f"Error adding trading pair {trading_pair}: {e}") + return False + + async def remove_trading_pair( + self, + connector_name: str, + trading_pair: str, + account_name: Optional[str] = None + ) -> bool: + """ + Remove a trading pair from a connector's order book tracker. + + This method cleans up order book resources for a trading pair that is + no longer needed. Useful for: + - Executor cleanup when stopping + - Memory management for unused pairs + - Account cleanup operations + + Args: + connector_name: The connector name + trading_pair: The trading pair to remove + account_name: Optional account to target specific trading connector + + Returns: + True if successfully removed, False otherwise + """ + connector = self.get_best_connector_for_market(connector_name, account_name) + + if not connector: + logger.warning(f"No connector available for {connector_name} to remove {trading_pair}") + return False + + return await self._remove_trading_pair_from_tracker(connector, trading_pair) + + async def _remove_trading_pair_from_tracker( + self, + connector: ConnectorBase, + trading_pair: str + ) -> bool: + """Remove a trading pair from connector's order book tracker. + + Uses the connector's remove_trading_pair method which is now available on + ExchangePyBase (and PerpetualDerivativePyBase). 
This method handles: + - Order book cleanup via order_book_tracker + - Funding info cleanup for perpetual connectors + """ + try: + # Try connector.remove_trading_pair() first - available on ExchangePyBase + # and PerpetualDerivativePyBase (handles funding info cleanup automatically) + if hasattr(connector, 'remove_trading_pair'): + try: + result = await connector.remove_trading_pair(trading_pair) + if result: + logger.info(f"Removed trading pair {trading_pair} via connector.remove_trading_pair()") + return True + except Exception as e: + logger.debug(f"connector.remove_trading_pair failed: {e}") + + # Fallback: Try order_book_tracker.remove_trading_pair directly + if hasattr(connector, 'order_book_tracker') and hasattr(connector.order_book_tracker, 'remove_trading_pair'): + try: + result = await connector.order_book_tracker.remove_trading_pair(trading_pair) + if result: + logger.info(f"Removed trading pair {trading_pair} via order_book_tracker.remove_trading_pair()") + return True + except Exception as e: + logger.debug(f"order_book_tracker.remove_trading_pair failed: {e}") + + # Last resort fallback: Manual removal from tracker + if hasattr(connector, 'order_book_tracker'): + tracker = connector.order_book_tracker + removed = False + + # Remove from order_books dict + if hasattr(tracker, 'order_books') and trading_pair in tracker.order_books: + del tracker.order_books[trading_pair] + removed = True + + # Remove from trading pairs tracking + if hasattr(tracker, '_trading_pairs'): + self._remove_from_trading_pairs(tracker._trading_pairs, trading_pair) + + if removed: + logger.info(f"Removed trading pair {trading_pair} via manual fallback") + return True + + logger.warning( + f"Connector {type(connector).__name__} doesn't support " + f"dynamic trading pair removal or pair not found" + ) + return False + + except Exception as e: + logger.error(f"Error removing trading pair {trading_pair}: {e}") + return False + + async def _wait_for_order_book( + self, + tracker, + 
trading_pair: str, + timeout: float + ) -> bool: + """Wait for order book to have valid bid/ask data.""" + waited = 0 + interval = 0.5 + + while waited < timeout: + if trading_pair in tracker.order_books: + ob = tracker.order_books[trading_pair] + try: + bids, asks = ob.snapshot + if len(bids) > 0 and len(asks) > 0: + logger.info( + f"Order book for {trading_pair} ready with " + f"{len(bids)} bids and {len(asks)} asks" + ) + return True + except Exception: + pass + await asyncio.sleep(interval) + waited += interval + + logger.warning(f"Timeout waiting for {trading_pair} order book") + return False + + # ========================================================================= + # Trading Connector Creation (internal) + # ========================================================================= + + async def _create_and_initialize_trading_connector( + self, + account_name: str, + connector_name: str + ) -> ConnectorBase: + """Create and fully initialize a trading connector.""" + # Authenticate and create connector + connector = self._create_trading_connector(account_name, connector_name) + + # Initialize symbol map and trading rules + await connector._initialize_trading_pair_symbol_map() + await connector._update_trading_rules() + await connector._update_balances() + + # Perpetual-specific setup + if self._is_perpetual_connector(connector): + if PositionMode.HEDGE in connector.supported_position_modes(): + connector.set_position_mode(PositionMode.HEDGE) + await connector._update_positions() + + # Load existing orders from database + if self.db_manager: + await self._load_existing_orders(connector, account_name, connector_name) + + # Setup order and funding recorders + cache_key = f"{account_name}:{connector_name}" + if self.db_manager and cache_key not in self._orders_recorders: + from services.orders_recorder import OrdersRecorder + orders_recorder = OrdersRecorder(self.db_manager, account_name, connector_name) + orders_recorder.start(connector) + 
self._orders_recorders[cache_key] = orders_recorder + + if self._is_perpetual_connector(connector): + from services.funding_recorder import FundingRecorder + funding_recorder = FundingRecorder(self.db_manager, account_name, connector_name) + funding_recorder.start(connector) + self._funding_recorders[cache_key] = funding_recorder + + # Initialize metrics + self._initialize_metrics(connector, account_name, connector_name, cache_key) + + # Start network tasks + await self._start_connector_network(connector) + + # Initial state update + await self._update_connector_state(connector, connector_name, account_name) + + logger.info(f"Initialized trading connector {connector_name} for {account_name}") + return connector + + def _create_trading_connector( + self, + account_name: str, + connector_name: str + ) -> ConnectorBase: + """Create a trading connector with API keys.""" + BackendAPISecurity.login_account( + account_name=account_name, + secrets_manager=self.secrets_manager + ) + + conn_setting = self._conn_settings[connector_name] + keys = BackendAPISecurity.api_keys(connector_name) + + init_params = conn_setting.conn_init_parameters( + trading_pairs=[], + trading_required=True, + api_keys=keys, + ) + + connector_class = get_connector_class(connector_name) + return connector_class(**init_params) + + def _create_data_connector(self, connector_name: str) -> ConnectorBase: + """Create a non-authenticated data connector.""" + conn_setting = self._conn_settings.get(connector_name) + if not conn_setting: + raise ValueError(f"Connector {connector_name} not found") + + # Get config keys but don't use real API keys + connector_config = AllConnectorSettings.get_connector_config_keys(connector_name) + if getattr(connector_config, "use_auth_for_public_endpoints", False): + api_keys = api_keys_from_connector_config_map( + ClientConfigAdapter(connector_config) + ) + elif connector_config is not None: + api_keys = { + key: "" + for key in connector_config.__class__.model_fields.keys() 
+ if key != "connector" + } + else: + api_keys = {} + + init_params = conn_setting.conn_init_parameters( + trading_pairs=[], + trading_required=False, + api_keys=api_keys, + ) + + connector_class = get_connector_class(connector_name) + connector = connector_class(**init_params) + + logger.info(f"Created data connector: {connector_name}") + return connector + + # ========================================================================= + # Network and State Management + # ========================================================================= + + async def _start_connector_network(self, connector: ConnectorBase): + """Start connector network tasks.""" + try: + await self._stop_connector_network(connector) + + connector._trading_rules_polling_task = safe_ensure_future( + connector._trading_rules_polling_loop() + ) + connector._trading_fees_polling_task = safe_ensure_future( + connector._trading_fees_polling_loop() + ) + connector._user_stream_tracker_task = connector._create_user_stream_tracker_task() + connector._user_stream_event_listener_task = safe_ensure_future( + connector._user_stream_event_listener() + ) + connector._lost_orders_update_task = safe_ensure_future( + connector._lost_orders_update_polling_loop() + ) + + # NOTE: Order book tracker is started lazily when first trading pair is added + # (in _add_trading_pair_to_tracker). Starting it here with no subscriptions + # causes exchanges like Binance to immediately disconnect (close code 1008). 
+ + logger.debug(f"Started network tasks for connector") + + except Exception as e: + logger.error(f"Error starting connector network: {e}") + raise + + async def _stop_connector_network(self, connector: ConnectorBase): + """Stop connector network tasks.""" + tasks = [ + '_trading_rules_polling_task', + '_trading_fees_polling_task', + '_status_polling_task', + '_user_stream_tracker_task', + '_user_stream_event_listener_task', + '_lost_orders_update_task', + ] + + for task_name in tasks: + task = getattr(connector, task_name, None) + if task: + task.cancel() + setattr(connector, task_name, None) + + # Stop the order book tracker + if hasattr(connector, 'order_book_tracker') and connector.order_book_tracker: + tracker = connector.order_book_tracker + if hasattr(tracker, 'stop'): + tracker.stop() + + async def _update_connector_state( + self, + connector: ConnectorBase, + connector_name: str, + account_name: str = None + ): + """Update connector state (balances, rules, positions).""" + try: + connector._set_current_timestamp(time.time()) + await connector._update_balances() + await connector._update_trading_rules() + + if self._is_perpetual_connector(connector): + await connector._update_positions() + + if hasattr(connector, '_update_order_status') and connector.in_flight_orders: + await connector._update_order_status() + if account_name: + await self._sync_orders_to_database( + connector, account_name, connector_name + ) + + except Exception as e: + logger.error(f"Error updating connector state: {e}") + + async def update_all_trading_connector_states(self): + """Update state for all trading connectors.""" + for account_name, connectors in self._trading_connectors.items(): + for connector_name, connector in connectors.items(): + try: + await self._update_connector_state( + connector, connector_name, account_name + ) + except Exception as e: + logger.error( + f"Error updating {account_name}/{connector_name}: {e}" + ) + + async def 
initialize_all_trading_connectors(self): + """ + Initialize all trading connectors for all accounts at startup. + + This ensures that: + 1. All connectors are ready to use immediately + 2. Existing orders from database are loaded into in_flight_orders + 3. Order tracking and cancellation work without needing manual initialization + """ + # Get list of all accounts + accounts = fs_util.list_folders('credentials') + + total_initialized = 0 + for account_name in accounts: + # Get all connector credentials for this account + connector_names = self.list_available_credentials(account_name) + + for connector_name in connector_names: + try: + logger.info(f"Initializing connector: {account_name}/{connector_name}") + await self.get_trading_connector(account_name, connector_name) + total_initialized += 1 + except Exception as e: + logger.error(f"Failed to initialize {account_name}/{connector_name}: {e}") + # Continue with other connectors even if one fails + continue + + logger.info(f"Initialized {total_initialized} trading connectors across {len(accounts)} accounts") + + # ========================================================================= + # Order Management + # ========================================================================= + + async def _load_existing_orders( + self, + connector: ConnectorBase, + account_name: str, + connector_name: str + ): + """Load existing orders from database into connector.""" + try: + from database import OrderRepository + + async with self.db_manager.get_session_context() as session: + order_repo = OrderRepository(session) + active_orders = await order_repo.get_active_orders( + account_name=account_name, + connector_name=connector_name + ) + + for order_record in active_orders: + try: + in_flight_order = self._convert_db_order_to_in_flight(order_record) + connector.in_flight_orders[in_flight_order.client_order_id] = in_flight_order + except Exception as e: + logger.error(f"Error loading order {order_record.client_order_id}: {e}") 
+ + logger.info( + f"Loaded {len(connector.in_flight_orders)} orders for " + f"{account_name}/{connector_name}" + ) + + except Exception as e: + logger.error(f"Error loading orders from database: {e}") + + async def _sync_orders_to_database( + self, + connector: ConnectorBase, + account_name: str, + connector_name: str + ): + """Sync connector's in_flight_orders state to database.""" + if not self.db_manager: + return + + terminal_states = [ + OrderState.FILLED, OrderState.CANCELED, + OrderState.FAILED, OrderState.COMPLETED + ] + orders_to_remove = [] + + for client_order_id, order in list(connector.in_flight_orders.items()): + try: + from database import OrderRepository + + async with self.db_manager.get_session_context() as session: + order_repo = OrderRepository(session) + db_order = await order_repo.get_order_by_client_id(client_order_id) + + if db_order: + new_status = self._map_order_state_to_status(order.current_state) + if db_order.status != new_status: + await order_repo.update_order_status(client_order_id, new_status) + + if order.current_state in terminal_states: + orders_to_remove.append(client_order_id) + + except Exception as e: + logger.error(f"Error syncing order {client_order_id}: {e}") + + for order_id in orders_to_remove: + connector.in_flight_orders.pop(order_id, None) + + async def sync_all_orders_to_database(self): + """ + Sync connector's in_flight_orders state to database for all trading connectors. + + The connector's built-in polling already updates in_flight_orders from the exchange. + This method syncs that state to our database and cleans up closed orders. 
+ """ + for account_name, connectors in self._trading_connectors.items(): + for connector_name, connector in connectors.items(): + try: + if not connector.in_flight_orders: + continue + await self._sync_orders_to_database(connector, account_name, connector_name) + logger.debug(f"Synced order state to DB for {account_name}/{connector_name}") + except Exception as e: + logger.error(f"Error syncing order state for {account_name}/{connector_name}: {e}") + + def _convert_db_order_to_in_flight(self, order_record) -> InFlightOrder: + """Convert database order to InFlightOrder.""" + status_mapping = { + "SUBMITTED": OrderState.PENDING_CREATE, + "OPEN": OrderState.OPEN, + "PARTIALLY_FILLED": OrderState.PARTIALLY_FILLED, + "FILLED": OrderState.FILLED, + "CANCELLED": OrderState.CANCELED, + "FAILED": OrderState.FAILED, + } + + order_state = status_mapping.get(order_record.status, OrderState.PENDING_CREATE) + + try: + order_type = OrderType[order_record.order_type] + except (KeyError, ValueError): + order_type = OrderType.LIMIT + + try: + trade_type = TradeType[order_record.trade_type] + except (KeyError, ValueError): + trade_type = TradeType.BUY + + creation_timestamp = ( + order_record.created_at.timestamp() + if order_record.created_at else time.time() + ) + + in_flight_order = InFlightOrder( + client_order_id=order_record.client_order_id, + trading_pair=order_record.trading_pair, + order_type=order_type, + trade_type=trade_type, + amount=Decimal(str(order_record.amount)), + creation_timestamp=creation_timestamp, + price=Decimal(str(order_record.price)) if order_record.price else None, + exchange_order_id=order_record.exchange_order_id, + initial_state=order_state, + leverage=1, + position=PositionAction.NIL, + ) + + in_flight_order.current_state = order_state + if order_record.filled_amount: + in_flight_order.executed_amount_base = Decimal(str(order_record.filled_amount)) + + return in_flight_order + + def _map_order_state_to_status(self, order_state: OrderState) -> str: + 
"""Map OrderState to database status string.""" + mapping = { + OrderState.PENDING_CREATE: "SUBMITTED", + OrderState.OPEN: "OPEN", + OrderState.PENDING_CANCEL: "PENDING_CANCEL", + OrderState.CANCELED: "CANCELLED", + OrderState.PARTIALLY_FILLED: "PARTIALLY_FILLED", + OrderState.FILLED: "FILLED", + OrderState.FAILED: "FAILED", + OrderState.PENDING_APPROVAL: "SUBMITTED", + OrderState.APPROVED: "SUBMITTED", + OrderState.CREATED: "SUBMITTED", + OrderState.COMPLETED: "FILLED", + } + return mapping.get(order_state, "SUBMITTED") + + # ========================================================================= + # Metrics + # ========================================================================= + + def _initialize_metrics( + self, + connector: ConnectorBase, + account_name: str, + connector_name: str, + cache_key: str + ): + """Initialize trade volume metrics collector.""" + if cache_key in self._metrics_collectors: + return + + if "_paper_trade" in connector_name: + return + + try: + instance_id = f"{account_name}_hbotapi" + rate_provider = RateOracle.get_instance() + + metrics_collector = TradeVolumeMetricCollector( + connector=connector, + activation_interval=self.METRICS_ACTIVATION_INTERVAL, + rate_provider=rate_provider, + instance_id=instance_id, + valuation_token=self.METRICS_VALUATION_TOKEN + ) + metrics_collector.start() + self._metrics_collectors[cache_key] = metrics_collector + + except Exception as e: + logger.warning(f"Failed to init metrics for {connector_name}: {e}") + + # ========================================================================= + # Credentials and Configuration + # ========================================================================= + + async def update_connector_keys( + self, + account_name: str, + connector_name: str, + keys: dict + ) -> ConnectorBase: + """Update API keys and recreate connector.""" + if not BackendAPISecurity.login_account( + account_name=account_name, + secrets_manager=self.secrets_manager + ): + raise 
ValueError(f"Failed to authenticate for {account_name}") + + connector_config = HummingbotAPIConfigAdapter( + AllConnectorSettings.get_connector_config_keys(connector_name) + ) + + for key, value in keys.items(): + setattr(connector_config, key, value) + + BackendAPISecurity.update_connector_keys(account_name, connector_config) + BackendAPISecurity.decrypt_all(account_name=account_name) + + # Clear old connector + self.clear_trading_connector(account_name, connector_name) + + # Create new connector + return await self.get_trading_connector(account_name, connector_name) + + def clear_trading_connector( + self, + account_name: Optional[str] = None, + connector_name: Optional[str] = None + ): + """Clear trading connector from cache.""" + if account_name and connector_name: + if account_name in self._trading_connectors: + self._trading_connectors[account_name].pop(connector_name, None) + elif account_name: + self._trading_connectors.pop(account_name, None) + else: + self._trading_connectors.clear() + + def list_account_connectors(self, account_name: str) -> List[str]: + """List initialized connectors for an account.""" + return list(self._trading_connectors.get(account_name, {}).keys()) + + def list_available_credentials(self, account_name: str) -> List[str]: + """List connector credentials available for an account.""" + try: + files = fs_util.list_files(f"credentials/{account_name}/connectors") + return [f.replace(".yml", "") for f in files if f.endswith(".yml")] + except FileNotFoundError: + return [] + + @staticmethod + def get_connector_config_map(connector_name: str): + """Get connector config field info.""" + from typing import Literal, get_args, get_origin + + connector_config = HummingbotAPIConfigAdapter( + AllConnectorSettings.get_connector_config_keys(connector_name) + ) + fields_info = {} + + for key, field in connector_config.hb_config.model_fields.items(): + if key == "connector": + continue + + field_type = field.annotation + type_name = 
getattr(field_type, "__name__", str(field_type)) + allowed_values = None + + origin = get_origin(field_type) + args = get_args(field_type) + + if origin is Literal: + type_name = "Literal" + allowed_values = list(args) + elif origin is not None: + if type(None) in args: + actual_types = [arg for arg in args if arg is not type(None)] + if actual_types: + inner_type = actual_types[0] + inner_origin = get_origin(inner_type) + inner_args = get_args(inner_type) + if inner_origin is Literal: + type_name = "Literal" + allowed_values = list(inner_args) + else: + type_name = getattr(inner_type, "__name__", str(inner_type)) + else: + type_name = str(field_type) + + field_info = {"type": type_name, "required": field.is_required()} + if allowed_values is not None: + field_info["allowed_values"] = allowed_values + fields_info[key] = field_info + + return fields_info + + # ========================================================================= + # Cleanup + # ========================================================================= + + async def stop_trading_connector(self, account_name: str, connector_name: str): + """Stop a trading connector and its services.""" + cache_key = f"{account_name}:{connector_name}" + + # Stop recorders + if cache_key in self._orders_recorders: + try: + await self._orders_recorders[cache_key].stop() + del self._orders_recorders[cache_key] + except Exception as e: + logger.error(f"Error stopping orders recorder: {e}") + + if cache_key in self._funding_recorders: + try: + await self._funding_recorders[cache_key].stop() + del self._funding_recorders[cache_key] + except Exception as e: + logger.error(f"Error stopping funding recorder: {e}") + + if cache_key in self._metrics_collectors: + try: + self._metrics_collectors[cache_key].stop() + del self._metrics_collectors[cache_key] + except Exception as e: + logger.error(f"Error stopping metrics: {e}") + + # Stop connector network + if account_name in self._trading_connectors: + connector = 
self._trading_connectors[account_name].get(connector_name) + if connector: + await self._stop_connector_network(connector) + del self._trading_connectors[account_name][connector_name] + + logger.info(f"Stopped trading connector {account_name}/{connector_name}") + + async def stop_all(self): + """Stop all connectors and services.""" + # Stop all trading connectors + for account_name, connectors in list(self._trading_connectors.items()): + for connector_name in list(connectors.keys()): + await self.stop_trading_connector(account_name, connector_name) + + # Stop data connectors + for connector_name, connector in self._data_connectors.items(): + try: + await connector.stop_network() + except Exception as e: + logger.error(f"Error stopping data connector {connector_name}: {e}") + + self._data_connectors.clear() + self._data_connectors_started.clear() + + logger.info("Stopped all connectors") diff --git a/utils/connector_manager.py b/utils/connector_manager.py deleted file mode 100644 index d9ca6e29..00000000 --- a/utils/connector_manager.py +++ /dev/null @@ -1,686 +0,0 @@ -import asyncio -import logging -import time -from decimal import Decimal -from typing import Dict, List, Optional - -# Create module-specific logger -logger = logging.getLogger(__name__) - -from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger -from hummingbot.client.config.config_helpers import get_connector_class -from hummingbot.client.settings import AllConnectorSettings -from hummingbot.connector.connector_base import ConnectorBase -from hummingbot.core.data_type.common import OrderType, PositionAction, PositionMode, TradeType -from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState -from hummingbot.core.utils.async_utils import safe_ensure_future - -from utils.file_system import fs_util -from utils.hummingbot_api_config_adapter import HummingbotAPIConfigAdapter -from utils.security import BackendAPISecurity - - -class ConnectorManager: - """ - Manages 
the creation and caching of exchange connectors. - Handles connector configuration and initialization. - This is the single source of truth for all connector instances. - """ - - def __init__(self, secrets_manager: ETHKeyFileSecretManger, db_manager=None): - self.secrets_manager = secrets_manager - self.db_manager = db_manager - self._connector_cache: Dict[str, ConnectorBase] = {} - self._orders_recorders: Dict[str, any] = {} - self._funding_recorders: Dict[str, any] = {} - self._status_polling_tasks: Dict[str, asyncio.Task] = {} - - async def get_connector(self, account_name: str, connector_name: str): - """ - Get the connector object for the specified account and connector. - Uses caching to avoid recreating connectors unnecessarily. - Ensures proper initialization including position mode setup. - - :param account_name: The name of the account. - :param connector_name: The name of the connector. - :return: The connector object. - """ - cache_key = f"{account_name}:{connector_name}" - - if cache_key in self._connector_cache: - return self._connector_cache[cache_key] - - # Create connector with full initialization - connector = await self._create_and_initialize_connector(account_name, connector_name) - return connector - - def _create_connector(self, account_name: str, connector_name: str): - """ - Create a new connector instance. - - :param account_name: The name of the account. - :param connector_name: The name of the connector. - :return: The connector object. 
- """ - BackendAPISecurity.login_account(account_name=account_name, secrets_manager=self.secrets_manager) - conn_setting = AllConnectorSettings.get_connector_settings()[connector_name] - keys = BackendAPISecurity.api_keys(connector_name) - - # Debug logging - logger.info(f"Creating connector {connector_name} for account {account_name}") - logger.debug(f"API keys retrieved: {list(keys.keys()) if keys else 'None'}") - - init_params = conn_setting.conn_init_parameters( - trading_pairs=[], - trading_required=True, - api_keys=keys, - ) - - # Debug logging - logger.debug(f"Init params keys: {list(init_params.keys())}") - - connector_class = get_connector_class(connector_name) - connector = connector_class(**init_params) - return connector - - def clear_cache(self, account_name: Optional[str] = None, connector_name: Optional[str] = None): - """ - Clear the connector cache. - - :param account_name: If provided, only clear cache for this account. - :param connector_name: If provided with account_name, only clear this specific connector. - """ - if account_name and connector_name: - cache_key = f"{account_name}:{connector_name}" - self._connector_cache.pop(cache_key, None) - elif account_name: - # Clear all connectors for this account - keys_to_remove = [k for k in self._connector_cache.keys() if k.startswith(f"{account_name}:")] - for key in keys_to_remove: - self._connector_cache.pop(key) - else: - # Clear entire cache - self._connector_cache.clear() - - @staticmethod - def get_connector_config_map(connector_name: str): - """ - Get the connector config map for the specified connector with type information. - - :param connector_name: The name of the connector. - :return: Dictionary mapping field names to their type information. 
- """ - from typing import Literal, get_args, get_origin - - connector_config = HummingbotAPIConfigAdapter(AllConnectorSettings.get_connector_config_keys(connector_name)) - fields_info = {} - - for key, field in connector_config.hb_config.model_fields.items(): - if key == "connector": - continue - - # Get the type annotation - field_type = field.annotation - type_name = getattr(field_type, "__name__", str(field_type)) - allowed_values = None - - # Handle Optional and Literal types - origin = get_origin(field_type) - args = get_args(field_type) - - if origin is Literal: - # It's a Literal type, extract the allowed values - type_name = "Literal" - allowed_values = list(args) - elif origin is not None: - # Handle Union types (Optional is Union[X, None]) - if type(None) in args: - # It's an Optional type, get the actual type - actual_types = [arg for arg in args if arg is not type(None)] - if actual_types: - inner_type = actual_types[0] - inner_origin = get_origin(inner_type) - inner_args = get_args(inner_type) - - if inner_origin is Literal: - # Optional[Literal[...]] - type_name = "Literal" - allowed_values = list(inner_args) - else: - type_name = getattr(inner_type, "__name__", str(inner_type)) - else: - type_name = str(field_type) - - field_info = { - "type": type_name, - "required": field.is_required(), - } - - if allowed_values is not None: - field_info["allowed_values"] = allowed_values - - fields_info[key] = field_info - - return fields_info - - async def update_connector_keys(self, account_name: str, connector_name: str, keys: dict): - """ - Update the API keys for a connector and refresh the connector instance. - - :param account_name: The name of the account. - :param connector_name: The name of the connector. - :param keys: Dictionary of API keys to update. - :return: The updated connector instance. 
- """ - if not BackendAPISecurity.login_account(account_name=account_name, secrets_manager=self.secrets_manager): - raise ValueError(f"Failed to authenticate for account '{account_name}'. Password validation failed.") - connector_config = HummingbotAPIConfigAdapter(AllConnectorSettings.get_connector_config_keys(connector_name)) - - for key, value in keys.items(): - setattr(connector_config, key, value) - - BackendAPISecurity.update_connector_keys(account_name, connector_config) - - # Re-decrypt all credentials to ensure the new keys are available - BackendAPISecurity.decrypt_all(account_name=account_name) - - # Clear the cache for this connector to force recreation with new keys - self.clear_cache(account_name, connector_name) - - # Create and return new connector instance - new_connector = await self.get_connector(account_name, connector_name) - - return new_connector - - def list_account_connectors(self, account_name: str) -> List[str]: - """ - List all initialized connectors for a specific account. - - :param account_name: The name of the account. - :return: List of connector names. - """ - connectors = [] - for cache_key in self._connector_cache.keys(): - acc_name, conn_name = cache_key.split(":", 1) - if acc_name == account_name: - connectors.append(conn_name) - return connectors - - def get_all_connectors(self) -> Dict[str, Dict[str, ConnectorBase]]: - """ - Get all connectors organized by account. - - :return: Dictionary mapping account names to their connectors. - """ - result = {} - for cache_key, connector in self._connector_cache.items(): - account_name, connector_name = cache_key.split(":", 1) - if account_name not in result: - result[account_name] = {} - result[account_name][connector_name] = connector - return result - - def is_connector_initialized(self, account_name: str, connector_name: str) -> bool: - """ - Check if a connector is already initialized and cached. - - :param account_name: The name of the account. 
- :param connector_name: The name of the connector. - :return: True if the connector is initialized, False otherwise. - """ - cache_key = f"{account_name}:{connector_name}" - return cache_key in self._connector_cache - - async def _create_and_initialize_connector(self, account_name: str, connector_name: str) -> ConnectorBase: - """ - Create and fully initialize a connector with all necessary setup. - This includes creating the connector, starting its network, setting up order recording, - and configuring position mode for perpetual connectors. - - :param account_name: The name of the account. - :param connector_name: The name of the connector. - :return: The initialized connector instance. - """ - cache_key = f"{account_name}:{connector_name}" - # Create the base connector - connector = self._create_connector(account_name, connector_name) - - # Initialize symbol map - await connector._initialize_trading_pair_symbol_map() - - # Update trading rules - await connector._update_trading_rules() - - # Update initial balances - await connector._update_balances() - - # Set default position mode to HEDGE for perpetual connectors - if "_perpetual" in connector_name: - if PositionMode.HEDGE in connector.supported_position_modes(): - connector.set_position_mode(PositionMode.HEDGE) - await connector._update_positions() - - self._connector_cache[cache_key] = connector - - # Load existing orders from database before starting network - if self.db_manager: - await self._load_existing_orders_from_database(connector, account_name, connector_name) - - # Start order tracking if db_manager is available - if self.db_manager: - if cache_key not in self._orders_recorders: - # Import OrdersRecorder dynamically to avoid circular imports - from services.orders_recorder import OrdersRecorder - - # Create and start orders recorder - orders_recorder = OrdersRecorder(self.db_manager, account_name, connector_name) - orders_recorder.start(connector) - self._orders_recorders[cache_key] = 
orders_recorder - - # Start funding tracking for perpetual connectors - if "_perpetual" in connector_name and cache_key not in self._funding_recorders: - # Import FundingRecorder dynamically to avoid circular imports - from services.funding_recorder import FundingRecorder - - # Create and start funding recorder - funding_recorder = FundingRecorder(self.db_manager, account_name, connector_name) - funding_recorder.start(connector) - self._funding_recorders[cache_key] = funding_recorder - - # Start network manually without clock system - await self._start_connector_network(connector) - - # Perform initial update of connector state - await self._update_connector_state(connector, connector_name, account_name) - - logger.info(f"Initialized connector {connector_name} for account {account_name}") - return connector - - async def _start_connector_network(self, connector: ConnectorBase): - """ - Start connector network tasks manually without clock system. - Based on the original start_network method but without order book tracker. 
- """ - try: - # Stop any existing network tasks - await self._stop_connector_network(connector) - - # Start trading rules polling - connector._trading_rules_polling_task = safe_ensure_future(connector._trading_rules_polling_loop()) - - # Start trading fees polling - connector._trading_fees_polling_task = safe_ensure_future(connector._trading_fees_polling_loop()) - - # Start user stream tracker (websocket connection) - connector._user_stream_tracker_task = connector._create_user_stream_tracker_task() - - # Start user stream event listener - connector._user_stream_event_listener_task = safe_ensure_future(connector._user_stream_event_listener()) - - # Start lost orders update task - connector._lost_orders_update_task = safe_ensure_future(connector._lost_orders_update_polling_loop()) - - logger.info(f"Started connector network tasks for {connector}") - - except Exception as e: - logger.error(f"Error starting connector network: {e}") - raise - - async def _stop_connector_network(self, connector: ConnectorBase): - """ - Stop connector network tasks. 
- """ - try: - # Stop trading rules polling - if connector._trading_rules_polling_task: - connector._trading_rules_polling_task.cancel() - connector._trading_rules_polling_task = None - - # Stop trading fees polling - if connector._trading_fees_polling_task: - connector._trading_fees_polling_task.cancel() - connector._trading_fees_polling_task = None - - # Stop status polling - if connector._status_polling_task: - connector._status_polling_task.cancel() - connector._status_polling_task = None - - # Stop user stream tracker - if connector._user_stream_tracker_task: - connector._user_stream_tracker_task.cancel() - connector._user_stream_tracker_task = None - - # Stop user stream event listener - if connector._user_stream_event_listener_task: - connector._user_stream_event_listener_task.cancel() - connector._user_stream_event_listener_task = None - - # Stop lost orders update task - if connector._lost_orders_update_task: - connector._lost_orders_update_task.cancel() - connector._lost_orders_update_task = None - - except Exception as e: - logger.error(f"Error stopping connector network: {e}") - - async def _update_connector_state(self, connector: ConnectorBase, connector_name: str, account_name: str = None): - """ - Update connector state including balances, orders, positions, and trading rules. - This function can be called both during initialization and periodically. 
- - :param connector: The connector instance - :param connector_name: The name of the connector - :param account_name: The name of the account (optional, used for order sync) - """ - try: - # Update current timestamp - connector._set_current_timestamp(time.time()) - - # Update balances - await connector._update_balances() - - # Update trading rules - await connector._update_trading_rules() - - # Update positions for perpetual connectors - if "_perpetual" in connector_name: - await connector._update_positions() - - # Update order status for in-flight orders - if hasattr(connector, '_update_order_status') and connector.in_flight_orders: - await connector._update_order_status() - - # Sync updated order state to database and cleanup closed orders - if account_name: - await self._sync_orders_to_database(connector, account_name, connector_name) - - logger.debug(f"Updated connector state for {connector_name}") - - except Exception as e: - logger.error(f"Error updating connector state for {connector_name}: {e}") - - async def update_all_connector_states(self): - """ - Update state for all cached connectors. - This can be called periodically to refresh connector data. - """ - for cache_key, connector in self._connector_cache.items(): - account_name, connector_name = cache_key.split(":", 1) - try: - await self._update_connector_state(connector, connector_name, account_name) - except Exception as e: - logger.error(f"Error updating state for {account_name}/{connector_name}: {e}") - - async def sync_order_state_to_database_for_all_connectors(self): - """ - Sync connector's in_flight_orders state to database for all connectors. - - The connector's built-in _lost_orders_update_polling_loop already polls the exchange - and updates in_flight_orders. This method just syncs that state to our database - and cleans up closed orders. Called every minute. 
- """ - for cache_key, connector in self._connector_cache.items(): - account_name, connector_name = cache_key.split(":", 1) - try: - # Only process if there are in-flight orders - if not connector.in_flight_orders: - continue - - # Sync connector state to database and cleanup closed orders - await self._sync_orders_to_database(connector, account_name, connector_name) - logger.debug(f"Synced order state to DB for {account_name}/{connector_name}") - - except Exception as e: - logger.error(f"Error syncing order state for {account_name}/{connector_name}: {e}") - - async def _load_existing_orders_from_database(self, connector: ConnectorBase, account_name: str, connector_name: str): - """ - Load existing active orders from database and add them to connector's in_flight_orders. - This ensures that orders placed before an API restart can still be managed. - - :param connector: The connector instance to load orders into - :param account_name: The name of the account - :param connector_name: The name of the connector - """ - try: - # Import OrderRepository dynamically to avoid circular imports - from database import OrderRepository - - async with self.db_manager.get_session_context() as session: - order_repo = OrderRepository(session) - - # Get active orders from database for this account/connector - active_orders = await order_repo.get_active_orders(account_name=account_name, connector_name=connector_name) - - logger.info(f"Loading {len(active_orders)} existing active orders for {account_name}/{connector_name}") - - for order_record in active_orders: - try: - # Convert database order to InFlightOrder - in_flight_order = self._convert_db_order_to_in_flight_order(order_record) - - # Add to connector's in_flight_orders - connector.in_flight_orders[in_flight_order.client_order_id] = in_flight_order - - logger.debug(f"Loaded order {in_flight_order.client_order_id} from database into connector") - - except Exception as e: - logger.error(f"Error converting database order 
{order_record.client_order_id} to InFlightOrder: {e}") - continue - - logger.info( - f"Successfully loaded {len(connector.in_flight_orders)} in-flight orders for {account_name}/{connector_name}" - ) - - except Exception as e: - logger.error(f"Error loading existing orders from database for {account_name}/{connector_name}: {e}") - - def _map_order_state_to_status(self, order_state: OrderState) -> str: - """ - Map Hummingbot OrderState to database status string. - - :param order_state: The OrderState enum value from Hummingbot - :return: Database status string - """ - status_mapping = { - OrderState.PENDING_CREATE: "SUBMITTED", - OrderState.OPEN: "OPEN", - OrderState.PENDING_CANCEL: "PENDING_CANCEL", - OrderState.CANCELED: "CANCELLED", - OrderState.PARTIALLY_FILLED: "PARTIALLY_FILLED", - OrderState.FILLED: "FILLED", - OrderState.FAILED: "FAILED", - OrderState.PENDING_APPROVAL: "SUBMITTED", - OrderState.APPROVED: "SUBMITTED", - OrderState.CREATED: "SUBMITTED", - OrderState.COMPLETED: "FILLED", - } - return status_mapping.get(order_state, "SUBMITTED") - - async def _sync_orders_to_database(self, connector: ConnectorBase, account_name: str, connector_name: str): - """ - Sync connector's in_flight_orders state to database and cleanup closed orders. - - This method ensures that the database reflects the current state of orders - as reported by the exchange, and removes terminal orders from in_flight_orders. 
- - :param connector: The connector instance - :param account_name: The name of the account - :param connector_name: The name of the connector - """ - if not self.db_manager: - return - - terminal_states = [OrderState.FILLED, OrderState.CANCELED, OrderState.FAILED, OrderState.COMPLETED] - orders_to_remove = [] - - # Create a copy of keys to iterate safely while potentially modifying the dict - order_ids = list(connector.in_flight_orders.keys()) - - for client_order_id in order_ids: - order = connector.in_flight_orders.get(client_order_id) - if not order: - continue - - try: - # Import OrderRepository dynamically to avoid circular imports - from database import OrderRepository - - async with self.db_manager.get_session_context() as session: - order_repo = OrderRepository(session) - db_order = await order_repo.get_order_by_client_id(client_order_id) - - if db_order: - # Map connector state to database status - new_status = self._map_order_state_to_status(order.current_state) - - # Only update if status changed - if db_order.status != new_status: - await order_repo.update_order_status(client_order_id, new_status) - logger.info(f"Synced order {client_order_id} status: {db_order.status} -> {new_status}") - - # Mark terminal orders for removal from in_flight_orders - if order.current_state in terminal_states: - orders_to_remove.append(client_order_id) - - except Exception as e: - logger.error(f"Error syncing order {client_order_id} to database: {e}") - - # Remove terminal orders from in_flight_orders - for order_id in orders_to_remove: - connector.in_flight_orders.pop(order_id, None) - logger.debug(f"Removed closed order {order_id} from in_flight_orders") - - if orders_to_remove: - logger.info(f"Cleaned up {len(orders_to_remove)} terminal orders from {account_name}/{connector_name}") - - def _convert_db_order_to_in_flight_order(self, order_record) -> InFlightOrder: - """ - Convert a database Order record to a Hummingbot InFlightOrder object. 
- - :param order_record: Database Order model instance - :return: InFlightOrder instance - """ - # Map database status to OrderState - status_mapping = { - "SUBMITTED": OrderState.PENDING_CREATE, - "OPEN": OrderState.OPEN, - "PARTIALLY_FILLED": OrderState.PARTIALLY_FILLED, - "FILLED": OrderState.FILLED, - "CANCELLED": OrderState.CANCELED, - "FAILED": OrderState.FAILED, - } - - # Get the appropriate OrderState - order_state = status_mapping.get(order_record.status, OrderState.PENDING_CREATE) - - # Convert string enums to proper enum instances - try: - order_type = OrderType[order_record.order_type] - except (KeyError, ValueError): - logger.warning(f"Unknown order type '{order_record.order_type}', defaulting to LIMIT") - order_type = OrderType.LIMIT - - try: - trade_type = TradeType[order_record.trade_type] - except (KeyError, ValueError): - logger.warning(f"Unknown trade type '{order_record.trade_type}', defaulting to BUY") - trade_type = TradeType.BUY - - # Convert creation timestamp - use order creation time or current time as fallback - creation_timestamp = order_record.created_at.timestamp() if order_record.created_at else time.time() - - # Create InFlightOrder instance - in_flight_order = InFlightOrder( - client_order_id=order_record.client_order_id, - trading_pair=order_record.trading_pair, - order_type=order_type, - trade_type=trade_type, - amount=Decimal(str(order_record.amount)), - creation_timestamp=creation_timestamp, - price=Decimal(str(order_record.price)) if order_record.price else None, - exchange_order_id=order_record.exchange_order_id, - initial_state=order_state, - leverage=1, # Default leverage - position=PositionAction.NIL, # Default position action - ) - - # Update current state and filled amount if order has progressed - in_flight_order.current_state = order_state - if order_record.filled_amount: - in_flight_order.executed_amount_base = Decimal(str(order_record.filled_amount)) - if order_record.average_fill_price: - 
in_flight_order.last_executed_quantity = Decimal(str(order_record.filled_amount or 0)) - in_flight_order.last_executed_price = Decimal(str(order_record.average_fill_price)) - - return in_flight_order - - async def stop_connector(self, account_name: str, connector_name: str): - """ - Stop a connector and its associated services. - - :param account_name: The name of the account. - :param connector_name: The name of the connector. - """ - cache_key = f"{account_name}:{connector_name}" - - # Stop order recorder if exists - if cache_key in self._orders_recorders: - try: - await self._orders_recorders[cache_key].stop() - del self._orders_recorders[cache_key] - logger.info(f"Stopped order recorder for {account_name}/{connector_name}") - except Exception as e: - logger.error(f"Error stopping order recorder for {account_name}/{connector_name}: {e}") - - # Stop funding recorder if exists - if cache_key in self._funding_recorders: - try: - await self._funding_recorders[cache_key].stop() - del self._funding_recorders[cache_key] - logger.info(f"Stopped funding recorder for {account_name}/{connector_name}") - except Exception as e: - logger.error(f"Error stopping funding recorder for {account_name}/{connector_name}: {e}") - - # Stop manual status polling task if exists - if cache_key in self._status_polling_tasks: - try: - self._status_polling_tasks[cache_key].cancel() - del self._status_polling_tasks[cache_key] - logger.info(f"Stopped manual status polling for {account_name}/{connector_name}") - except Exception as e: - logger.error(f"Error stopping manual status polling for {account_name}/{connector_name}: {e}") - - # Stop connector network if exists - if cache_key in self._connector_cache: - try: - connector = self._connector_cache[cache_key] - await self._stop_connector_network(connector) - logger.info(f"Stopped connector network for {account_name}/{connector_name}") - except Exception as e: - logger.error(f"Error stopping connector network for 
{account_name}/{connector_name}: {e}") - - async def stop_all_connectors(self): - """ - Stop all connectors and their associated services. - """ - # Get all account/connector pairs - pairs = [(k.split(":", 1)[0], k.split(":", 1)[1]) for k in self._connector_cache.keys()] - - # Stop each connector - for account_name, connector_name in pairs: - await self.stop_connector(account_name, connector_name) - - def list_available_credentials(self, account_name: str) -> List[str]: - """ - List all available connector credentials for an account. - - :param account_name: The name of the account. - :return: List of connector names that have credentials. - """ - try: - files = fs_util.list_files(f"credentials/{account_name}/connectors") - return [file.replace(".yml", "") for file in files if file.endswith(".yml")] - except FileNotFoundError: - return [] From 1befbe052c8df86b46fc57b45bacf7909d17d713 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 20 Jan 2026 20:27:54 -0300 Subject: [PATCH 09/20] (feat) add trading service --- services/trading_service.py | 651 ++++++++++++++++++++++++++++++++++++ 1 file changed, 651 insertions(+) create mode 100644 services/trading_service.py diff --git a/services/trading_service.py b/services/trading_service.py new file mode 100644 index 00000000..c50bffd9 --- /dev/null +++ b/services/trading_service.py @@ -0,0 +1,651 @@ +""" +Trading Service - Centralized trading operations with executor-compatible interface. + +This service provides trading operations (buy, sell, cancel) using the +UnifiedConnectorService for connector management. 
+""" +import logging +import time +from decimal import Decimal +from typing import Dict, List, Optional, Set, TYPE_CHECKING + +from hummingbot.connector.connector_base import ConnectorBase +from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction + +if TYPE_CHECKING: + from services.unified_connector_service import UnifiedConnectorService + from services.market_data_service import MarketDataService + + +logger = logging.getLogger(__name__) + + +class AccountTradingInterface: + """ + ScriptStrategyBase-compatible interface for executor trading. + + This class provides the exact interface that Hummingbot executors expect + from a strategy object, backed by UnifiedConnectorService. + + Executors use the following interface from strategy: + - current_timestamp: float property + - buy(connector_name, trading_pair, amount, order_type, price, position_action) -> str + - sell(connector_name, trading_pair, amount, order_type, price, position_action) -> str + - cancel(connector_name, trading_pair, order_id) -> str + - get_active_orders(connector_name) -> List + + ExecutorBase also accesses: + - connectors: Dict[str, ConnectorBase] (accessed directly in ExecutorBase.__init__) + """ + + def __init__( + self, + connector_service: "UnifiedConnectorService", + market_data_service: "MarketDataService", + account_name: str + ): + """ + Initialize AccountTradingInterface. 
+ + Args: + connector_service: UnifiedConnectorService for connector access + market_data_service: MarketDataService for order book operations + account_name: Account to use for connectors + """ + self._connector_service = connector_service + self._market_data_service = market_data_service + self._account_name = account_name + + # Track active markets (connector_name -> set of trading_pairs) + self._markets: Dict[str, Set[str]] = {} + + # Timestamp tracking + self._current_timestamp: float = time.time() + + logger.info(f"AccountTradingInterface created for account: {account_name}") + + @property + def account_name(self) -> str: + """Return the account name for this trading interface.""" + return self._account_name + + @property + def connectors(self) -> Dict[str, ConnectorBase]: + """ + Return connectors for this account from the UnifiedConnectorService. + + This returns the actual connectors that are already initialized and running. + """ + return self._connector_service.get_account_connectors(self._account_name) + + @property + def markets(self) -> Dict[str, Set[str]]: + """Return active markets configuration.""" + return self._markets + + @property + def current_timestamp(self) -> float: + """Return current timestamp (updated by control loop).""" + return self._current_timestamp + + def update_timestamp(self): + """Update the current timestamp. Called by ExecutorService control loop.""" + self._current_timestamp = time.time() + + async def ensure_connector(self, connector_name: str) -> ConnectorBase: + """ + Ensure connector is loaded and available. + + Args: + connector_name: Name of the connector + + Returns: + The connector instance + """ + return await self._connector_service.get_trading_connector( + self._account_name, + connector_name + ) + + async def add_market( + self, + connector_name: str, + trading_pair: str, + order_book_timeout: float = 30.0 + ): + """ + Add a trading pair to active markets with full order book support. 
+ + This method ensures: + 1. Connector is loaded + 2. Order book is initialized and has valid data + 3. Rate sources are initialized for price feeds + + Args: + connector_name: Name of the connector + trading_pair: Trading pair to add + order_book_timeout: Timeout in seconds to wait for order book data + """ + await self.ensure_connector(connector_name) + + if connector_name not in self._markets: + self._markets[connector_name] = set() + + # Check if already tracking this pair AND order book is ready + if trading_pair in self._markets[connector_name]: + # Verify order book actually has data before returning early + connector = self.connectors.get(connector_name) + if connector and hasattr(connector, 'order_book_tracker'): + tracker = connector.order_book_tracker + if trading_pair in tracker.order_books: + try: + ob = tracker.order_books[trading_pair] + bids, asks = ob.snapshot + if len(bids) > 0 and len(asks) > 0: + logger.debug(f"Market {connector_name}/{trading_pair} already active with valid order book") + return + except Exception: + pass + # Order book not ready, need to re-initialize + logger.info(f"Market {connector_name}/{trading_pair} tracked but order book not ready, re-initializing") + + self._markets[connector_name].add(trading_pair) + + # Get connector from our account's connectors + connector = self.connectors.get(connector_name) + if not connector: + raise ValueError(f"Connector {connector_name} not available. 
Check credentials.") + + # Initialize order book via MarketDataService (uses best available connector) + logger.info(f"Initializing order book for {connector_name}/{trading_pair}") + success = await self._market_data_service.initialize_order_book( + connector_name=connector_name, + trading_pair=trading_pair, + account_name=self._account_name, + timeout=order_book_timeout + ) + + if success: + logger.info(f"Order book initialized successfully for {connector_name}/{trading_pair}") + else: + logger.warning(f"Order book initialization failed for {connector_name}/{trading_pair}") + + # Register trading pair with connector + self._register_trading_pair_with_connector(connector, trading_pair) + + logger.info(f"Market {connector_name}/{trading_pair} added to trading interface") + + async def remove_market( + self, + connector_name: str, + trading_pair: str, + remove_order_book: bool = True + ): + """ + Remove a trading pair from active markets. + + Args: + connector_name: Name of the connector + trading_pair: Trading pair to remove + remove_order_book: Whether to remove the order book (default True) + """ + if connector_name not in self._markets: + return + + self._markets[connector_name].discard(trading_pair) + if not self._markets[connector_name]: + del self._markets[connector_name] + + # Remove order book via MarketDataService + if remove_order_book: + try: + await self._market_data_service.remove_trading_pair( + connector_name=connector_name, + trading_pair=trading_pair, + account_name=self._account_name + ) + except Exception as e: + logger.warning(f"Failed to remove order book for {connector_name}/{trading_pair}: {e}") + + logger.info(f"Removed market {connector_name}/{trading_pair}") + + def _register_trading_pair_with_connector( + self, + connector: ConnectorBase, + trading_pair: str + ): + """ + Register a trading pair with the connector's internal structures. 
+ + Args: + connector: The connector instance + trading_pair: Trading pair to register + """ + logger.debug(f"Registering {trading_pair} with connector {type(connector).__name__}") + + if hasattr(connector, '_trading_pairs'): + tp_type = type(connector._trading_pairs).__name__ + + if isinstance(connector._trading_pairs, set): + connector._trading_pairs.add(trading_pair) + elif isinstance(connector._trading_pairs, list): + if trading_pair not in connector._trading_pairs: + connector._trading_pairs.append(trading_pair) + elif isinstance(connector._trading_pairs, dict): + if trading_pair not in connector._trading_pairs: + base, quote = trading_pair.split("-") + try: + from hummingbot.connector.exchange.paper_trade.trading_pair import TradingPair + connector._trading_pairs[trading_pair] = TradingPair( + trading_pair=f"{base}{quote}", + base_asset=base, + quote_asset=quote + ) + except ImportError: + connector._trading_pairs[trading_pair] = trading_pair + + # ======================================== + # ScriptStrategyBase-compatible methods + # These are called by executors via self._strategy.method() + # ======================================== + + def buy( + self, + connector_name: str, + trading_pair: str, + amount: Decimal, + order_type: OrderType, + price: Decimal = Decimal("NaN"), + position_action: PositionAction = PositionAction.NIL + ) -> str: + """ + Place a buy order. + + Args: + connector_name: Name of the connector + trading_pair: Trading pair + amount: Order amount in base currency + order_type: Type of order (LIMIT, MARKET, etc.) + price: Order price (for limit orders) + position_action: Position action for perpetuals + + Returns: + Client order ID + """ + connector = self.connectors.get(connector_name) + if not connector: + raise ValueError(f"Connector {connector_name} not loaded. 
Call ensure_connector first.") + + return connector.buy( + trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price, + position_action=position_action + ) + + def sell( + self, + connector_name: str, + trading_pair: str, + amount: Decimal, + order_type: OrderType, + price: Decimal = Decimal("NaN"), + position_action: PositionAction = PositionAction.NIL + ) -> str: + """ + Place a sell order. + + Args: + connector_name: Name of the connector + trading_pair: Trading pair + amount: Order amount in base currency + order_type: Type of order (LIMIT, MARKET, etc.) + price: Order price (for limit orders) + position_action: Position action for perpetuals + + Returns: + Client order ID + """ + connector = self.connectors.get(connector_name) + if not connector: + raise ValueError(f"Connector {connector_name} not loaded. Call ensure_connector first.") + + return connector.sell( + trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price, + position_action=position_action + ) + + def cancel( + self, + connector_name: str, + trading_pair: str, + order_id: str + ) -> str: + """ + Cancel an order. + + Args: + connector_name: Name of the connector + trading_pair: Trading pair + order_id: Client order ID to cancel + + Returns: + Client order ID that was cancelled + """ + connector = self.connectors.get(connector_name) + if not connector: + raise ValueError(f"Connector {connector_name} not loaded. Call ensure_connector first.") + + return connector.cancel(trading_pair=trading_pair, client_order_id=order_id) + + def get_active_orders(self, connector_name: str) -> List: + """ + Get active orders for a connector. 
+ + Args: + connector_name: Name of the connector + + Returns: + List of active in-flight orders + """ + connector = self.connectors.get(connector_name) + if not connector: + return [] + return list(connector.in_flight_orders.values()) + + # ======================================== + # Additional helper methods + # ======================================== + + def get_connector(self, connector_name: str) -> Optional[ConnectorBase]: + """ + Get a connector by name. + + Args: + connector_name: Name of the connector + + Returns: + The connector instance or None if not loaded + """ + return self.connectors.get(connector_name) + + def is_connector_loaded(self, connector_name: str) -> bool: + """ + Check if a connector is loaded. + + Args: + connector_name: Name of the connector + + Returns: + True if connector is loaded + """ + return connector_name in self.connectors + + def get_all_trading_pairs(self) -> Dict[str, Set[str]]: + """ + Get all active trading pairs by connector. + + Returns: + Dictionary mapping connector names to sets of trading pairs + """ + return {k: v.copy() for k, v in self._markets.items()} + + async def cleanup(self): + """ + Cleanup resources. Called when shutting down. + """ + self._markets.clear() + logger.info(f"AccountTradingInterface cleanup completed for account {self._account_name}") + + +class TradingService: + """ + Centralized trading service using UnifiedConnectorService. + + This service manages: + - Trading interfaces for each account (executor-compatible) + - Order placement and cancellation + - Position management for perpetuals + """ + + def __init__( + self, + connector_service: "UnifiedConnectorService", + market_data_service: "MarketDataService" + ): + """ + Initialize the TradingService. 
+ + Args: + connector_service: UnifiedConnectorService for connector access + market_data_service: MarketDataService for order book operations + """ + self._connector_service = connector_service + self._market_data_service = market_data_service + + # Trading interfaces per account (for executor use) + self._trading_interfaces: Dict[str, AccountTradingInterface] = {} + + logger.info("TradingService initialized") + + # ==================== Trading Interface ==================== + + def get_trading_interface(self, account_name: str) -> AccountTradingInterface: + """ + Get or create a trading interface for the specified account. + + This interface provides ScriptStrategyBase-compatible methods + that executors can use for trading operations. + + Args: + account_name: Account to get trading interface for + + Returns: + AccountTradingInterface instance for the account + """ + if account_name not in self._trading_interfaces: + self._trading_interfaces[account_name] = AccountTradingInterface( + connector_service=self._connector_service, + market_data_service=self._market_data_service, + account_name=account_name + ) + return self._trading_interfaces[account_name] + + def get_all_trading_interfaces(self) -> Dict[str, AccountTradingInterface]: + """Get all active trading interfaces.""" + return self._trading_interfaces.copy() + + # ==================== Direct Trading Operations ==================== + + async def place_order( + self, + account_name: str, + connector_name: str, + trading_pair: str, + trade_type: TradeType, + amount: Decimal, + order_type: OrderType, + price: Optional[Decimal] = None, + position_action: PositionAction = PositionAction.NIL + ) -> str: + """ + Place an order on an exchange. + + Args: + account_name: Account to use + connector_name: Exchange connector name + trading_pair: Trading pair + trade_type: BUY or SELL + amount: Order amount + order_type: LIMIT, MARKET, etc. 
+ price: Order price (required for LIMIT orders) + position_action: Position action for perpetuals + + Returns: + Client order ID + """ + interface = self.get_trading_interface(account_name) + await interface.ensure_connector(connector_name) + + if trade_type == TradeType.BUY: + return interface.buy( + connector_name=connector_name, + trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price if price else Decimal("NaN"), + position_action=position_action + ) + else: + return interface.sell( + connector_name=connector_name, + trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price if price else Decimal("NaN"), + position_action=position_action + ) + + async def cancel_order( + self, + account_name: str, + connector_name: str, + trading_pair: str, + order_id: str + ) -> str: + """ + Cancel an order. + + Args: + account_name: Account name + connector_name: Exchange connector name + trading_pair: Trading pair + order_id: Client order ID to cancel + + Returns: + Client order ID that was cancelled + """ + interface = self.get_trading_interface(account_name) + return interface.cancel(connector_name, trading_pair, order_id) + + def get_active_orders( + self, + account_name: str, + connector_name: str + ) -> List: + """ + Get active orders for an account/connector. + + Args: + account_name: Account name + connector_name: Exchange connector name + + Returns: + List of active orders + """ + interface = self.get_trading_interface(account_name) + return interface.get_active_orders(connector_name) + + # ==================== Position Management ==================== + + async def get_positions( + self, + account_name: str, + connector_name: str + ) -> Dict: + """ + Get positions for a perpetual connector. 
+ + Args: + account_name: Account name + connector_name: Exchange connector name + + Returns: + Dictionary of positions + """ + connector = await self._connector_service.get_trading_connector( + account_name, connector_name + ) + + if hasattr(connector, 'account_positions'): + return { + str(pos.trading_pair): { + "trading_pair": pos.trading_pair, + "position_side": pos.position_side.name, + "unrealized_pnl": float(pos.unrealized_pnl), + "entry_price": float(pos.entry_price), + "amount": float(pos.amount), + "leverage": pos.leverage + } + for pos in connector.account_positions.values() + } + return {} + + async def set_leverage( + self, + account_name: str, + connector_name: str, + trading_pair: str, + leverage: int + ) -> bool: + """ + Set leverage for a trading pair on a perpetual connector. + + Args: + account_name: Account name + connector_name: Exchange connector name + trading_pair: Trading pair + leverage: Leverage value + + Returns: + True if successful + """ + connector = await self._connector_service.get_trading_connector( + account_name, connector_name + ) + + if hasattr(connector, 'set_leverage'): + try: + await connector.set_leverage(trading_pair, leverage) + logger.info(f"Set leverage to {leverage}x for {trading_pair} on {connector_name}") + return True + except Exception as e: + logger.error(f"Error setting leverage: {e}") + return False + return False + + # ==================== Lifecycle ==================== + + async def stop(self): + """Stop all trading interfaces and cleanup resources.""" + logger.info("Stopping TradingService...") + + for account_name, interface in self._trading_interfaces.items(): + try: + await interface.cleanup() + except Exception as e: + logger.error(f"Error cleaning up interface for {account_name}: {e}") + + self._trading_interfaces.clear() + logger.info("TradingService stopped") + + def update_all_timestamps(self): + """Update timestamps for all trading interfaces. 
Called by executor control loop.""" + for interface in self._trading_interfaces.values(): + interface.update_timestamp() + + # ==================== Properties ==================== + + @property + def connector_service(self) -> "UnifiedConnectorService": + """Get the connector service instance.""" + return self._connector_service + + @property + def market_data_service(self) -> "MarketDataService": + """Get the market data service instance.""" + return self._market_data_service From 87a30e3748fd4d8897e93a2b89d3d297a52b9be7 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 20 Jan 2026 20:28:02 -0300 Subject: [PATCH 10/20] (feat) add executor service and router --- routers/executors.py | 461 ++++++++++++++++++++++++++ services/executor_service.py | 608 +++++++++++++++++++++++++++++++++++ 2 files changed, 1069 insertions(+) create mode 100644 routers/executors.py create mode 100644 services/executor_service.py diff --git a/routers/executors.py b/routers/executors.py new file mode 100644 index 00000000..80887e57 --- /dev/null +++ b/routers/executors.py @@ -0,0 +1,461 @@ +""" +Executor Router - REST API endpoints for dynamic executor management. + +This router enables running Hummingbot executors directly via API +without Docker containers or full strategy setup. 
+""" +import logging + +from fastapi import APIRouter, Depends, HTTPException +from starlette import status + +from deps import get_executor_service +from models.executors import ( + CreateExecutorRequest, + CreateExecutorResponse, + DeleteExecutorResponse, + ExecutorDetailResponse, + ExecutorFilterRequest, + ExecutorResponse, + ExecutorsSummaryResponse, + StopExecutorRequest, + StopExecutorResponse, +) +from models.pagination import PaginatedResponse +from services.executor_service import ExecutorService + +logger = logging.getLogger(__name__) + +router = APIRouter(tags=["Executors"], prefix="/executors") + + +@router.post("/", response_model=CreateExecutorResponse, status_code=status.HTTP_201_CREATED) +async def create_executor( + request: CreateExecutorRequest, + executor_service: ExecutorService = Depends(get_executor_service) +): + """ + Create and start a new executor. + + Supported executor types: + - **position_executor**: Single position with triple barrier (stop loss, take profit, time limit) + - **grid_executor**: Grid trading with multiple levels + - **dca_executor**: Dollar-cost averaging with multiple entry points + - **twap_executor**: Time-weighted average price execution + - **arbitrage_executor**: Cross-exchange arbitrage + - **xemm_executor**: Cross-exchange market making + - **order_executor**: Simple order execution + + The `executor_config` must include: + - `type`: One of the executor types above + - `connector_name`: Exchange connector (e.g., "binance", "binance_perpetual") + - `trading_pair`: Trading pair (e.g., "BTC-USDT") + - Additional type-specific configuration + + Returns the created executor ID and initial status. 
+ """ + try: + result = await executor_service.create_executor( + executor_config=request.executor_config, + account_name=request.account_name + ) + return CreateExecutorResponse(**result) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error creating executor: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Error creating executor: {str(e)}") + + +@router.post("/search", response_model=PaginatedResponse) +async def list_executors( + filter_request: ExecutorFilterRequest, + executor_service: ExecutorService = Depends(get_executor_service) +): + """ + Get list of active executors with optional filtering. + + Filters: + - `account_names`: Filter by specific accounts + - `connector_names`: Filter by connectors + - `trading_pairs`: Filter by trading pairs + - `executor_types`: Filter by executor types + - `status`: Filter by status (RUNNING, TERMINATED, etc.) + - `include_completed`: Include recently completed executors + + Returns paginated list of executor summaries. 
+ """ + try: + # Get filtered executors + executors = executor_service.get_executors( + account_name=filter_request.account_names[0] if filter_request.account_names else None, + connector_name=filter_request.connector_names[0] if filter_request.connector_names else None, + trading_pair=filter_request.trading_pairs[0] if filter_request.trading_pairs else None, + executor_type=filter_request.executor_types[0] if filter_request.executor_types else None, + status=filter_request.status, + include_completed=filter_request.include_completed + ) + + # Apply additional multi-value filters + if filter_request.account_names and len(filter_request.account_names) > 1: + executors = [e for e in executors if e.get("account_name") in filter_request.account_names] + if filter_request.connector_names and len(filter_request.connector_names) > 1: + executors = [e for e in executors if e.get("connector_name") in filter_request.connector_names] + if filter_request.trading_pairs and len(filter_request.trading_pairs) > 1: + executors = [e for e in executors if e.get("trading_pair") in filter_request.trading_pairs] + if filter_request.executor_types and len(filter_request.executor_types) > 1: + executors = [e for e in executors if e.get("executor_type") in filter_request.executor_types] + + # Apply cursor-based pagination + start_idx = 0 + if filter_request.cursor: + for i, ex in enumerate(executors): + if ex.get("executor_id") == filter_request.cursor: + start_idx = i + 1 + break + + end_idx = start_idx + filter_request.limit + page_data = executors[start_idx:end_idx] + has_more = end_idx < len(executors) + next_cursor = page_data[-1]["executor_id"] if page_data and has_more else None + + return PaginatedResponse( + data=page_data, + pagination={ + "limit": filter_request.limit, + "has_more": has_more, + "next_cursor": next_cursor, + "total_count": len(executors) + } + ) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error listing executors: {e}", exc_info=True) 
+ raise HTTPException(status_code=500, detail=f"Error listing executors: {str(e)}") + + +@router.get("/summary", response_model=ExecutorsSummaryResponse) +async def get_executors_summary( + executor_service: ExecutorService = Depends(get_executor_service) +): + """ + Get summary statistics for all executors. + + Returns aggregate information including: + - Total active/completed executor counts + - Total PnL and volume + - Breakdown by executor type, connector, and status + """ + try: + summary = executor_service.get_summary() + return ExecutorsSummaryResponse(**summary) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error getting executor summary: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Error getting summary: {str(e)}") + + +@router.get("/{executor_id}", response_model=ExecutorDetailResponse) +async def get_executor( + executor_id: str, + executor_service: ExecutorService = Depends(get_executor_service) +): + """ + Get detailed information about a specific executor. + + Returns full executor information including: + - Current status and PnL + - Full configuration + - Executor-specific custom information + """ + try: + executor = executor_service.get_executor(executor_id) + + if not executor: + raise HTTPException(status_code=404, detail=f"Executor {executor_id} not found") + + return ExecutorDetailResponse(**executor) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error getting executor {executor_id}: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Error getting executor: {str(e)}") + + +@router.post("/{executor_id}/stop", response_model=StopExecutorResponse) +async def stop_executor( + executor_id: str, + request: StopExecutorRequest, + executor_service: ExecutorService = Depends(get_executor_service) +): + """ + Stop an active executor. + + Options: + - `keep_position`: If true, keeps any open position (for position executors). 
+ If false, the executor will attempt to close all positions before stopping. + + Returns confirmation of the stop action. + """ + try: + result = await executor_service.stop_executor( + executor_id=executor_id, + keep_position=request.keep_position + ) + return StopExecutorResponse(**result) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error stopping executor {executor_id}: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Error stopping executor: {str(e)}") + + +@router.delete("/{executor_id}", response_model=DeleteExecutorResponse) +async def delete_executor( + executor_id: str, + executor_service: ExecutorService = Depends(get_executor_service) +): + """ + Remove an executor from tracking. + + The executor must be already stopped/completed. This removes it from + the active tracking list but preserves database records for historical queries. + + Returns success message if removed. + """ + try: + # Check if executor exists + executor = executor_service.get_executor(executor_id) + if not executor: + raise HTTPException(status_code=404, detail=f"Executor {executor_id} not found") + + # Check if still active + if executor.get("is_active", False): + raise HTTPException( + status_code=400, + detail=f"Cannot delete active executor. 
Stop it first using POST /executors/{executor_id}/stop" + ) + + # Remove from tracking + removed = executor_service.remove_completed_executor(executor_id) + if not removed: + raise HTTPException(status_code=404, detail=f"Executor {executor_id} not found in completed list") + + return DeleteExecutorResponse( + message=f"Executor {executor_id} removed from tracking", + executor_id=executor_id + ) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error deleting executor {executor_id}: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Error deleting executor: {str(e)}") + + +@router.get("/types/available") +async def get_available_executor_types(): + """ + Get list of available executor types with descriptions. + + Returns information about each supported executor type. + """ + return { + "executor_types": [ + { + "type": "position_executor", + "description": "Single position with triple barrier (stop loss, take profit, time limit)", + "use_case": "Directional trading with risk management" + }, + { + "type": "grid_executor", + "description": "Grid trading with multiple buy/sell levels", + "use_case": "Range-bound market trading" + }, + { + "type": "dca_executor", + "description": "Dollar-cost averaging with multiple entry points", + "use_case": "Gradual position building" + }, + { + "type": "twap_executor", + "description": "Time-weighted average price execution", + "use_case": "Large order execution with minimal market impact" + }, + { + "type": "arbitrage_executor", + "description": "Cross-exchange price arbitrage", + "use_case": "Exploiting price differences between exchanges" + }, + { + "type": "xemm_executor", + "description": "Cross-exchange market making", + "use_case": "Providing liquidity across exchanges" + }, + { + "type": "order_executor", + "description": "Simple order execution with retry logic", + "use_case": "Basic order placement with reliability" + } + ] + } + + +def _extract_field_info(schema: dict, 
definitions: dict) -> list: + """ + Extract field information from a JSON schema. + + Returns list of field dicts with: name, type, description, required, default, constraints + """ + fields = [] + properties = schema.get("properties", {}) + required_fields = set(schema.get("required", [])) + + for field_name, field_schema in properties.items(): + # Skip internal fields + if field_name.startswith("_"): + continue + + field_info = { + "name": field_name, + "required": field_name in required_fields, + } + + # Resolve $ref if present + if "$ref" in field_schema: + ref_path = field_schema["$ref"].split("/")[-1] + if ref_path in definitions: + field_schema = {**definitions[ref_path], **field_schema} + del field_schema["$ref"] + + # Handle anyOf (usually Optional types) + if "anyOf" in field_schema: + types = [] + for option in field_schema["anyOf"]: + if "$ref" in option: + ref_name = option["$ref"].split("/")[-1] + types.append(ref_name) + elif option.get("type") == "null": + field_info["required"] = False + else: + types.append(option.get("type", "any")) + field_info["type"] = types[0] if len(types) == 1 else f"Union[{', '.join(types)}]" + elif "allOf" in field_schema: + # Handle allOf (usually inheritance) + refs = [opt["$ref"].split("/")[-1] for opt in field_schema["allOf"] if "$ref" in opt] + field_info["type"] = refs[0] if refs else "object" + elif "enum" in field_schema: + field_info["type"] = "enum" + field_info["enum_values"] = field_schema["enum"] + elif "type" in field_schema: + field_info["type"] = field_schema["type"] + else: + field_info["type"] = "any" + + # Extract description + if "description" in field_schema: + field_info["description"] = field_schema["description"] + elif "title" in field_schema: + field_info["description"] = field_schema["title"] + + # Extract default value + if "default" in field_schema: + field_info["default"] = field_schema["default"] + + # Extract constraints + constraints = {} + if "minimum" in field_schema: + 
constraints["minimum"] = field_schema["minimum"] + if "maximum" in field_schema: + constraints["maximum"] = field_schema["maximum"] + if "exclusiveMinimum" in field_schema: + constraints["exclusive_minimum"] = field_schema["exclusiveMinimum"] + if "exclusiveMaximum" in field_schema: + constraints["exclusive_maximum"] = field_schema["exclusiveMaximum"] + if "minLength" in field_schema: + constraints["min_length"] = field_schema["minLength"] + if "maxLength" in field_schema: + constraints["max_length"] = field_schema["maxLength"] + if "pattern" in field_schema: + constraints["pattern"] = field_schema["pattern"] + if "ge" in field_schema: + constraints["ge"] = field_schema["ge"] + if "le" in field_schema: + constraints["le"] = field_schema["le"] + if "gt" in field_schema: + constraints["gt"] = field_schema["gt"] + if "lt" in field_schema: + constraints["lt"] = field_schema["lt"] + + if constraints: + field_info["constraints"] = constraints + + fields.append(field_info) + + return fields + + +@router.get("/types/{executor_type}/config") +async def get_executor_config_schema(executor_type: str): + """ + Get configuration schema for a specific executor type. + + Returns detailed information about each configuration field including: + - **name**: Field name + - **type**: Data type (str, int, Decimal, enum, etc.) + - **description**: Field description + - **required**: Whether the field is required + - **default**: Default value if any + - **constraints**: Validation constraints (min, max, pattern, etc.) + - **enum_values**: Possible values for enum types + + Also returns nested type definitions for complex fields. + """ + from services.executor_service import ExecutorService + + if executor_type not in ExecutorService.EXECUTOR_REGISTRY: + raise HTTPException( + status_code=404, + detail=f"Unknown executor type '{executor_type}'. 
@router.get("/types/{executor_type}/config")
async def get_executor_config_schema(executor_type: str):
    """
    Describe the configuration schema of one executor type.

    The response lists every config field (name, type, description, required,
    default, constraints, enum_values where applicable) plus the nested type
    definitions referenced by complex fields.
    """
    from services.executor_service import ExecutorService

    if executor_type not in ExecutorService.EXECUTOR_REGISTRY:
        raise HTTPException(
            status_code=404,
            detail=f"Unknown executor type '{executor_type}'. Valid types: {list(ExecutorService.EXECUTOR_REGISTRY.keys())}"
        )

    config_class = ExecutorService.EXECUTOR_REGISTRY[executor_type][1]

    try:
        # Pydantic emits the JSON schema; "$defs" holds referenced sub-models.
        json_schema = config_class.model_json_schema()
        type_defs = json_schema.get("$defs", {})

        field_docs = _extract_field_info(json_schema, type_defs)

        # Describe every referenced definition: object-like defs get their own
        # field listing, enum defs get their value list.
        nested = {}
        for name, sub_schema in type_defs.items():
            described = sub_schema.get("description", sub_schema.get("title", ""))
            if "properties" in sub_schema:
                nested[name] = {
                    "description": described,
                    "fields": _extract_field_info(sub_schema, type_defs),
                }
            elif "enum" in sub_schema:
                nested[name] = {
                    "type": "enum",
                    "values": sub_schema["enum"],
                    "description": described,
                }

        return {
            "executor_type": executor_type,
            "config_class": config_class.__name__,
            "description": json_schema.get("description", json_schema.get("title", "")),
            "fields": field_docs,
            "nested_types": nested,
        }

    except Exception as e:
        logger.error(f"Error extracting config schema for {executor_type}: {e}", exc_info=True)
        raise HTTPException(
            status_code=500,
            detail=f"Error extracting config schema: {str(e)}"
        )
+""" +import asyncio +import json +import logging +from datetime import datetime, timezone +from decimal import Decimal +from enum import Enum +from typing import Any, Dict, List, Optional, Type + +from fastapi import HTTPException + +def _json_default(obj): + """JSON serializer for objects not serializable by default (used for DB persistence).""" + if isinstance(obj, Decimal): + return float(obj) + if isinstance(obj, Enum): + return obj.name + raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable") + + +from hummingbot.strategy_v2.executors.arbitrage_executor.arbitrage_executor import ArbitrageExecutor +from hummingbot.strategy_v2.executors.arbitrage_executor.data_types import ArbitrageExecutorConfig +from hummingbot.strategy_v2.executors.data_types import ExecutorConfigBase +from hummingbot.strategy_v2.executors.dca_executor.dca_executor import DCAExecutor +from hummingbot.strategy_v2.executors.dca_executor.data_types import DCAExecutorConfig +from hummingbot.strategy_v2.executors.executor_base import ExecutorBase +from hummingbot.strategy_v2.executors.grid_executor.data_types import GridExecutorConfig +from hummingbot.strategy_v2.executors.grid_executor.grid_executor import GridExecutor +from hummingbot.strategy_v2.executors.order_executor.data_types import OrderExecutorConfig +from hummingbot.strategy_v2.executors.order_executor.order_executor import OrderExecutor +from hummingbot.strategy_v2.executors.position_executor.data_types import PositionExecutorConfig +from hummingbot.strategy_v2.executors.position_executor.position_executor import PositionExecutor +from hummingbot.strategy_v2.executors.twap_executor.data_types import TWAPExecutorConfig +from hummingbot.strategy_v2.executors.twap_executor.twap_executor import TWAPExecutor +from hummingbot.strategy_v2.executors.xemm_executor.data_types import XEMMExecutorConfig +from hummingbot.strategy_v2.executors.xemm_executor.xemm_executor import XEMMExecutor +from 
from hummingbot.strategy_v2.models.base import RunnableStatus

from database import AsyncDatabaseManager
from services.trading_service import TradingService, AccountTradingInterface

logger = logging.getLogger(__name__)


class ExecutorService:
    """
    Service for managing trading executors without Docker containers.

    This service provides:
    - Dynamic executor creation for any market/connector
    - Executor lifecycle management (start, stop, cleanup)
    - Real-time executor status monitoring
    - Database persistence of executor state and history

    State is held in three in-memory maps keyed by executor_id:
    `_active_executors` (live executor instances), `_executor_metadata`
    (account/connector/config info captured at creation), and
    `_completed_executors` (formatted snapshots kept after completion for
    queries until explicitly removed).
    """

    # Mapping of executor type strings to (executor_class, config_class)
    EXECUTOR_REGISTRY: Dict[str, tuple[Type[ExecutorBase], Type[ExecutorConfigBase]]] = {
        "position_executor": (PositionExecutor, PositionExecutorConfig),
        "grid_executor": (GridExecutor, GridExecutorConfig),
        "dca_executor": (DCAExecutor, DCAExecutorConfig),
        "arbitrage_executor": (ArbitrageExecutor, ArbitrageExecutorConfig),
        "twap_executor": (TWAPExecutor, TWAPExecutorConfig),
        "xemm_executor": (XEMMExecutor, XEMMExecutorConfig),
        "order_executor": (OrderExecutor, OrderExecutorConfig),
    }

    def __init__(
        self,
        trading_service: TradingService,
        db_manager: AsyncDatabaseManager,
        default_account: str = "master_account",
        update_interval: float = 1.0,
        max_retries: int = 10
    ):
        """
        Initialize ExecutorService.

        Args:
            trading_service: TradingService for trading operations and interfaces
            db_manager: AsyncDatabaseManager for persistence
            default_account: Default account to use
            update_interval: Executor update interval in seconds
            max_retries: Maximum retries for executor operations
        """
        self._trading_service = trading_service
        self.db_manager = db_manager
        self.default_account = default_account
        self.update_interval = update_interval
        self.max_retries = max_retries

        # Trading interfaces per account (lazy initialized via TradingService)
        self._trading_interfaces: Dict[str, AccountTradingInterface] = {}

        # Active executors: executor_id -> executor instance
        self._active_executors: Dict[str, ExecutorBase] = {}

        # Executor metadata: executor_id -> metadata dict
        self._executor_metadata: Dict[str, Dict[str, Any]] = {}

        # Completed executors (kept for a period for queries)
        self._completed_executors: Dict[str, Dict[str, Any]] = {}

        # Control loop task
        self._control_loop_task: Optional[asyncio.Task] = None
        self._is_running = False

    def start(self) -> None:
        """Start the executor service control loop.

        Idempotent: calling start() while already running is a no-op.
        Must be called from within a running event loop (uses create_task).
        """
        if not self._is_running:
            self._is_running = True
            self._control_loop_task = asyncio.create_task(self._control_loop())
            logger.info("ExecutorService started")

    async def stop(self) -> None:
        """Stop the executor service and all active executors.

        Cancels the control loop, stops every live executor (errors are
        logged, not raised), clears in-memory state, and cleans up the
        cached trading interfaces.
        """
        self._is_running = False

        if self._control_loop_task:
            self._control_loop_task.cancel()
            try:
                await self._control_loop_task
            except asyncio.CancelledError:
                # Expected: we cancelled the task ourselves.
                pass
            self._control_loop_task = None

        # Stop all active executors
        for executor_id in list(self._active_executors.keys()):
            try:
                executor = self._active_executors.get(executor_id)
                if executor:
                    executor.stop()
            except Exception as e:
                logger.error(f"Error stopping executor {executor_id}: {e}")

        # Clear active executors
        # NOTE(review): executors stopped here skip _handle_executor_completion,
        # so their final state is not persisted on service shutdown — confirm intended.
        self._active_executors.clear()
        self._executor_metadata.clear()

        # Cleanup trading interfaces
        for trading_interface in self._trading_interfaces.values():
            await trading_interface.cleanup()
        self._trading_interfaces.clear()

        logger.info("ExecutorService stopped")

    async def _control_loop(self) -> None:
        """Main control loop that updates all active executors.

        Runs until stop(); each tick refreshes trading-interface timestamps,
        then moves executors whose `is_closed` is true into the completed set.
        Exceptions are logged and the loop keeps running.
        """
        while self._is_running:
            try:
                # Update timestamps for all trading interfaces via TradingService
                self._trading_service.update_all_timestamps()

                # Check for completed executors
                completed_ids = []
                for executor_id, executor in self._active_executors.items():
                    if executor.is_closed:
                        completed_ids.append(executor_id)

                # Handle completed executors (after iteration: completion
                # mutates _active_executors)
                for executor_id in completed_ids:
                    await self._handle_executor_completion(executor_id)

            except Exception as e:
                logger.error(f"Error in executor control loop: {e}", exc_info=True)

            await asyncio.sleep(self.update_interval)

    def _get_trading_interface(self, account_name: str) -> AccountTradingInterface:
        """Get or create an AccountTradingInterface for the account.

        Interfaces are cached per account for the lifetime of the service.
        """
        if account_name not in self._trading_interfaces:
            self._trading_interfaces[account_name] = self._trading_service.get_trading_interface(account_name)
        return self._trading_interfaces[account_name]

    async def create_executor(
        self,
        executor_config: Dict[str, Any],
        account_name: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Create and start a new executor.

        Args:
            executor_config: Executor configuration dictionary (must include 'type')
            account_name: Account to use (defaults to master_account)

        Returns:
            Dictionary with executor_id and initial status

        Raises:
            HTTPException: 400 on missing/invalid 'type', missing
                connector_name/trading_pair, or config/instantiation errors.
        """
        account = account_name or self.default_account

        # Get executor type from config
        executor_type = executor_config.get("type")
        if not executor_type:
            raise HTTPException(
                status_code=400,
                detail="executor_config must include 'type' field"
            )

        # Validate executor type
        if executor_type not in self.EXECUTOR_REGISTRY:
            raise HTTPException(
                status_code=400,
                detail=f"Invalid executor type '{executor_type}'. Valid types: {list(self.EXECUTOR_REGISTRY.keys())}"
            )

        # Get trading interface for this account
        trading_interface = self._get_trading_interface(account)

        # Extract connector and trading pair from config
        connector_name = executor_config.get("connector_name")
        trading_pair = executor_config.get("trading_pair")

        if not connector_name:
            raise HTTPException(status_code=400, detail="connector_name is required in executor_config")
        if not trading_pair:
            raise HTTPException(status_code=400, detail="trading_pair is required in executor_config")

        # Ensure connector and market are ready
        await trading_interface.add_market(connector_name, trading_pair)

        # Set timestamp if not provided (required for time-based features like time_limit)
        # NOTE: mutates the caller-supplied dict in place.
        if "timestamp" not in executor_config or executor_config["timestamp"] is None:
            executor_config["timestamp"] = trading_interface.current_timestamp

        # Create typed executor config
        executor_class, config_class = self.EXECUTOR_REGISTRY[executor_type]
        try:
            typed_config = config_class(**executor_config)
        except Exception as e:
            raise HTTPException(
                status_code=400,
                detail=f"Invalid executor config: {str(e)}"
            )

        # Create the executor instance
        try:
            executor = executor_class(
                strategy=trading_interface,
                config=typed_config,
                update_interval=self.update_interval,
                max_retries=self.max_retries
            )
        except Exception as e:
            raise HTTPException(
                status_code=400,
                detail=f"Failed to create executor: {str(e)}"
            )

        # Store executor and metadata (executor_id comes from the config model)
        executor_id = typed_config.id
        self._active_executors[executor_id] = executor
        self._executor_metadata[executor_id] = {
            "account_name": account,
            "connector_name": connector_name,
            "trading_pair": trading_pair,
            "executor_type": executor_type,
            "created_at": datetime.now(timezone.utc),
            "config": executor_config
        }

        # Start the executor
        executor.start()

        # Persist to database (best-effort; errors are logged inside)
        await self._persist_executor_created(executor_id, executor)

        logger.info(f"Created {executor_type} executor {executor_id} for {connector_name}/{trading_pair}")

        return {
            "executor_id": executor_id,
            "executor_type": executor_type,
            "connector_name": connector_name,
            "trading_pair": trading_pair,
            "status": executor.status.name,
            "created_at": self._executor_metadata[executor_id]["created_at"].isoformat()
        }

    def get_executors(
        self,
        account_name: Optional[str] = None,
        connector_name: Optional[str] = None,
        trading_pair: Optional[str] = None,
        executor_type: Optional[str] = None,
        status: Optional[str] = None,
        include_completed: bool = False
    ) -> List[Dict[str, Any]]:
        """
        Get list of executors with optional filtering.

        Args:
            account_name: Filter by account name
            connector_name: Filter by connector name
            trading_pair: Filter by trading pair
            executor_type: Filter by executor type
            status: Filter by status
            include_completed: Include recently completed executors

        Returns:
            List of executor information dictionaries
        """
        result = []

        # Process active executors
        for executor_id, executor in self._active_executors.items():
            metadata = self._executor_metadata.get(executor_id, {})

            # Apply filters
            if account_name and metadata.get("account_name") != account_name:
                continue
            if connector_name and metadata.get("connector_name") != connector_name:
                continue
            if trading_pair and metadata.get("trading_pair") != trading_pair:
                continue
            if executor_type and metadata.get("executor_type") != executor_type:
                continue
            if status and executor.status.name != status:
                continue

            result.append(self._format_executor_info(executor_id, executor))

        # Include completed executors if requested
        if include_completed:
            for executor_id, completed_info in self._completed_executors.items():
                # Apply same filters to completed executors
                # NOTE(review): the `status` filter is not applied to completed
                # entries (only the other four filters) — confirm intended.
                if account_name and completed_info.get("account_name") != account_name:
                    continue
                if connector_name and completed_info.get("connector_name") != connector_name:
                    continue
                if trading_pair and completed_info.get("trading_pair") != trading_pair:
                    continue
                if executor_type and completed_info.get("executor_type") != executor_type:
                    continue

                result.append(completed_info)

        return result

    def get_executor(self, executor_id: str) -> Optional[Dict[str, Any]]:
        """
        Get detailed information about a specific executor.

        Args:
            executor_id: The executor ID

        Returns:
            Detailed executor information or None if not found
        """
        # Check active executors first
        executor = self._active_executors.get(executor_id)
        if executor:
            return self._format_executor_info(executor_id, executor)

        # Check completed executors
        completed_info = self._completed_executors.get(executor_id)
        if completed_info:
            return completed_info

        return None

    async def stop_executor(
        self,
        executor_id: str,
        keep_position: bool = False
    ) -> Dict[str, Any]:
        """
        Stop an active executor.

        Args:
            executor_id: The executor ID to stop
            keep_position: Whether to keep the position open

        Returns:
            Dictionary with stop confirmation

        Raises:
            HTTPException: 404 if unknown, 400 if already closed,
                500 if early_stop fails.
        """
        executor = self._active_executors.get(executor_id)
        if not executor:
            raise HTTPException(status_code=404, detail=f"Executor {executor_id} not found")

        if executor.is_closed:
            raise HTTPException(status_code=400, detail=f"Executor {executor_id} is already closed")

        # Trigger early stop; the control loop picks up the eventual closure.
        try:
            executor.early_stop(keep_position=keep_position)
        except Exception as e:
            logger.error(f"Error stopping executor {executor_id}: {e}")
            raise HTTPException(status_code=500, detail=f"Error stopping executor: {str(e)}")

        logger.info(f"Initiated stop for executor {executor_id} (keep_position={keep_position})")

        return {
            "executor_id": executor_id,
            "status": "stopping",
            "keep_position": keep_position
        }

    async def _handle_executor_completion(self, executor_id: str) -> None:
        """Handle cleanup when an executor completes.

        Snapshots the executor into `_completed_executors`, persists the
        final state, and drops it from the active maps.
        """
        executor = self._active_executors.get(executor_id)
        if not executor:
            return

        # NOTE(review): `metadata` is assigned but never used below — dead code?
        metadata = self._executor_metadata.get(executor_id, {})

        # Format final executor info
        final_info = self._format_executor_info(executor_id, executor)
        final_info["completed_at"] = datetime.now(timezone.utc).isoformat()

        # Store in completed executors
        self._completed_executors[executor_id] = final_info

        # Persist final state to database
        await self._persist_executor_completed(executor_id, executor)

        # Remove from active executors
        del self._active_executors[executor_id]
        if executor_id in self._executor_metadata:
            del self._executor_metadata[executor_id]

        close_type = executor.close_type.name if executor.close_type else "UNKNOWN"
        logger.info(f"Executor {executor_id} completed with close_type: {close_type}")

    def _format_executor_info(
        self,
        executor_id: str,
        executor: ExecutorBase
    ) -> Dict[str, Any]:
        """Format executor information for API response.

        Uses Pydantic's model_dump(mode='json') for automatic serialization
        of Decimal, Enum, and other complex types. Falls back to a
        metadata-only dict (zeroed metrics) when executor_info raises.
        """
        metadata = self._executor_metadata.get(executor_id, {})

        try:
            # Use Pydantic's built-in JSON serialization
            executor_info = executor.executor_info
            result = executor_info.model_dump(mode='json')

            # Add our metadata (not part of ExecutorInfo model)
            result["executor_id"] = executor_id
            result["executor_type"] = metadata.get("executor_type")
            result["account_name"] = metadata.get("account_name")
            result["created_at"] = metadata.get("created_at").isoformat() if metadata.get("created_at") else None

            # Ensure connector_name and trading_pair from metadata take precedence
            if metadata.get("connector_name"):
                result["connector_name"] = metadata.get("connector_name")
            if metadata.get("trading_pair"):
                result["trading_pair"] = metadata.get("trading_pair")

            return result

        except Exception as e:
            # Fallback when executor_info validation fails (e.g., timestamp=None)
            logger.debug(f"Error accessing executor_info for {executor_id}: {e}")
            return {
                "executor_id": executor_id,
                "executor_type": metadata.get("executor_type"),
                "account_name": metadata.get("account_name"),
                "connector_name": metadata.get("connector_name"),
                "trading_pair": metadata.get("trading_pair"),
                "side": None,
                "status": executor.status.name if hasattr(executor, 'status') else "UNKNOWN",
                "is_active": not executor.is_closed if hasattr(executor, 'is_closed') else True,
                "is_trading": False,
                "timestamp": None,
                "created_at": metadata.get("created_at").isoformat() if metadata.get("created_at") else None,
                "close_type": executor.close_type.name if hasattr(executor, 'close_type') and executor.close_type else None,
                "close_timestamp": None,
                "controller_id": None,
                "net_pnl_quote": 0.0,
                "net_pnl_pct": 0.0,
                "cum_fees_quote": 0.0,
                "filled_amount_quote": 0.0,
                "config": metadata.get("config"),
                "custom_info": None,
            }

    def get_summary(self) -> Dict[str, Any]:
        """
        Get summary statistics for all executors.

        Returns:
            Dictionary with aggregate statistics (counts, PnL/volume totals,
            and breakdowns by type, connector, and status) across both active
            and completed executors.
        """
        executors = self.get_executors(include_completed=True)

        active_count = sum(1 for e in executors if e.get("is_active", False))
        completed_count = len(executors) - active_count
        total_pnl = sum(e.get("net_pnl_quote", 0) for e in executors)
        total_volume = sum(e.get("filled_amount_quote", 0) for e in executors)

        by_type: Dict[str, int] = {}
        by_connector: Dict[str, int] = {}
        by_status: Dict[str, int] = {}

        for e in executors:
            ex_type = e.get("executor_type", "unknown")
            connector = e.get("connector_name", "unknown")
            status = e.get("status", "unknown")

            by_type[ex_type] = by_type.get(ex_type, 0) + 1
            by_connector[connector] = by_connector.get(connector, 0) + 1
            by_status[status] = by_status.get(status, 0) + 1

        return {
            "total_active": active_count,
            "total_completed": completed_count,
            "total_pnl_quote": total_pnl,
            "total_volume_quote": total_volume,
            "by_type": by_type,
            "by_connector": by_connector,
            "by_status": by_status
        }

    async def _persist_executor_created(self, executor_id: str, executor: ExecutorBase) -> None:
        """Persist executor creation to database.

        Best-effort: any failure is logged and swallowed so persistence
        problems never block executor creation.
        """
        if not self.db_manager:
            return

        try:
            metadata = self._executor_metadata.get(executor_id, {})

            async with self.db_manager.get_session_context() as session:
                # Local import avoids a circular dependency at module load time.
                from database.repositories.executor_repository import ExecutorRepository
                repo = ExecutorRepository(session)

                await repo.create_executor(
                    executor_id=executor_id,
                    executor_type=metadata.get("executor_type"),
                    account_name=metadata.get("account_name"),
                    connector_name=metadata.get("connector_name"),
                    trading_pair=metadata.get("trading_pair"),
                    config=json.dumps(metadata.get("config", {}), default=_json_default),
                    status=executor.status.name
                )

            logger.debug(f"Persisted executor {executor_id} creation to database")

        except Exception as e:
            logger.error(f"Error persisting executor creation: {e}")

    async def _persist_executor_completed(self, executor_id: str, executor: ExecutorBase) -> None:
        """Persist executor completion to database.

        Best-effort like _persist_executor_created; when executor_info
        cannot be read, metrics are persisted as zeros.
        """
        if not self.db_manager:
            return

        try:
            # Try to get executor_info, handle validation errors (e.g., timestamp=None)
            try:
                executor_info = executor.executor_info
                status_name = executor_info.status.name
                close_type = executor_info.close_type.name if executor_info.close_type else None
                net_pnl_quote = executor_info.net_pnl_quote
                net_pnl_pct = executor_info.net_pnl_pct
                cum_fees_quote = executor_info.cum_fees_quote
                filled_amount_quote = executor_info.filled_amount_quote
                custom_info = executor_info.custom_info
            except Exception as e:
                # Fallback when executor_info validation fails
                logger.debug(f"Error accessing executor_info for persistence: {e}")
                status_name = executor.status.name if hasattr(executor, 'status') else "UNKNOWN"
                close_type = executor.close_type.name if hasattr(executor, 'close_type') and executor.close_type else None
                net_pnl_quote = Decimal("0")
                net_pnl_pct = Decimal("0")
                cum_fees_quote = Decimal("0")
                filled_amount_quote = Decimal("0")
                custom_info = None

            async with self.db_manager.get_session_context() as session:
                from database.repositories.executor_repository import ExecutorRepository
                repo = ExecutorRepository(session)

                await repo.update_executor(
                    executor_id=executor_id,
                    status=status_name,
                    close_type=close_type,
                    net_pnl_quote=net_pnl_quote,
                    net_pnl_pct=net_pnl_pct,
                    cum_fees_quote=cum_fees_quote,
                    filled_amount_quote=filled_amount_quote,
                    final_state=json.dumps(custom_info, default=_json_default) if custom_info else None
                )

            logger.debug(f"Persisted executor {executor_id} completion to database")

        except Exception as e:
            logger.error(f"Error persisting executor completion: {e}")

    def remove_completed_executor(self, executor_id: str) -> bool:
        """
        Remove a completed executor from tracking.

        Args:
            executor_id: The executor ID to remove

        Returns:
            True if removed, False if not found
        """
        if executor_id in self._completed_executors:
            del self._completed_executors[executor_id]
            return True
        return False
+ executors, gateway, gateway_swap, gateway_clmm, @@ -98,8 +101,11 @@ async def lifespan(app: FastAPI): BackendAPISecurity.store_password_verification(secrets_manager) logging.info("Created password verification file for master_account") - # Initialize GatewayHttpClient singleton with proper config from settings - # This must happen BEFORE MarketDataProvider is created, as it uses GatewayHttpClient.get_instance() + # ========================================================================= + # 1. Infrastructure Setup + # ========================================================================= + + # Initialize GatewayHttpClient singleton parsed_gateway_url = urlparse(settings.gateway.url) gateway_config = GatewayConfigMap( gateway_api_host=parsed_gateway_url.hostname or "localhost", @@ -109,8 +115,11 @@ async def lifespan(app: FastAPI): GatewayHttpClient.get_instance(gateway_config) logging.info(f"Initialized GatewayHttpClient with URL: {settings.gateway.url}") - # Initialize MarketDataProvider with empty connectors (will use non-trading connectors) - market_data_provider = MarketDataProvider(connectors={}) + # Initialize secrets manager and database + secrets_manager = ETHKeyFileSecretManger(password=settings.security.config_password) + db_manager = AsyncDatabaseManager(settings.database.url) + await db_manager.create_tables() + logging.info("Database initialized") # Read rate oracle configuration from conf_client.yml from utils.file_system import FileSystemUtil @@ -138,26 +147,75 @@ async def lifespan(app: FastAPI): source_name = "binance" # Initialize RateOracle with configured source and quote token - rate_oracle_instance = RateOracle.get_instance() - rate_oracle_instance.source = rate_source - rate_oracle_instance.quote_token = quote_token + rate_oracle = RateOracle.get_instance() + rate_oracle.source = rate_source + rate_oracle.quote_token = quote_token except FileNotFoundError: logging.warning("conf_client.yml not found, using default RateOracle 
configuration (binance, USDT)") - rate_oracle_instance = RateOracle.get_instance() + rate_oracle = RateOracle.get_instance() except Exception as e: logging.warning(f"Error reading conf_client.yml: {e}, using default RateOracle configuration") - rate_oracle_instance = RateOracle.get_instance() + rate_oracle = RateOracle.get_instance() + + # ========================================================================= + # 2. UnifiedConnectorService - Single source of truth for all connectors + # ========================================================================= + + connector_service = UnifiedConnectorService( + secrets_manager=secrets_manager, + db_manager=db_manager + ) + logging.info("UnifiedConnectorService initialized") - # Initialize MarketDataFeedManager with lifecycle management - market_data_feed_manager = MarketDataFeedManager( - market_data_provider=market_data_provider, - rate_oracle=rate_oracle_instance, + # ========================================================================= + # 3. 
Services that depend on connector_service + # ========================================================================= + + # MarketDataService - candles, order books, prices + market_data_service = MarketDataService( + connector_service=connector_service, + rate_oracle=rate_oracle, cleanup_interval=settings.market_data.cleanup_interval, feed_timeout=settings.market_data.feed_timeout ) + logging.info("MarketDataService initialized") + + # TradingService - order placement, positions, trading interfaces + trading_service = TradingService( + connector_service=connector_service, + market_data_service=market_data_service + ) + logging.info("TradingService initialized") + + # AccountsService - account management, balances, portfolio (simplified) + accounts_service = AccountsService( + account_update_interval=settings.app.account_update_interval, + gateway_url=settings.gateway.url + ) + # Inject services into AccountsService + accounts_service._connector_service = connector_service + accounts_service._market_data_service = market_data_service + accounts_service._trading_service = trading_service + logging.info("AccountsService initialized") + + # ========================================================================= + # 4. ExecutorService - depends on TradingService (NO circular dependency) + # ========================================================================= + + executor_service = ExecutorService( + trading_service=trading_service, + db_manager=db_manager, + default_account="master_account", + update_interval=1.0, + max_retries=10 + ) + logging.info("ExecutorService initialized") + + # ========================================================================= + # 5. 
Other Services + # ========================================================================= - # Initialize services bots_orchestrator = BotsOrchestrator( broker_host=settings.broker.host, broker_port=settings.broker.port, @@ -165,11 +223,6 @@ async def lifespan(app: FastAPI): broker_password=settings.broker.password ) - accounts_service = AccountsService( - account_update_interval=settings.app.account_update_interval, - market_data_feed_manager=market_data_feed_manager, - gateway_url=settings.gateway.url - ) docker_service = DockerService() gateway_service = GatewayService() bot_archiver = BotArchiver( @@ -178,53 +231,54 @@ async def lifespan(app: FastAPI): settings.aws.s3_default_bucket_name ) - # Initialize database - await accounts_service.ensure_db_initialized() - - # # Initialize ExecutorService for running executors directly via API - # executor_service = ExecutorService( - # connector_manager=accounts_service.connector_manager, - # market_data_feed_manager=market_data_feed_manager, - # db_manager=accounts_service.db_manager, - # default_account="master_account", - # update_interval=1.0, - # max_retries=10 - # ) - # # Store reference in accounts_service for router access - # accounts_service._executor_service = executor_service - - # Store services in app state - app.state.bots_orchestrator = bots_orchestrator + # ========================================================================= + # 6. 
Store services in app state + # ========================================================================= + + app.state.db_manager = db_manager + app.state.connector_service = connector_service + app.state.market_data_service = market_data_service + app.state.trading_service = trading_service app.state.accounts_service = accounts_service + app.state.executor_service = executor_service + app.state.bots_orchestrator = bots_orchestrator app.state.docker_service = docker_service app.state.gateway_service = gateway_service app.state.bot_archiver = bot_archiver - app.state.market_data_feed_manager = market_data_feed_manager - # app.state.executor_service = executor_service - # Start services + # ========================================================================= + # 7. Start services + # ========================================================================= + bots_orchestrator.start() accounts_service.start() - market_data_feed_manager.start() - # executor_service.start() + market_data_service.start() + executor_service.start() + + # Initialize all trading connectors at startup + # This ensures orders are loaded into in_flight_orders and ready for management + logging.info("Initializing all trading connectors...") + await connector_service.initialize_all_trading_connectors() + + logging.info("All services started successfully") yield + # ========================================================================= # Shutdown services - bots_orchestrator.stop() - await accounts_service.stop() + # ========================================================================= - # Stop executor service - # await executor_service.stop() + logging.info("Shutting down services...") - # Stop market data feed manager (which will stop all feeds) - market_data_feed_manager.stop() - - # Clean up docker service + bots_orchestrator.stop() + await accounts_service.stop() + await executor_service.stop() + market_data_service.stop() + await connector_service.stop_all() 
docker_service.cleanup() + await db_manager.close() - # Close database connections - await accounts_service.db_manager.close() + logging.info("All services stopped") # Initialize FastAPI with metadata and lifespan @@ -310,7 +364,7 @@ def auth_user( app.include_router(rate_oracle.router, dependencies=[Depends(auth_user)]) app.include_router(backtesting.router, dependencies=[Depends(auth_user)]) app.include_router(archived_bots.router, dependencies=[Depends(auth_user)]) -# app.include_router(executors.router, dependencies=[Depends(auth_user)]) +app.include_router(executors.router, dependencies=[Depends(auth_user)]) @app.get("/") async def root(): From dab73ecc5f60c0fb6e6e65c2da8e3a4bcc809578 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 21 Jan 2026 15:45:16 -0300 Subject: [PATCH 12/20] (feat) fix ob start --- services/executor_service.py | 77 +++-- services/market_data_service.py | 42 +++ services/unified_connector_service.py | 403 +++++++++++++++++++------- 3 files changed, 397 insertions(+), 125 deletions(-) diff --git a/services/executor_service.py b/services/executor_service.py index c51e0d9a..fc060f95 100644 --- a/services/executor_service.py +++ b/services/executor_service.py @@ -13,15 +13,6 @@ from fastapi import HTTPException -def _json_default(obj): - """JSON serializer for objects not serializable by default (used for DB persistence).""" - if isinstance(obj, Decimal): - return float(obj) - if isinstance(obj, Enum): - return obj.name - raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable") - - from hummingbot.strategy_v2.executors.arbitrage_executor.arbitrage_executor import ArbitrageExecutor from hummingbot.strategy_v2.executors.arbitrage_executor.data_types import ArbitrageExecutorConfig from hummingbot.strategy_v2.executors.data_types import ExecutorConfigBase @@ -39,6 +30,7 @@ def _json_default(obj): from hummingbot.strategy_v2.executors.xemm_executor.data_types import XEMMExecutorConfig from 
hummingbot.strategy_v2.executors.xemm_executor.xemm_executor import XEMMExecutor from hummingbot.strategy_v2.models.base import RunnableStatus +from hummingbot.strategy_v2.models.executors import TrackedOrder from database import AsyncDatabaseManager from services.trading_service import TradingService, AccountTradingInterface @@ -46,6 +38,27 @@ def _json_default(obj): logger = logging.getLogger(__name__) +def _json_default(obj): + """JSON serializer for objects not serializable by default.""" + if isinstance(obj, Decimal): + return float(obj) + if isinstance(obj, Enum): + return obj.name + if isinstance(obj, TrackedOrder): + return { + "order_id": obj.order_id, + "price": float(obj.price) if obj.price else None, + "executed_amount_base": float(obj.executed_amount_base) if obj.executed_amount_base else 0.0, + "executed_amount_quote": float(obj.executed_amount_quote) if obj.executed_amount_quote else 0.0, + "is_filled": obj.is_filled if hasattr(obj, 'is_filled') else False, + "is_open": obj.is_open if hasattr(obj, 'is_open') else False, + } + # Handle Pydantic models + if hasattr(obj, 'model_dump'): + return obj.model_dump(mode='json') + raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable") + + class ExecutorService: """ Service for managing trading executors without Docker containers. @@ -437,9 +450,9 @@ def _format_executor_info( metadata = self._executor_metadata.get(executor_id, {}) try: - # Use Pydantic's built-in JSON serialization + # Use model_dump() then our custom serializer to handle TrackedOrder etc. 
executor_info = executor.executor_info - result = executor_info.model_dump(mode='json') + result = json.loads(json.dumps(executor_info.model_dump(), default=_json_default)) # Add our metadata (not part of ExecutorInfo model) result["executor_id"] = executor_id @@ -457,7 +470,35 @@ def _format_executor_info( except Exception as e: # Fallback when executor_info validation fails (e.g., timestamp=None) - logger.debug(f"Error accessing executor_info for {executor_id}: {e}") + logger.warning(f"Error accessing executor_info for {executor_id}: {e}") + + # Try to get real values directly from executor + try: + is_trading = executor.is_trading if hasattr(executor, 'is_trading') else False + except Exception: + is_trading = False + + try: + raw_custom_info = executor.get_custom_info() if hasattr(executor, 'get_custom_info') else None + # Convert to JSON-safe format (handles Decimals, Enums, etc.) + if raw_custom_info: + custom_info = json.loads(json.dumps(raw_custom_info, default=_json_default)) + else: + custom_info = None + except Exception: + custom_info = None + + try: + net_pnl_quote = float(executor.net_pnl_quote) if hasattr(executor, 'net_pnl_quote') else 0.0 + net_pnl_pct = float(executor.net_pnl_pct) if hasattr(executor, 'net_pnl_pct') else 0.0 + cum_fees_quote = float(executor.cum_fees_quote) if hasattr(executor, 'cum_fees_quote') else 0.0 + filled_amount_quote = float(executor.filled_amount_quote) if hasattr(executor, 'filled_amount_quote') else 0.0 + except Exception: + net_pnl_quote = 0.0 + net_pnl_pct = 0.0 + cum_fees_quote = 0.0 + filled_amount_quote = 0.0 + return { "executor_id": executor_id, "executor_type": metadata.get("executor_type"), @@ -467,18 +508,18 @@ def _format_executor_info( "side": None, "status": executor.status.name if hasattr(executor, 'status') else "UNKNOWN", "is_active": not executor.is_closed if hasattr(executor, 'is_closed') else True, - "is_trading": False, + "is_trading": is_trading, "timestamp": None, "created_at": 
metadata.get("created_at").isoformat() if metadata.get("created_at") else None, "close_type": executor.close_type.name if hasattr(executor, 'close_type') and executor.close_type else None, "close_timestamp": None, "controller_id": None, - "net_pnl_quote": 0.0, - "net_pnl_pct": 0.0, - "cum_fees_quote": 0.0, - "filled_amount_quote": 0.0, + "net_pnl_quote": net_pnl_quote, + "net_pnl_pct": net_pnl_pct, + "cum_fees_quote": cum_fees_quote, + "filled_amount_quote": filled_amount_quote, "config": metadata.get("config"), - "custom_info": None, + "custom_info": custom_info, } def get_summary(self) -> Dict[str, Any]: diff --git a/services/market_data_service.py b/services/market_data_service.py index e24bd39f..8a69fb94 100644 --- a/services/market_data_service.py +++ b/services/market_data_service.py @@ -699,3 +699,45 @@ def rate_oracle(self) -> RateOracle: def connector_service(self) -> "UnifiedConnectorService": """Get the connector service instance.""" return self._connector_service + + # ==================== Order Book Tracker Diagnostics ==================== + + def get_order_book_tracker_diagnostics( + self, + connector_name: str, + account_name: Optional[str] = None + ) -> Dict: + """ + Get diagnostics for a connector's order book tracker. + + Args: + connector_name: Exchange connector name + account_name: Optional account name for trading connector preference + + Returns: + Dictionary with diagnostic information + """ + return self._connector_service.get_order_book_tracker_diagnostics( + connector_name=connector_name, + account_name=account_name + ) + + async def restart_order_book_tracker( + self, + connector_name: str, + account_name: Optional[str] = None + ) -> Dict: + """ + Restart the order book tracker for a connector. 
+ + Args: + connector_name: Exchange connector name + account_name: Optional account name for trading connector preference + + Returns: + Dictionary with restart status + """ + return await self._connector_service.restart_order_book_tracker( + connector_name=connector_name, + account_name=account_name + ) diff --git a/services/unified_connector_service.py b/services/unified_connector_service.py index 43904ae2..f473395a 100644 --- a/services/unified_connector_service.py +++ b/services/unified_connector_service.py @@ -405,36 +405,17 @@ async def initialize_order_book( # Wait for order book to have data return await self._wait_for_order_book(tracker, trading_pair, timeout) - def _ensure_order_book_tracker_started(self, connector: ConnectorBase) -> bool: - """Ensure the order book tracker is started for a connector. - - This is called lazily when the first trading pair is added, not at - connector initialization. Exchanges like Binance disconnect WebSocket - connections that have no subscriptions, so we must wait until we have - at least one trading pair before starting the tracker. 
- - Returns: - True if tracker is running (or was started), False if no tracker available - """ - if not hasattr(connector, 'order_book_tracker') or not connector.order_book_tracker: + def _is_tracker_running(self, tracker) -> bool: + """Check if the order book tracker is running.""" + if not tracker: return False - - tracker = connector.order_book_tracker - - # Check if already running - if hasattr(tracker, '_order_book_stream_listener_task') and tracker._order_book_stream_listener_task: + # Check if any of the main tasks exist and are not done + task = getattr(tracker, '_order_book_stream_listener_task', None) + if task and not task.done(): + return True + task = getattr(tracker, '_init_order_books_task', None) + if task and not task.done(): return True - - # Start the tracker - if hasattr(tracker, 'start'): - try: - tracker.start() - logger.info(f"Started order book tracker for {type(connector).__name__}") - return True - except Exception as e: - logger.error(f"Failed to start order book tracker: {e}") - return False - return False async def _add_trading_pair_to_tracker( @@ -444,102 +425,82 @@ async def _add_trading_pair_to_tracker( ) -> bool: """Add a trading pair to connector's order book tracker. - Uses the connector's add_trading_pair method which is now available on - ExchangePyBase (and PerpetualDerivativePyBase). This method handles: - - Order book initialization via order_book_tracker - - Funding info initialization for perpetual connectors + Simplified approach: + 1. If tracker is already running, use connector.add_trading_pair() + 2. Otherwise, register the pair first, then start the tracker - IMPORTANT: This method ensures trading pairs are added BEFORE starting - the order book tracker. Exchanges like Binance disconnect WebSockets - that have no subscriptions, so we must register the trading pair first. 
+ The connector's add_trading_pair() method (from ExchangePyBase) handles: + - WebSocket subscription + - Order book snapshot fetching + - Tracking task creation """ try: - # CRITICAL: First register the trading pair with the tracker's internal set - # This must happen BEFORE starting the tracker so it knows what to subscribe to - if hasattr(connector, 'order_book_tracker') and connector.order_book_tracker: - tracker = connector.order_book_tracker - if hasattr(tracker, '_trading_pairs'): - was_added = self._add_to_trading_pairs(tracker._trading_pairs, trading_pair) - if was_added: - logger.debug(f"Registered {trading_pair} with order book tracker's _trading_pairs") - - # Now ensure order book tracker is started (lazy initialization) - # The tracker will use _trading_pairs to know what to subscribe to - tracker_started = self._ensure_order_book_tracker_started(connector) - if not tracker_started: - logger.warning(f"Could not start order book tracker for {type(connector).__name__}") - - # FIX: Check if order book was initialized during tracker startup. - # This happens for the FIRST trading pair when tracker.start() calls _init_order_books(). - # Without this check, the code would continue to connector.add_trading_pair() which - # returns False (pair already exists), then fall through to the data source fallback - # which OVERWRITES the order book unnecessarily. 
- tracker = None - if hasattr(connector, 'order_book_tracker') and connector.order_book_tracker: - tracker = connector.order_book_tracker - if hasattr(tracker, 'order_books') and trading_pair in tracker.order_books: - try: - ob = tracker.order_books[trading_pair] - bids, asks = ob.snapshot - if len(bids) > 0 or len(asks) > 0: - logger.info(f"Order book for {trading_pair} initialized during tracker startup") - return True - except Exception: - pass # Order book exists but may not have data yet, continue - - # Try connector.add_trading_pair() first - available on ExchangePyBase - # and PerpetualDerivativePyBase (handles funding info automatically) - if hasattr(connector, 'add_trading_pair'): - try: - result = await connector.add_trading_pair(trading_pair) - if result: - logger.info(f"Added trading pair {trading_pair} via connector.add_trading_pair()") - return True - except Exception as e: - logger.debug(f"connector.add_trading_pair failed: {e}") + if not hasattr(connector, 'order_book_tracker') or not connector.order_book_tracker: + logger.warning(f"Connector {type(connector).__name__} has no order_book_tracker") + return False - # Fallback: Try order_book_tracker.add_trading_pair directly - # (for older connectors that don't have the base class method) - if hasattr(connector, 'order_book_tracker') and hasattr(connector.order_book_tracker, 'add_trading_pair'): - try: - result = await connector.order_book_tracker.add_trading_pair(trading_pair) + tracker = connector.order_book_tracker + + # Case 1: Tracker is already running and ready + if self._is_tracker_running(tracker) and tracker.ready: + # Check if pair is already tracked + if trading_pair in tracker.order_books: + logger.debug(f"Order book for {trading_pair} already exists") + return True + + # Use connector's add_trading_pair method for dynamic addition + if hasattr(connector, 'add_trading_pair'): + logger.info(f"Adding {trading_pair} to running tracker via connector.add_trading_pair()") + result = await 
connector.add_trading_pair(trading_pair) if result: - logger.info(f"Added trading pair {trading_pair} via order_book_tracker.add_trading_pair()") + logger.info(f"Successfully added {trading_pair} via connector.add_trading_pair()") return True - except Exception as e: - logger.debug(f"order_book_tracker.add_trading_pair failed: {e}") - - # Last resort fallback: Use orderbook data source to initialize order book directly - if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: - try: - orderbook_ds = connector._orderbook_ds - tracker = connector.order_book_tracker + else: + logger.warning(f"connector.add_trading_pair() returned False for {trading_pair}") - # Get initial order book from data source - order_book = await orderbook_ds.get_new_order_book(trading_pair) + # Case 2: Tracker not running - need to start it with this trading pair + else: + logger.info(f"Starting order book tracker for {type(connector).__name__} with {trading_pair}") - # Add to tracker's order_books dict - if hasattr(tracker, 'order_books'): - tracker.order_books[trading_pair] = order_book + # Register the trading pair FIRST (before starting tracker) + if hasattr(tracker, '_trading_pairs'): + self._add_to_trading_pairs(tracker._trading_pairs, trading_pair) + logger.debug(f"Registered {trading_pair} with tracker._trading_pairs") - # Also add to trading pairs tracking - if hasattr(tracker, '_trading_pairs'): - self._add_to_trading_pairs(tracker._trading_pairs, trading_pair) + # Start the tracker - it will initialize order books for registered pairs + if hasattr(tracker, 'start'): + tracker.start() + logger.info(f"Called tracker.start() for {type(connector).__name__}") - logger.info(f"Initialized order book for {trading_pair} via data source fallback") + # Wait for tracker to be ready + try: + await asyncio.wait_for(tracker.wait_ready(), timeout=30.0) + logger.info(f"Order book tracker ready for {type(connector).__name__}") + except asyncio.TimeoutError: + logger.warning(f"Timeout 
waiting for tracker to be ready, checking order book directly") + + # Verify order book was initialized + if trading_pair in tracker.order_books: + logger.info(f"Order book for {trading_pair} initialized during tracker startup") return True + # Fallback: Try to get order book snapshot directly + logger.info(f"Attempting fallback order book initialization for {trading_pair}") + if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: + try: + order_book = await connector._orderbook_ds.get_new_order_book(trading_pair) + tracker.order_books[trading_pair] = order_book + self._add_to_trading_pairs(tracker._trading_pairs, trading_pair) + logger.info(f"Initialized order book for {trading_pair} via REST fallback") + return True except Exception as e: - logger.error(f"Failed to initialize order book via data source: {e}") + logger.error(f"Fallback order book initialization failed: {e}") - logger.warning( - f"Connector {type(connector).__name__} doesn't support " - f"dynamic trading pair addition" - ) + logger.error(f"Failed to add {trading_pair} to order book tracker") return False except Exception as e: - logger.error(f"Error adding trading pair {trading_pair}: {e}") + logger.error(f"Error adding trading pair {trading_pair}: {e}", exc_info=True) return False async def remove_trading_pair( @@ -635,6 +596,46 @@ async def _remove_trading_pair_from_tracker( logger.error(f"Error removing trading pair {trading_pair}: {e}") return False + async def _wait_for_websocket_ready( + self, + connector: ConnectorBase, + timeout: float = 10.0 + ) -> bool: + """Wait for the order book data source WebSocket to be connected. + + The tracker.start() method launches listen_for_subscriptions() which: + 1. Connects to WebSocket via _connected_websocket_assistant() + 2. Sets _ws_assistant reference + 3. Subscribes to channels + + We need to wait for _ws_assistant to be set before trying to + subscribe to new trading pairs dynamically. 
+ + Args: + connector: The connector to check + timeout: Maximum time to wait in seconds + + Returns: + True if WebSocket is ready, False if timeout + """ + if not hasattr(connector, '_orderbook_ds') or not connector._orderbook_ds: + # No order book data source, can't check WebSocket + return True + + data_source = connector._orderbook_ds + waited = 0 + interval = 0.2 + + while waited < timeout: + if hasattr(data_source, '_ws_assistant') and data_source._ws_assistant is not None: + logger.debug(f"WebSocket ready for {type(connector).__name__}") + return True + await asyncio.sleep(interval) + waited += interval + + logger.warning(f"Timeout waiting for WebSocket connection on {type(connector).__name__}") + return False + async def _wait_for_order_book( self, tracker, @@ -1250,3 +1251,191 @@ async def stop_all(self): self._data_connectors_started.clear() logger.info("Stopped all connectors") + + # ========================================================================= + # Order Book Tracker Diagnostics & Restart + # ========================================================================= + + def get_order_book_tracker_diagnostics( + self, + connector_name: str, + account_name: Optional[str] = None + ) -> Dict: + """Get diagnostics for a connector's order book tracker. + + Returns information about: + - Whether the tracker is running + - Task status (alive/crashed) + - Metrics (diffs processed, last update, etc.) 
+ - WebSocket status + + Args: + connector_name: The connector to diagnose + account_name: Optional account for trading connector + + Returns: + Dictionary with diagnostic information + """ + connector = self.get_best_connector_for_market(connector_name, account_name) + + if not connector: + return {"error": f"No connector found for {connector_name}"} + + diagnostics = { + "connector_type": type(connector).__name__, + "connector_name": connector_name, + "has_order_book_tracker": False, + "tracker_ready": False, + "tasks": {}, + "trading_pairs": [], + "order_books": {}, + "metrics": None, + "websocket_status": "unknown", + } + + if not hasattr(connector, 'order_book_tracker') or not connector.order_book_tracker: + return diagnostics + + tracker = connector.order_book_tracker + diagnostics["has_order_book_tracker"] = True + diagnostics["tracker_ready"] = tracker.ready if hasattr(tracker, 'ready') else False + + # Get trading pairs + if hasattr(tracker, '_trading_pairs'): + diagnostics["trading_pairs"] = list(tracker._trading_pairs) if isinstance(tracker._trading_pairs, (list, set)) else [] + + # Check task status + task_names = [ + '_order_book_stream_listener_task', + '_order_book_diff_listener_task', + '_order_book_trade_listener_task', + '_order_book_snapshot_listener_task', + '_order_book_diff_router_task', + '_order_book_snapshot_router_task', + '_init_order_books_task', + '_emit_trade_event_task', + ] + + for task_name in task_names: + task = getattr(tracker, task_name, None) + if task is not None: + diagnostics["tasks"][task_name] = { + "exists": True, + "done": task.done(), + "cancelled": task.cancelled(), + "exception": str(task.exception()) if task.done() and not task.cancelled() and task.exception() else None, + } + else: + diagnostics["tasks"][task_name] = {"exists": False} + + # Check order books + if hasattr(tracker, 'order_books'): + for trading_pair, order_book in tracker.order_books.items(): + try: + bids, asks = order_book.snapshot + best_bid = 
float(bids.iloc[0]['price']) if len(bids) > 0 else None + best_ask = float(asks.iloc[0]['price']) if len(asks) > 0 else None + diagnostics["order_books"][trading_pair] = { + "best_bid": best_bid, + "best_ask": best_ask, + "bid_count": len(bids), + "ask_count": len(asks), + "snapshot_uid": order_book.snapshot_uid if hasattr(order_book, 'snapshot_uid') else None, + "last_diff_uid": order_book.last_diff_uid if hasattr(order_book, 'last_diff_uid') else None, + } + except Exception as e: + diagnostics["order_books"][trading_pair] = {"error": str(e)} + + # Get metrics if available + if hasattr(tracker, 'metrics'): + try: + diagnostics["metrics"] = tracker.metrics.to_dict() + except Exception as e: + diagnostics["metrics"] = {"error": str(e)} + + # Check WebSocket status + if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: + data_source = connector._orderbook_ds + if hasattr(data_source, '_ws_assistant') and data_source._ws_assistant is not None: + diagnostics["websocket_status"] = "connected" + else: + diagnostics["websocket_status"] = "not_connected" + + return diagnostics + + async def restart_order_book_tracker( + self, + connector_name: str, + account_name: Optional[str] = None + ) -> Dict: + """Restart the order book tracker for a connector. + + This method: + 1. Stops the existing order book tracker + 2. 
Restarts it with the same trading pairs + + Args: + connector_name: The connector to restart + account_name: Optional account for trading connector + + Returns: + Dictionary with restart status + """ + connector = self.get_best_connector_for_market(connector_name, account_name) + + if not connector: + return {"success": False, "error": f"No connector found for {connector_name}"} + + if not hasattr(connector, 'order_book_tracker') or not connector.order_book_tracker: + return {"success": False, "error": "Connector has no order book tracker"} + + tracker = connector.order_book_tracker + + # Get existing trading pairs before stopping + trading_pairs = [] + if hasattr(tracker, '_trading_pairs'): + trading_pairs = list(tracker._trading_pairs) if isinstance(tracker._trading_pairs, (list, set)) else [] + + if not trading_pairs: + return {"success": False, "error": "No trading pairs to restart"} + + try: + # Stop the tracker + logger.info(f"Stopping order book tracker for {connector_name}...") + tracker.stop() + + # Wait a moment for cleanup + await asyncio.sleep(0.5) + + # Re-add trading pairs to tracker before restarting + if hasattr(tracker, '_trading_pairs'): + if isinstance(tracker._trading_pairs, set): + tracker._trading_pairs.clear() + for tp in trading_pairs: + tracker._trading_pairs.add(tp) + elif isinstance(tracker._trading_pairs, list): + tracker._trading_pairs.clear() + tracker._trading_pairs.extend(trading_pairs) + + # Restart the tracker + logger.info(f"Restarting order book tracker for {connector_name} with pairs: {trading_pairs}") + tracker.start() + + # Wait for initialization + try: + await asyncio.wait_for(tracker.wait_ready(), timeout=30.0) + except asyncio.TimeoutError: + logger.warning(f"Timeout waiting for tracker to be ready, continuing anyway...") + + # Wait for WebSocket to be ready + await self._wait_for_websocket_ready(connector, timeout=10.0) + + return { + "success": True, + "message": f"Order book tracker restarted for {connector_name}", + 
"trading_pairs": trading_pairs, + } + + except Exception as e: + logger.error(f"Error restarting order book tracker: {e}", exc_info=True) + return {"success": False, "error": str(e)} From 205240570d1ad772b59de289f8724df17044e9eb Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 21 Jan 2026 15:45:35 -0300 Subject: [PATCH 13/20] (feat) restart ob tracker and state --- routers/market_data.py | 72 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) diff --git a/routers/market_data.py b/routers/market_data.py index 20771813..8fc6376c 100644 --- a/routers/market_data.py +++ b/routers/market_data.py @@ -539,3 +539,75 @@ async def remove_trading_pair( ) +# Order Book Tracker Diagnostics Endpoints + +@router.get("/order-book/diagnostics/{connector_name}") +async def get_order_book_diagnostics( + connector_name: str, + account_name: str = None, + market_data_service: MarketDataService = Depends(get_market_data_service) +): + """ + Get diagnostics for a connector's order book tracker. + + Returns detailed information about the order book tracker status including: + - Task status (running/crashed) + - WebSocket connection status + - Metrics (messages processed, latency, etc.) + - Current order book state + + Args: + connector_name: The connector to diagnose (e.g., "binance") + account_name: Optional account name for trading connectors + + Returns: + Diagnostic information dictionary + """ + try: + diagnostics = market_data_service.get_order_book_tracker_diagnostics( + connector_name=connector_name, + account_name=account_name + ) + return diagnostics + except Exception as e: + raise HTTPException( + status_code=500, + detail=f"Error getting diagnostics: {str(e)}" + ) + + +@router.post("/order-book/restart/{connector_name}") +async def restart_order_book_tracker( + connector_name: str, + account_name: str = None, + market_data_service: MarketDataService = Depends(get_market_data_service) +): + """ + Restart the order book tracker for a connector. 
+ + Use this endpoint when the order book is stale (WebSocket disconnected). + This will: + 1. Stop the existing order book tracker + 2. Restart it with the same trading pairs + 3. Wait for the WebSocket to reconnect + + Args: + connector_name: The connector to restart (e.g., "binance") + account_name: Optional account name for trading connectors + + Returns: + Restart status with success/failure and trading pairs + """ + try: + result = await market_data_service.restart_order_book_tracker( + connector_name=connector_name, + account_name=account_name + ) + return result + except Exception as e: + raise HTTPException( + status_code=500, + detail=f"Error restarting order book tracker: {str(e)}" + ) + + From 98cf59e8a38b9d62eb9db8a55372b442a519ec6c Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 22 Jan 2026 12:18:30 -0300 Subject: [PATCH 14/20] (feat) add position hold --- routers/executors.py | 118 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 118 insertions(+) diff --git a/routers/executors.py b/routers/executors.py index 80887e57..4c209972 100644 --- a/routers/executors.py +++ b/routers/executors.py @@ -18,6 +18,8 @@ ExecutorFilterRequest, ExecutorResponse, ExecutorsSummaryResponse, + PositionHoldResponse, + PositionsSummaryResponse, StopExecutorRequest, StopExecutorResponse, ) @@ -254,6 +256,122 @@ async def delete_executor( raise HTTPException(status_code=500, detail=f"Error deleting executor: {str(e)}") +# ======================================== +# Position Hold Endpoints +# ======================================== + +@router.get("/positions/summary", response_model=PositionsSummaryResponse) +async def get_positions_summary( + executor_service: ExecutorService = Depends(get_executor_service) +): + """ + Get summary of all held positions from executors stopped with keep_position=True. 
+ + Returns aggregate information including: + - Total number of active position holds + - Total realized PnL across all positions + - List of all positions with breakeven prices and PnL + """ + try: + summary = executor_service.get_positions_summary() + return PositionsSummaryResponse(**summary) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error getting positions summary: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Error getting positions summary: {str(e)}") + + +@router.get("/positions/{connector_name}/{trading_pair}", response_model=PositionHoldResponse) +async def get_position_held( + connector_name: str, + trading_pair: str, + account_name: str = "master_account", + executor_service: ExecutorService = Depends(get_executor_service) +): + """ + Get held position for a specific connector/trading pair. + + Returns the aggregated position from executors stopped with keep_position=True, + including breakeven prices, matched/unmatched volume, and realized PnL. 
+ """ + try: + position = executor_service.get_position_held( + account_name=account_name, + connector_name=connector_name, + trading_pair=trading_pair + ) + + if not position: + raise HTTPException( + status_code=404, + detail=f"No position hold found for {connector_name}/{trading_pair}" + ) + + return PositionHoldResponse( + trading_pair=position.trading_pair, + connector_name=position.connector_name, + account_name=position.account_name, + buy_amount_base=float(position.buy_amount_base), + buy_amount_quote=float(position.buy_amount_quote), + sell_amount_base=float(position.sell_amount_base), + sell_amount_quote=float(position.sell_amount_quote), + net_amount_base=float(position.net_amount_base), + buy_breakeven_price=float(position.buy_breakeven_price) if position.buy_breakeven_price else None, + sell_breakeven_price=float(position.sell_breakeven_price) if position.sell_breakeven_price else None, + matched_amount_base=float(position.matched_amount_base), + unmatched_amount_base=float(position.unmatched_amount_base), + position_side=position.position_side, + realized_pnl_quote=float(position.realized_pnl_quote), + executor_count=len(position.executor_ids), + executor_ids=position.executor_ids, + last_updated=position.last_updated.isoformat() if position.last_updated else None + ) + except HTTPException: + raise + except Exception as e: + logger.error(f"Error getting position: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Error getting position: {str(e)}") + + +@router.delete("/positions/{connector_name}/{trading_pair}") +async def clear_position_held( + connector_name: str, + trading_pair: str, + account_name: str = "master_account", + executor_service: ExecutorService = Depends(get_executor_service) +): + """ + Clear a held position (after manual close or full exit). + + This removes the position from tracking but preserves historical data + in completed executors. 
+ """ + try: + cleared = executor_service.clear_position_held( + account_name=account_name, + connector_name=connector_name, + trading_pair=trading_pair + ) + + if not cleared: + raise HTTPException( + status_code=404, + detail=f"No position hold found for {connector_name}/{trading_pair}" + ) + + return { + "message": f"Position hold for {connector_name}/{trading_pair} cleared", + "connector_name": connector_name, + "trading_pair": trading_pair + } + except HTTPException: + raise + except Exception as e: + logger.error(f"Error clearing position: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Error clearing position: {str(e)}") + + @router.get("/types/available") async def get_available_executor_types(): """ From 9f8ee26c9f98a1ac47105a41c9cba062d7de9c53 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 22 Jan 2026 12:19:09 -0300 Subject: [PATCH 15/20] (feat) clean up code --- services/accounts_service.py | 41 +++--------------------------------- services/trading_service.py | 35 +++++++----------------------- 2 files changed, 11 insertions(+), 65 deletions(-) diff --git a/services/accounts_service.py b/services/accounts_service.py index 9a7a883e..82ba5d9e 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -227,44 +227,9 @@ def _register_trading_pair_with_connector( connector: The connector instance trading_pair: Trading pair to register """ - logger.info(f"Registering {trading_pair} with connector {type(connector).__name__}") - - # Add to connector's _trading_pairs if it exists - if hasattr(connector, '_trading_pairs'): - tp_type = type(connector._trading_pairs).__name__ - logger.info(f"Connector has _trading_pairs of type: {tp_type}") - - if isinstance(connector._trading_pairs, set): - connector._trading_pairs.add(trading_pair) - logger.info(f"Added {trading_pair} to connector._trading_pairs (set)") - elif isinstance(connector._trading_pairs, list): - if trading_pair not in connector._trading_pairs: - 
connector._trading_pairs.append(trading_pair) - logger.info(f"Added {trading_pair} to connector._trading_pairs (list)") - elif isinstance(connector._trading_pairs, dict): - # For paper trade or similar connectors that use a dict - if trading_pair not in connector._trading_pairs: - base, quote = trading_pair.split("-") - # Import TradingPair if needed for paper trade - try: - from hummingbot.connector.exchange.paper_trade.trading_pair import TradingPair - connector._trading_pairs[trading_pair] = TradingPair( - trading_pair=f"{base}{quote}", - base_asset=base, - quote_asset=quote - ) - logger.info(f"Added {trading_pair} to connector._trading_pairs (dict)") - except ImportError: - connector._trading_pairs[trading_pair] = trading_pair - logger.info(f"Added {trading_pair} to connector._trading_pairs (dict, simple)") - else: - logger.warning(f"Connector {type(connector).__name__} does not have _trading_pairs attribute") - - # Also check if order_book_tracker has the pair - if hasattr(connector, 'order_book_tracker'): - tracker = connector.order_book_tracker - has_ob = trading_pair in tracker.order_books if hasattr(tracker, 'order_books') else False - logger.info(f"Order book tracker has {trading_pair}: {has_ob}") + if trading_pair not in connector._trading_pairs: + connector._trading_pairs.append(trading_pair) + logger.debug(f"Registered {trading_pair} with connector {type(connector).__name__}") async def remove_market( self, diff --git a/services/trading_service.py b/services/trading_service.py index c50bffd9..b3dac601 100644 --- a/services/trading_service.py +++ b/services/trading_service.py @@ -165,10 +165,10 @@ async def add_market( timeout=order_book_timeout ) - if success: - logger.info(f"Order book initialized successfully for {connector_name}/{trading_pair}") - else: - logger.warning(f"Order book initialization failed for {connector_name}/{trading_pair}") + if not success: + raise ValueError(f"Failed to initialize order book for {connector_name}/{trading_pair}") 
+ + logger.info(f"Order book initialized successfully for {connector_name}/{trading_pair}") # Register trading pair with connector self._register_trading_pair_with_connector(connector, trading_pair) @@ -218,31 +218,12 @@ def _register_trading_pair_with_connector( Register a trading pair with the connector's internal structures. Args: - connector: The connector instance + connector: The connector instance (ExchangePyBase) trading_pair: Trading pair to register """ - logger.debug(f"Registering {trading_pair} with connector {type(connector).__name__}") - - if hasattr(connector, '_trading_pairs'): - tp_type = type(connector._trading_pairs).__name__ - - if isinstance(connector._trading_pairs, set): - connector._trading_pairs.add(trading_pair) - elif isinstance(connector._trading_pairs, list): - if trading_pair not in connector._trading_pairs: - connector._trading_pairs.append(trading_pair) - elif isinstance(connector._trading_pairs, dict): - if trading_pair not in connector._trading_pairs: - base, quote = trading_pair.split("-") - try: - from hummingbot.connector.exchange.paper_trade.trading_pair import TradingPair - connector._trading_pairs[trading_pair] = TradingPair( - trading_pair=f"{base}{quote}", - base_asset=base, - quote_asset=quote - ) - except ImportError: - connector._trading_pairs[trading_pair] = trading_pair + if trading_pair not in connector._trading_pairs: + connector._trading_pairs.append(trading_pair) + logger.debug(f"Registered {trading_pair} with connector {type(connector).__name__}") # ======================================== # ScriptStrategyBase-compatible methods From 8e7ac76cdd34256746c39fddd2d0712c207f7557 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 22 Jan 2026 12:19:22 -0300 Subject: [PATCH 16/20] (feat) add position hold --- models/executors.py | 186 +++++++++++++++++++++++++- services/executor_service.py | 247 ++++++++++++++++++++++++++++++++++- 2 files changed, 431 insertions(+), 2 deletions(-) diff --git a/models/executors.py 
b/models/executors.py index 7d892e30..0822b390 100644 --- a/models/executors.py +++ b/models/executors.py @@ -5,13 +5,197 @@ validation for the REST API. """ from datetime import datetime +from decimal import Decimal from typing import Any, Dict, List, Literal, Optional -from pydantic import BaseModel, ConfigDict, Field +from pydantic import BaseModel, ConfigDict, Field, computed_field from .pagination import PaginationParams +# ======================================== +# Position Hold for Aggregated Tracking +# ======================================== + +class PositionHold(BaseModel): + """ + Tracks aggregated position from executors stopped with keep_position=True. + + Similar to hummingbot's PositionHold, this tracks: + - Separate buy/sell amounts for proper breakeven calculation + - Matched volume (realized PnL) vs unmatched volume (unrealized PnL) + - Aggregation across multiple executors on the same trading pair + """ + model_config = ConfigDict(arbitrary_types_allowed=True) + + trading_pair: str = Field(description="Trading pair (e.g., 'BTC-USDT')") + connector_name: str = Field(description="Connector name") + account_name: str = Field(description="Account name") + + # Buy side tracking + buy_amount_base: Decimal = Field(default=Decimal("0"), description="Total bought amount in base currency") + buy_amount_quote: Decimal = Field(default=Decimal("0"), description="Total spent on buys in quote currency") + + # Sell side tracking + sell_amount_base: Decimal = Field(default=Decimal("0"), description="Total sold amount in base currency") + sell_amount_quote: Decimal = Field(default=Decimal("0"), description="Total received from sells in quote currency") + + # Realized PnL from matched positions + realized_pnl_quote: Decimal = Field(default=Decimal("0"), description="Realized PnL from matched buy/sell pairs") + + # Tracking + executor_ids: List[str] = Field(default_factory=list, description="IDs of executors contributing to this position") + last_updated: 
Optional[datetime] = Field(default=None, description="Last update timestamp") + + @computed_field + @property + def net_amount_base(self) -> Decimal: + """Net position in base currency (positive = long, negative = short).""" + return self.buy_amount_base - self.sell_amount_base + + @computed_field + @property + def buy_breakeven_price(self) -> Optional[Decimal]: + """Average buy price (breakeven for long position).""" + if self.buy_amount_base > 0: + return self.buy_amount_quote / self.buy_amount_base + return None + + @computed_field + @property + def sell_breakeven_price(self) -> Optional[Decimal]: + """Average sell price (breakeven for short position).""" + if self.sell_amount_base > 0: + return self.sell_amount_quote / self.sell_amount_base + return None + + @computed_field + @property + def matched_amount_base(self) -> Decimal: + """Amount that has been matched (min of buy/sell).""" + return min(self.buy_amount_base, self.sell_amount_base) + + @computed_field + @property + def unmatched_amount_base(self) -> Decimal: + """Absolute unmatched position size.""" + return abs(self.net_amount_base) + + @computed_field + @property + def position_side(self) -> Optional[str]: + """Current position side: LONG, SHORT, or FLAT.""" + if self.net_amount_base > 0: + return "LONG" + elif self.net_amount_base < 0: + return "SHORT" + return "FLAT" + + def add_fill( + self, + side: str, + amount_base: Decimal, + amount_quote: Decimal, + executor_id: Optional[str] = None + ): + """ + Add a fill to the position tracking. 
+ + Args: + side: "BUY" or "SELL" + amount_base: Amount in base currency + amount_quote: Amount in quote currency + executor_id: Optional executor ID to track + """ + if side.upper() == "BUY": + self.buy_amount_base += amount_base + self.buy_amount_quote += amount_quote + else: + self.sell_amount_base += amount_base + self.sell_amount_quote += amount_quote + + # Calculate realized PnL when we have matched volume + self._calculate_realized_pnl() + + if executor_id and executor_id not in self.executor_ids: + self.executor_ids.append(executor_id) + + self.last_updated = datetime.utcnow() + + def _calculate_realized_pnl(self): + """Calculate realized PnL from matched buy/sell volume using volume-weighted average prices.""" + matched = self.matched_amount_base + if matched > 0 and self.buy_amount_base > 0 and self.sell_amount_base > 0: + # Average prices + avg_buy = self.buy_amount_quote / self.buy_amount_base + avg_sell = self.sell_amount_quote / self.sell_amount_base + # Realized PnL = matched_amount * (avg_sell - avg_buy) + self.realized_pnl_quote = matched * (avg_sell - avg_buy) + + def get_unrealized_pnl(self, current_price: Decimal) -> Decimal: + """ + Calculate unrealized PnL for unmatched position. 
+ + Args: + current_price: Current market price + + Returns: + Unrealized PnL in quote currency + """ + if self.net_amount_base > 0: + # Long position: profit if price goes up + avg_buy = self.buy_breakeven_price or Decimal("0") + return self.net_amount_base * (current_price - avg_buy) + elif self.net_amount_base < 0: + # Short position: profit if price goes down + avg_sell = self.sell_breakeven_price or Decimal("0") + return abs(self.net_amount_base) * (avg_sell - current_price) + return Decimal("0") + + def merge(self, other: "PositionHold"): + """Merge another PositionHold into this one.""" + self.buy_amount_base += other.buy_amount_base + self.buy_amount_quote += other.buy_amount_quote + self.sell_amount_base += other.sell_amount_base + self.sell_amount_quote += other.sell_amount_quote + + for eid in other.executor_ids: + if eid not in self.executor_ids: + self.executor_ids.append(eid) + + self._calculate_realized_pnl() + self.last_updated = datetime.utcnow() + + +class PositionHoldResponse(BaseModel): + """API response model for PositionHold.""" + trading_pair: str + connector_name: str + account_name: str + buy_amount_base: float + buy_amount_quote: float + sell_amount_base: float + sell_amount_quote: float + net_amount_base: float + buy_breakeven_price: Optional[float] + sell_breakeven_price: Optional[float] + matched_amount_base: float + unmatched_amount_base: float + position_side: Optional[str] + realized_pnl_quote: float + unrealized_pnl_quote: Optional[float] = None + executor_count: int + executor_ids: List[str] + last_updated: Optional[str] + + +class PositionsSummaryResponse(BaseModel): + """Summary of all held positions.""" + total_positions: int = Field(description="Number of active position holds") + total_realized_pnl: float = Field(description="Total realized PnL across all positions") + positions: List[PositionHoldResponse] = Field(description="List of position holds") + + # ======================================== # Executor Type Definitions # 
======================================== diff --git a/services/executor_service.py b/services/executor_service.py index fc060f95..da33a684 100644 --- a/services/executor_service.py +++ b/services/executor_service.py @@ -30,9 +30,10 @@ from hummingbot.strategy_v2.executors.xemm_executor.data_types import XEMMExecutorConfig from hummingbot.strategy_v2.executors.xemm_executor.xemm_executor import XEMMExecutor from hummingbot.strategy_v2.models.base import RunnableStatus -from hummingbot.strategy_v2.models.executors import TrackedOrder +from hummingbot.strategy_v2.models.executors import CloseType, TrackedOrder from database import AsyncDatabaseManager +from models.executors import PositionHold from services.trading_service import TradingService, AccountTradingInterface logger = logging.getLogger(__name__) @@ -117,6 +118,10 @@ def __init__( # Completed executors (kept for a period for queries) self._completed_executors: Dict[str, Dict[str, Any]] = {} + # Position holds: key = "account_name|connector_name|trading_pair" + # Tracks aggregated positions from executors stopped with keep_position=True + self._positions_held: Dict[str, PositionHold] = {} + # Control loop task self._control_loop_task: Optional[asyncio.Task] = None self._is_running = False @@ -426,6 +431,10 @@ async def _handle_executor_completion(self, executor_id: str): # Store in completed executors self._completed_executors[executor_id] = final_info + # Check if this is a POSITION_HOLD close type (keep_position=True) + if executor.close_type == CloseType.POSITION_HOLD: + await self._aggregate_position_hold(executor_id, executor, metadata) + # Persist final state to database await self._persist_executor_completed(executor_id, executor) @@ -647,3 +656,239 @@ def remove_completed_executor(self, executor_id: str) -> bool: del self._completed_executors[executor_id] return True return False + + # ======================================== + # Position Hold Tracking Methods + # 
======================================== + + def _get_position_key( + self, + account_name: str, + connector_name: str, + trading_pair: str + ) -> str: + """Generate a unique key for position tracking.""" + return f"{account_name}|{connector_name}|{trading_pair}" + + async def _aggregate_position_hold( + self, + executor_id: str, + executor: ExecutorBase, + metadata: Dict[str, Any] + ): + """ + Aggregate position data from an executor stopped with keep_position=True. + + This extracts the filled amounts from the executor and adds them to + the aggregated position tracking. + """ + account_name = metadata.get("account_name", self.default_account) + connector_name = metadata.get("connector_name", "") + trading_pair = metadata.get("trading_pair", "") + + if not connector_name or not trading_pair: + logger.warning(f"Cannot aggregate position for executor {executor_id}: missing connector/pair info") + return + + position_key = self._get_position_key(account_name, connector_name, trading_pair) + + # Get or create position hold + if position_key not in self._positions_held: + self._positions_held[position_key] = PositionHold( + trading_pair=trading_pair, + connector_name=connector_name, + account_name=account_name + ) + + position = self._positions_held[position_key] + + # Extract filled amounts from executor + try: + # Try to get executor info + try: + executor_info = executor.executor_info + custom_info = executor_info.custom_info or {} + except Exception: + custom_info = executor.get_custom_info() if hasattr(executor, 'get_custom_info') else {} + + # Get side from config or custom_info + config = metadata.get("config", {}) + side = config.get("side", custom_info.get("side", "BUY")) + + # Extract filled amounts - try different sources + filled_amount_base = Decimal("0") + filled_amount_quote = Decimal("0") + + # Try from executor attributes directly + if hasattr(executor, 'filled_amount_base'): + filled_amount_base = Decimal(str(executor.filled_amount_base or 0)) + if 
hasattr(executor, 'filled_amount_quote'): + filled_amount_quote = Decimal(str(executor.filled_amount_quote or 0)) + + # Fallback to custom_info + if filled_amount_base == 0 and custom_info: + filled_amount_base = Decimal(str(custom_info.get("filled_amount_base", 0))) + if filled_amount_quote == 0 and custom_info: + filled_amount_quote = Decimal(str(custom_info.get("filled_amount_quote", 0))) + + # For grid executors, aggregate from held_position_orders + if metadata.get("executor_type") == "grid_executor" and custom_info: + buy_filled_base = Decimal("0") + buy_filled_quote = Decimal("0") + sell_filled_base = Decimal("0") + sell_filled_quote = Decimal("0") + + # held_position_orders contains the orders kept when keep_position=True + held_orders = custom_info.get("held_position_orders", []) + + for order in held_orders: + if isinstance(order, dict): + trade_type = order.get("trade_type", "BUY") + exec_base = Decimal(str(order.get("executed_amount_base", 0))) + exec_quote = Decimal(str(order.get("executed_amount_quote", 0))) + + if trade_type == "BUY": + buy_filled_base += exec_base + buy_filled_quote += exec_quote + else: + sell_filled_base += exec_base + sell_filled_quote += exec_quote + + # Add buy and sell fills separately + if buy_filled_base > 0: + position.add_fill("BUY", buy_filled_base, buy_filled_quote, executor_id) + if sell_filled_base > 0: + position.add_fill("SELL", sell_filled_base, sell_filled_quote, executor_id) + + logger.info( + f"Aggregated grid executor {executor_id} to position {position_key}: " + f"buy={buy_filled_base} base, sell={sell_filled_base} base" + ) + + elif filled_amount_base > 0: + # For non-grid executors with a single side + position.add_fill(side, filled_amount_base, filled_amount_quote, executor_id) + logger.info( + f"Aggregated executor {executor_id} to position {position_key}: " + f"{side} {filled_amount_base} base @ {filled_amount_quote} quote" + ) + else: + logger.debug(f"Executor {executor_id} has no filled amounts to 
aggregate") + + except Exception as e: + logger.error(f"Error aggregating position for executor {executor_id}: {e}", exc_info=True) + + def get_positions_held( + self, + account_name: Optional[str] = None, + connector_name: Optional[str] = None, + trading_pair: Optional[str] = None + ) -> List[PositionHold]: + """ + Get held positions with optional filtering. + + Args: + account_name: Filter by account name + connector_name: Filter by connector name + trading_pair: Filter by trading pair + + Returns: + List of PositionHold objects matching the filters + """ + positions = [] + + for position in self._positions_held.values(): + # Apply filters + if account_name and position.account_name != account_name: + continue + if connector_name and position.connector_name != connector_name: + continue + if trading_pair and position.trading_pair != trading_pair: + continue + + # Only include positions with actual volume + if position.buy_amount_base > 0 or position.sell_amount_base > 0: + positions.append(position) + + return positions + + def get_position_held( + self, + account_name: str, + connector_name: str, + trading_pair: str + ) -> Optional[PositionHold]: + """ + Get a specific held position. + + Args: + account_name: Account name + connector_name: Connector name + trading_pair: Trading pair + + Returns: + PositionHold or None if not found + """ + position_key = self._get_position_key(account_name, connector_name, trading_pair) + return self._positions_held.get(position_key) + + def clear_position_held( + self, + account_name: str, + connector_name: str, + trading_pair: str + ) -> bool: + """ + Clear a specific held position (after manual close or full exit). 
+ + Args: + account_name: Account name + connector_name: Connector name + trading_pair: Trading pair + + Returns: + True if cleared, False if not found + """ + position_key = self._get_position_key(account_name, connector_name, trading_pair) + if position_key in self._positions_held: + del self._positions_held[position_key] + logger.info(f"Cleared position hold for {position_key}") + return True + return False + + def get_positions_summary(self) -> Dict[str, Any]: + """ + Get summary of all held positions. + + Returns: + Dictionary with total positions, PnL, and position list + """ + positions = self.get_positions_held() + total_realized_pnl = sum(float(p.realized_pnl_quote) for p in positions) + + return { + "total_positions": len(positions), + "total_realized_pnl": total_realized_pnl, + "positions": [ + { + "trading_pair": p.trading_pair, + "connector_name": p.connector_name, + "account_name": p.account_name, + "buy_amount_base": float(p.buy_amount_base), + "buy_amount_quote": float(p.buy_amount_quote), + "sell_amount_base": float(p.sell_amount_base), + "sell_amount_quote": float(p.sell_amount_quote), + "net_amount_base": float(p.net_amount_base), + "buy_breakeven_price": float(p.buy_breakeven_price) if p.buy_breakeven_price else None, + "sell_breakeven_price": float(p.sell_breakeven_price) if p.sell_breakeven_price else None, + "matched_amount_base": float(p.matched_amount_base), + "unmatched_amount_base": float(p.unmatched_amount_base), + "position_side": p.position_side, + "realized_pnl_quote": float(p.realized_pnl_quote), + "executor_count": len(p.executor_ids), + "executor_ids": p.executor_ids, + "last_updated": p.last_updated.isoformat() if p.last_updated else None + } + for p in positions + ] + } From 57be279c08b744d41ba5fa5e7941069af6801a9a Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 22 Jan 2026 12:19:36 -0300 Subject: [PATCH 17/20] (feat) clean up connector code --- services/unified_connector_service.py | 265 +++++++------------------- 1 file 
changed, 67 insertions(+), 198 deletions(-) diff --git a/services/unified_connector_service.py b/services/unified_connector_service.py index f473395a..244ac9e0 100644 --- a/services/unified_connector_service.py +++ b/services/unified_connector_service.py @@ -25,6 +25,7 @@ from hummingbot.client.settings import AllConnectorSettings from hummingbot.connector.connector_base import ConnectorBase from hummingbot.connector.connector_metrics_collector import TradeVolumeMetricCollector +from hummingbot.connector.exchange_py_base import ExchangePyBase from hummingbot.connector.perpetual_derivative_py_base import PerpetualDerivativePyBase from hummingbot.core.data_type.common import OrderType, PositionAction, PositionMode, TradeType from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState @@ -73,54 +74,6 @@ def __init__(self, secrets_manager: ETHKeyFileSecretManger, db_manager=None): # Connector settings cache self._conn_settings = AllConnectorSettings.get_connector_settings() - # ========================================================================= - # Trading Pairs Type-Normalizer Helpers - # ========================================================================= - - def _add_to_trading_pairs(self, trading_pairs_attr, trading_pair: str) -> bool: - """Add trading pair to a _trading_pairs attribute regardless of its type. 
- - Args: - trading_pairs_attr: The _trading_pairs attribute (set, list, or dict) - trading_pair: The trading pair to add - - Returns: - True if added, False if already present or unsupported type - """ - if isinstance(trading_pairs_attr, set): - if trading_pair not in trading_pairs_attr: - trading_pairs_attr.add(trading_pair) - return True - return False - elif isinstance(trading_pairs_attr, list): - if trading_pair not in trading_pairs_attr: - trading_pairs_attr.append(trading_pair) - return True - return False - return False - - def _remove_from_trading_pairs(self, trading_pairs_attr, trading_pair: str) -> bool: - """Remove trading pair from a _trading_pairs attribute regardless of its type. - - Args: - trading_pairs_attr: The _trading_pairs attribute (set, list, or dict) - trading_pair: The trading pair to remove - - Returns: - True if removed, False if not present or unsupported type - """ - if isinstance(trading_pairs_attr, set): - if trading_pair in trading_pairs_attr: - trading_pairs_attr.discard(trading_pair) - return True - return False - elif isinstance(trading_pairs_attr, list): - if trading_pair in trading_pairs_attr: - trading_pairs_attr.remove(trading_pair) - return True - return False - return False - def _is_perpetual_connector(self, connector: ConnectorBase) -> bool: """Check if connector is a perpetual derivative connector. 
@@ -251,8 +204,8 @@ async def ensure_data_connector_started( try: # Add trading pair before starting network - if hasattr(connector, '_trading_pairs'): - self._add_to_trading_pairs(connector._trading_pairs, trading_pair) + if trading_pair not in connector._trading_pairs: + connector._trading_pairs.append(trading_pair) # Start network await connector.start_network() @@ -361,10 +314,6 @@ async def initialize_order_book( logger.error(f"No connector available for {connector_name}") return False - if not hasattr(connector, 'order_book_tracker'): - logger.warning(f"Connector {connector_name} has no order_book_tracker") - return False - tracker = connector.order_book_tracker # Check if already initialized @@ -409,92 +358,75 @@ def _is_tracker_running(self, tracker) -> bool: """Check if the order book tracker is running.""" if not tracker: return False - # Check if any of the main tasks exist and are not done - task = getattr(tracker, '_order_book_stream_listener_task', None) + task = tracker._order_book_stream_listener_task if task and not task.done(): return True - task = getattr(tracker, '_init_order_books_task', None) + task = tracker._init_order_books_task if task and not task.done(): return True return False async def _add_trading_pair_to_tracker( self, - connector: ConnectorBase, + connector: ExchangePyBase, trading_pair: str ) -> bool: """Add a trading pair to connector's order book tracker. - Simplified approach: - 1. If tracker is already running, use connector.add_trading_pair() - 2. Otherwise, register the pair first, then start the tracker + ExchangePyBase connectors have: + - order_book_tracker with _trading_pairs, start(), _orderbook_ds + - add_trading_pair() for dynamic addition - The connector's add_trading_pair() method (from ExchangePyBase) handles: - - WebSocket subscription - - Order book snapshot fetching - - Tracking task creation + Approach: + 1. If tracker is running, use connector.add_trading_pair() + 2. 
Otherwise, register the pair and start the tracker """ try: - if not hasattr(connector, 'order_book_tracker') or not connector.order_book_tracker: - logger.warning(f"Connector {type(connector).__name__} has no order_book_tracker") - return False - tracker = connector.order_book_tracker # Case 1: Tracker is already running and ready if self._is_tracker_running(tracker) and tracker.ready: - # Check if pair is already tracked if trading_pair in tracker.order_books: logger.debug(f"Order book for {trading_pair} already exists") return True - # Use connector's add_trading_pair method for dynamic addition - if hasattr(connector, 'add_trading_pair'): - logger.info(f"Adding {trading_pair} to running tracker via connector.add_trading_pair()") - result = await connector.add_trading_pair(trading_pair) - if result: - logger.info(f"Successfully added {trading_pair} via connector.add_trading_pair()") - return True - else: - logger.warning(f"connector.add_trading_pair() returned False for {trading_pair}") + logger.info(f"Adding {trading_pair} to running tracker") + result = await connector.add_trading_pair(trading_pair) + if result: + logger.info(f"Successfully added {trading_pair}") + return True + logger.warning(f"add_trading_pair() returned False for {trading_pair}") - # Case 2: Tracker not running - need to start it with this trading pair + # Case 2: Tracker not running - start it with this trading pair else: logger.info(f"Starting order book tracker for {type(connector).__name__} with {trading_pair}") - # Register the trading pair FIRST (before starting tracker) - if hasattr(tracker, '_trading_pairs'): - self._add_to_trading_pairs(tracker._trading_pairs, trading_pair) - logger.debug(f"Registered {trading_pair} with tracker._trading_pairs") + # Register the trading pair before starting tracker + if trading_pair not in tracker._trading_pairs: + tracker._trading_pairs.append(trading_pair) - # Start the tracker - it will initialize order books for registered pairs - if 
hasattr(tracker, 'start'): - tracker.start() - logger.info(f"Called tracker.start() for {type(connector).__name__}") - - # Wait for tracker to be ready - try: - await asyncio.wait_for(tracker.wait_ready(), timeout=30.0) - logger.info(f"Order book tracker ready for {type(connector).__name__}") - except asyncio.TimeoutError: - logger.warning(f"Timeout waiting for tracker to be ready, checking order book directly") - - # Verify order book was initialized - if trading_pair in tracker.order_books: - logger.info(f"Order book for {trading_pair} initialized during tracker startup") - return True - - # Fallback: Try to get order book snapshot directly - logger.info(f"Attempting fallback order book initialization for {trading_pair}") - if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: + tracker.start() try: - order_book = await connector._orderbook_ds.get_new_order_book(trading_pair) - tracker.order_books[trading_pair] = order_book - self._add_to_trading_pairs(tracker._trading_pairs, trading_pair) - logger.info(f"Initialized order book for {trading_pair} via REST fallback") + await asyncio.wait_for(tracker.wait_ready(), timeout=30.0) + logger.info(f"Order book tracker ready for {type(connector).__name__}") + except asyncio.TimeoutError: + logger.warning(f"Timeout waiting for tracker to be ready") + + if trading_pair in tracker.order_books: + logger.info(f"Order book for {trading_pair} initialized") return True - except Exception as e: - logger.error(f"Fallback order book initialization failed: {e}") + + # Fallback: Get order book snapshot directly via REST + logger.info(f"Fallback order book initialization for {trading_pair}") + try: + order_book = await connector._orderbook_ds.get_new_order_book(trading_pair) + tracker.order_books[trading_pair] = order_book + if trading_pair not in tracker._trading_pairs: + tracker._trading_pairs.append(trading_pair) + logger.info(f"Initialized order book for {trading_pair} via REST fallback") + return True + except 
Exception as e: + logger.error(f"Fallback order book initialization failed: {e}") logger.error(f"Failed to add {trading_pair} to order book tracker") return False @@ -536,60 +468,31 @@ async def remove_trading_pair( async def _remove_trading_pair_from_tracker( self, - connector: ConnectorBase, + connector: ExchangePyBase, trading_pair: str ) -> bool: """Remove a trading pair from connector's order book tracker. - Uses the connector's remove_trading_pair method which is now available on - ExchangePyBase (and PerpetualDerivativePyBase). This method handles: + ExchangePyBase.remove_trading_pair() handles: - Order book cleanup via order_book_tracker - Funding info cleanup for perpetual connectors """ try: - # Try connector.remove_trading_pair() first - available on ExchangePyBase - # and PerpetualDerivativePyBase (handles funding info cleanup automatically) - if hasattr(connector, 'remove_trading_pair'): - try: - result = await connector.remove_trading_pair(trading_pair) - if result: - logger.info(f"Removed trading pair {trading_pair} via connector.remove_trading_pair()") - return True - except Exception as e: - logger.debug(f"connector.remove_trading_pair failed: {e}") - - # Fallback: Try order_book_tracker.remove_trading_pair directly - if hasattr(connector, 'order_book_tracker') and hasattr(connector.order_book_tracker, 'remove_trading_pair'): - try: - result = await connector.order_book_tracker.remove_trading_pair(trading_pair) - if result: - logger.info(f"Removed trading pair {trading_pair} via order_book_tracker.remove_trading_pair()") - return True - except Exception as e: - logger.debug(f"order_book_tracker.remove_trading_pair failed: {e}") - - # Last resort fallback: Manual removal from tracker - if hasattr(connector, 'order_book_tracker'): - tracker = connector.order_book_tracker - removed = False - - # Remove from order_books dict - if hasattr(tracker, 'order_books') and trading_pair in tracker.order_books: - del tracker.order_books[trading_pair] - removed = 
True - - # Remove from trading pairs tracking - if hasattr(tracker, '_trading_pairs'): - self._remove_from_trading_pairs(tracker._trading_pairs, trading_pair) + result = await connector.remove_trading_pair(trading_pair) + if result: + logger.info(f"Removed trading pair {trading_pair}") + return True - if removed: - logger.info(f"Removed trading pair {trading_pair} via manual fallback") - return True + # Fallback: Manual removal from tracker + tracker = connector.order_book_tracker + if trading_pair in tracker.order_books: + del tracker.order_books[trading_pair] + if trading_pair in tracker._trading_pairs: + tracker._trading_pairs.remove(trading_pair) + logger.info(f"Removed trading pair {trading_pair} via manual fallback") + return True - logger.warning( - f"Connector {type(connector).__name__} doesn't support " - f"dynamic trading pair removal or pair not found" - ) + logger.warning(f"Trading pair {trading_pair} not found") return False except Exception as e: @@ -598,36 +501,16 @@ async def _remove_trading_pair_from_tracker( async def _wait_for_websocket_ready( self, - connector: ConnectorBase, + connector: ExchangePyBase, timeout: float = 10.0 ) -> bool: - """Wait for the order book data source WebSocket to be connected. - - The tracker.start() method launches listen_for_subscriptions() which: - 1. Connects to WebSocket via _connected_websocket_assistant() - 2. Sets _ws_assistant reference - 3. Subscribes to channels - - We need to wait for _ws_assistant to be set before trying to - subscribe to new trading pairs dynamically. 
- - Args: - connector: The connector to check - timeout: Maximum time to wait in seconds - - Returns: - True if WebSocket is ready, False if timeout - """ - if not hasattr(connector, '_orderbook_ds') or not connector._orderbook_ds: - # No order book data source, can't check WebSocket - return True - + """Wait for the order book data source WebSocket to be connected.""" data_source = connector._orderbook_ds waited = 0 interval = 0.2 while waited < timeout: - if hasattr(data_source, '_ws_assistant') and data_source._ws_assistant is not None: + if data_source._ws_assistant is not None: logger.debug(f"WebSocket ready for {type(connector).__name__}") return True await asyncio.sleep(interval) @@ -826,10 +709,8 @@ async def _stop_connector_network(self, connector: ConnectorBase): setattr(connector, task_name, None) # Stop the order book tracker - if hasattr(connector, 'order_book_tracker') and connector.order_book_tracker: - tracker = connector.order_book_tracker - if hasattr(tracker, 'stop'): - tracker.stop() + if connector.order_book_tracker: + connector.order_book_tracker.stop() async def _update_connector_state( self, @@ -846,7 +727,7 @@ async def _update_connector_state( if self._is_perpetual_connector(connector): await connector._update_positions() - if hasattr(connector, '_update_order_status') and connector.in_flight_orders: + if connector.in_flight_orders: await connector._update_order_status() if account_name: await self._sync_orders_to_database( @@ -1386,15 +1267,8 @@ async def restart_order_book_tracker( if not connector: return {"success": False, "error": f"No connector found for {connector_name}"} - if not hasattr(connector, 'order_book_tracker') or not connector.order_book_tracker: - return {"success": False, "error": "Connector has no order book tracker"} - tracker = connector.order_book_tracker - - # Get existing trading pairs before stopping - trading_pairs = [] - if hasattr(tracker, '_trading_pairs'): - trading_pairs = list(tracker._trading_pairs) if 
isinstance(tracker._trading_pairs, (list, set)) else [] + trading_pairs = list(tracker._trading_pairs) if not trading_pairs: return {"success": False, "error": "No trading pairs to restart"} @@ -1408,14 +1282,9 @@ async def restart_order_book_tracker( await asyncio.sleep(0.5) # Re-add trading pairs to tracker before restarting - if hasattr(tracker, '_trading_pairs'): - if isinstance(tracker._trading_pairs, set): - tracker._trading_pairs.clear() - for tp in trading_pairs: - tracker._trading_pairs.add(tp) - elif isinstance(tracker._trading_pairs, list): - tracker._trading_pairs.clear() - tracker._trading_pairs.extend(trading_pairs) + tracker._trading_pairs.clear() + for tp in trading_pairs: + tracker._trading_pairs.append(tp) # Restart the tracker logger.info(f"Restarting order book tracker for {connector_name} with pairs: {trading_pairs}") From f19533c1ff91291d6d4f60642ce9fc19b21265ee Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 23 Jan 2026 15:44:05 -0300 Subject: [PATCH 18/20] (feat) add position hold recovery from database on startup - Add recover_positions_from_db() method to ExecutorService - Load executors with POSITION_HOLD close type on startup - Reconstruct _positions_held tracking from final state - Add get_position_hold_executors() to ExecutorRepository Co-Authored-By: Claude Opus 4.5 --- database/repositories/executor_repository.py | 25 ++++++++ main.py | 1 + services/executor_service.py | 64 ++++++++++++++++++++ 3 files changed, 90 insertions(+) diff --git a/database/repositories/executor_repository.py b/database/repositories/executor_repository.py index 72d034aa..adb1e1f6 100644 --- a/database/repositories/executor_repository.py +++ b/database/repositories/executor_repository.py @@ -142,6 +142,31 @@ async def get_active_executors( result = await self.session.execute(stmt) return list(result.scalars().all()) + async def get_position_hold_executors( + self, + account_name: Optional[str] = None, + connector_name: Optional[str] = None, + 
trading_pair: Optional[str] = None + ) -> List[ExecutorRecord]: + """Get executors that closed with POSITION_HOLD (keep_position=True).""" + stmt = select(ExecutorRecord).where(ExecutorRecord.close_type == "POSITION_HOLD") + + conditions = [] + if account_name: + conditions.append(ExecutorRecord.account_name == account_name) + if connector_name: + conditions.append(ExecutorRecord.connector_name == connector_name) + if trading_pair: + conditions.append(ExecutorRecord.trading_pair == trading_pair) + + if conditions: + stmt = stmt.where(and_(*conditions)) + + stmt = stmt.order_by(desc(ExecutorRecord.created_at)) + + result = await self.session.execute(stmt) + return list(result.scalars().all()) + async def get_executor_stats(self) -> Dict[str, Any]: """Get statistics about executors.""" # Total executors diff --git a/main.py b/main.py index dddf29ea..b132b554 100644 --- a/main.py +++ b/main.py @@ -254,6 +254,7 @@ async def lifespan(app: FastAPI): accounts_service.start() market_data_service.start() executor_service.start() + await executor_service.recover_positions_from_db() # Initialize all trading connectors at startup # This ensures orders are loaded into in_flight_orders and ready for management diff --git a/services/executor_service.py b/services/executor_service.py index da33a684..2e8f6df5 100644 --- a/services/executor_service.py +++ b/services/executor_service.py @@ -133,6 +133,70 @@ def start(self): self._control_loop_task = asyncio.create_task(self._control_loop()) logger.info("ExecutorService started") + async def recover_positions_from_db(self): + """ + Recover position holds from database on startup. + + This loads executors that closed with POSITION_HOLD (keep_position=True) + and reconstructs the _positions_held tracking from their final state. 
+ """ + if not self.db_manager: + return + + try: + async with self.db_manager.get_session_context() as session: + from database.repositories.executor_repository import ExecutorRepository + repo = ExecutorRepository(session) + + position_hold_executors = await repo.get_position_hold_executors() + + for executor_record in position_hold_executors: + # Build position key + position_key = self._get_position_key( + executor_record.account_name, + executor_record.connector_name, + executor_record.trading_pair + ) + + # Initialize position if needed + if position_key not in self._positions_held: + self._positions_held[position_key] = PositionHold( + trading_pair=executor_record.trading_pair, + connector_name=executor_record.connector_name, + account_name=executor_record.account_name, + ) + + position = self._positions_held[position_key] + + # Try to extract fill data from final_state + if executor_record.final_state: + try: + final_state = json.loads(executor_record.final_state) + # Extract buy/sell amounts from final state if available + if 'realized_buy_size_quote' in final_state: + buy_quote = Decimal(str(final_state.get('realized_buy_size_quote', 0))) + sell_quote = Decimal(str(final_state.get('realized_sell_size_quote', 0))) + # Estimate base amounts from quote (rough approximation) + # The actual fill data would be more accurate but this is a fallback + if buy_quote > 0 or sell_quote > 0: + position.buy_amount_quote += buy_quote + position.sell_amount_quote += sell_quote + if executor_record.executor_id not in position.executor_ids: + position.executor_ids.append(executor_record.executor_id) + except (json.JSONDecodeError, TypeError) as e: + logger.debug(f"Could not parse final_state for {executor_record.executor_id}: {e}") + + # Use filled_amount_quote as fallback + elif executor_record.filled_amount_quote: + if executor_record.executor_id not in position.executor_ids: + position.executor_ids.append(executor_record.executor_id) + + if self._positions_held: + 
logger.info(f"Recovered {len(self._positions_held)} position holds from database") + + except Exception as e: + logger.error(f"Error recovering positions from database: {e}", exc_info=True) + async def stop(self): """Stop the executor service and all active executors.""" self._is_running = False From 8b4102ef83105c6acc2bd63b22442751c9b83013 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 23 Jan 2026 17:33:54 -0300 Subject: [PATCH 19/20] (feat) initialize connectors for db --- main.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/main.py b/main.py index b132b554..679424f0 100644 --- a/main.py +++ b/main.py @@ -250,17 +250,17 @@ async def lifespan(app: FastAPI): # 7. Start services # ========================================================================= + # Initialize all trading connectors FIRST (before any service that might use them) + # This ensures OrdersRecorder is properly attached before any concurrent access + logging.info("Initializing all trading connectors...") + await connector_service.initialize_all_trading_connectors() + bots_orchestrator.start() accounts_service.start() market_data_service.start() executor_service.start() await executor_service.recover_positions_from_db() - # Initialize all trading connectors at startup - # This ensures orders are loaded into in_flight_orders and ready for management - logging.info("Initializing all trading connectors...") - await connector_service.initialize_all_trading_connectors() - logging.info("All services started successfully") yield From fc6eaf6a467f3486e9415b6f80351b2b2bacd481 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sun, 25 Jan 2026 23:54:14 -0300 Subject: [PATCH 20/20] (feat) prevent double initialization --- database/repositories/trade_repository.py | 31 ++++++++++++++++++--- services/funding_recorder.py | 7 ++++- services/orders_recorder.py | 11 ++++++-- services/unified_connector_service.py | 33 +++++++++++++++-------- 4 files changed, 64 insertions(+), 18 
deletions(-) diff --git a/database/repositories/trade_repository.py b/database/repositories/trade_repository.py index d9f10ad2..f718a643 100644 --- a/database/repositories/trade_repository.py +++ b/database/repositories/trade_repository.py @@ -2,6 +2,7 @@ from typing import Dict, List, Optional from sqlalchemy import desc, select +from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession from database.models import Trade, Order @@ -11,12 +12,34 @@ class TradeRepository: def __init__(self, session: AsyncSession): self.session = session - async def create_trade(self, trade_data: Dict) -> Trade: - """Create a new trade record.""" + async def create_trade(self, trade_data: Dict) -> Optional[Trade]: + """Create a new trade record if it doesn't already exist. + + Returns the trade if created, or None if it already exists (idempotent). + Handles race conditions gracefully by catching IntegrityError. + """ + # Check if trade already exists + trade_id = trade_data.get("trade_id") + if trade_id: + existing = await self.get_trade_by_id(trade_id) + if existing: + return None # Already exists, skip silently + trade = Trade(**trade_data) self.session.add(trade) - await self.session.flush() # Get the ID - return trade + try: + await self.session.flush() # Get the ID + return trade + except IntegrityError: + # Race condition: another concurrent insert succeeded first + await self.session.rollback() + return None + + async def get_trade_by_id(self, trade_id: str) -> Optional[Trade]: + """Get a trade by its trade_id.""" + query = select(Trade).where(Trade.trade_id == trade_id) + result = await self.session.execute(query) + return result.scalar_one_or_none() async def get_trades(self, account_name: Optional[str] = None, connector_name: Optional[str] = None, diff --git a/services/funding_recorder.py b/services/funding_recorder.py index a12ebcdf..9560939c 100644 --- a/services/funding_recorder.py +++ b/services/funding_recorder.py @@ -34,8 +34,13 @@ def 
__init__(self, db_manager: AsyncDatabaseManager, account_name: str, connecto def start(self, connector: ConnectorBase): """Start recording funding payments for the given connector""" + # Idempotency guard: prevent double-registration of listeners + if self._connector is not None: + self.logger.warning(f"FundingRecorder already started for {self.account_name}/{self.connector_name}, ignoring duplicate start") + return + self._connector = connector - + # Subscribe to funding payment events for event, forwarder in self._event_pairs: connector.add_listener(event, forwarder) diff --git a/services/orders_recorder.py b/services/orders_recorder.py index 021a4512..90ca8119 100644 --- a/services/orders_recorder.py +++ b/services/orders_recorder.py @@ -54,8 +54,13 @@ def __init__(self, db_manager: AsyncDatabaseManager, account_name: str, connecto def start(self, connector: ConnectorBase): """Start recording orders for the given connector""" + # Idempotency guard: prevent double-registration of listeners + if self._connector is not None: + logger.warning(f"OrdersRecorder already started for {self.account_name}/{self.connector_name}, ignoring duplicate start") + return + self._connector = connector - + # Subscribe to order events using the same pattern as MarketsRecorder for event, forwarder in self._event_pairs: connector.add_listener(event, forwarder) @@ -257,7 +262,9 @@ async def _handle_order_filled(self, event: OrderFilledEvent): "fee_paid": validated_fee, "fee_currency": trade_fee_currency } - await trade_repo.create_trade(trade_data) + result = await trade_repo.create_trade(trade_data) + if result is None: + logger.debug(f"Trade {trade_id} already exists, skipping duplicate") except (ValueError, TypeError) as e: logger.error(f"Error creating trade record for {event.order_id}: {e}") logger.error(f"Trade data that failed: timestamp={event.timestamp}, amount={event.amount}, price={event.price}, fee={trade_fee_paid}") diff --git a/services/unified_connector_service.py 
b/services/unified_connector_service.py index 244ac9e0..a0bc2ab9 100644 --- a/services/unified_connector_service.py +++ b/services/unified_connector_service.py @@ -71,6 +71,9 @@ def __init__(self, secrets_manager: ETHKeyFileSecretManger, db_manager=None): self._funding_recorders: Dict[str, any] = {} self._metrics_collectors: Dict[str, TradeVolumeMetricCollector] = {} + # Locks to prevent race conditions in connector creation + self._connector_locks: Dict[str, asyncio.Lock] = {} + # Connector settings cache self._conn_settings = AllConnectorSettings.get_connector_settings() @@ -111,16 +114,24 @@ async def get_trading_connector( Returns: Initialized trading connector """ - if account_name not in self._trading_connectors: - self._trading_connectors[account_name] = {} + cache_key = f"{account_name}:{connector_name}" - if connector_name not in self._trading_connectors[account_name]: - connector = await self._create_and_initialize_trading_connector( - account_name, connector_name - ) - self._trading_connectors[account_name][connector_name] = connector + # Create lock for this cache key if it doesn't exist + if cache_key not in self._connector_locks: + self._connector_locks[cache_key] = asyncio.Lock() + + # Use lock to prevent race conditions during connector creation + async with self._connector_locks[cache_key]: + if account_name not in self._trading_connectors: + self._trading_connectors[account_name] = {} + + if connector_name not in self._trading_connectors[account_name]: + connector = await self._create_and_initialize_trading_connector( + account_name, connector_name + ) + self._trading_connectors[account_name][connector_name] = connector - return self._trading_connectors[account_name][connector_name] + return self._trading_connectors[account_name][connector_name] def get_all_trading_connectors(self) -> Dict[str, Dict[str, ConnectorBase]]: """ @@ -997,10 +1008,10 @@ async def update_connector_keys( BackendAPISecurity.update_connector_keys(account_name, 
connector_config) BackendAPISecurity.decrypt_all(account_name=account_name) - # Clear old connector - self.clear_trading_connector(account_name, connector_name) + # Properly stop old connector (stops recorders, network tasks, cleans up caches) + await self.stop_trading_connector(account_name, connector_name) - # Create new connector + # Create new connector with fresh recorders return await self.get_trading_connector(account_name, connector_name) def clear_trading_connector(