diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..6055abe40 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,72 @@ +# Git +.git +.gitignore +.gitattributes + +# Documentation +*.md +docs/ +README* + +# IDE +.vscode +.idea +*.swp +*.swo +*~ + +# OS +.DS_Store +Thumbs.db + +# Logs +logs/ +*.log + +# Node modules (will be installed in container) +node_modules/ +frontend/node_modules/ + +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +python/.venv/ +*.egg-info/ +dist/ +build/ + +# Environment +.env +.env.local +.env.*.local + +# Database +*.db +*.db-journal +valuecell.db + +# LanceDB +lancedb/ + +# Build artifacts +frontend/build/ +frontend/dist/ +*.tsbuildinfo + +# Tauri +frontend/src-tauri/target/ + +# Docker +docker-compose*.yml +Dockerfile* +.dockerignore + +# Other +*.code-workspace +Makefile +start.sh +start.ps1 + diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..f5ca4f399 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,3 @@ +docker/entrypoint.sh text eol=lf + + diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 000000000..2ae3ae6f5 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,47 @@ +services: + backend: + build: + context: . + dockerfile: docker/Dockerfile.backend + container_name: valuecell-backend + ports: + - "8000:8000" + volumes: + - ./python:/app/python + - ./logs:/app/logs + - ./data:/app/data + - ./lancedb:/app/lancedb + environment: + - API_HOST=0.0.0.0 + - API_PORT=8000 + - CORS_ORIGINS=http://localhost:1420,http://localhost:3000 + - VALUECELL_SQLITE_DB=sqlite:////app/data/valuecell.db + env_file: + - .env + restart: unless-stopped + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8000/api/v1/healthz"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + + frontend: + build: + context: . 
+ dockerfile: docker/Dockerfile.frontend + container_name: valuecell-frontend + ports: + - "1420:1420" + volumes: + - ./frontend:/app/frontend + - /app/frontend/node_modules + environment: + - NODE_ENV=development + - VITE_API_BASE_URL=http://localhost:8000/api/v1 + - TAURI_DEV_HOST=0.0.0.0 + depends_on: + - backend + restart: unless-stopped + + diff --git a/docker/Dockerfile.backend b/docker/Dockerfile.backend new file mode 100644 index 000000000..f836568da --- /dev/null +++ b/docker/Dockerfile.backend @@ -0,0 +1,68 @@ +FROM docker.1ms.run/astral/uv:python3.12-bookworm-slim + +# Configure apt to use Aliyun mirror (for Debian) +RUN sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list.d/debian.sources || \ + sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list || true + +# Install system dependencies including OpenSSL and CA certificates +RUN apt-get update && apt-get install -y \ + curl \ + openssl \ + ca-certificates \ + libssl3 \ + sqlite3 \ + && rm -rf /var/lib/apt/lists/* \ + && update-ca-certificates + + +WORKDIR /app + +# Copy Python project files +COPY python/pyproject.toml python/uv.lock ./python/ +COPY python/README.md ./python/README.md +COPY python/scripts ./python/scripts +COPY python/valuecell ./python/valuecell +COPY python/configs ./python/configs + +# Configure uv to use PyPI mirror (Tsinghua) +# Create pip config for uv to use mirror +RUN mkdir -p /root/.pip && \ + echo "[global]" > /root/.pip/pip.conf && \ + echo "index-url = https://pypi.tuna.tsinghua.edu.cn/simple" >> /root/.pip/pip.conf && \ + echo "[install]" >> /root/.pip/pip.conf && \ + echo "trusted-host = pypi.tuna.tsinghua.edu.cn" >> /root/.pip/pip.conf + +# Also set environment variable for uv +ENV UV_INDEX_URL=https://pypi.tuna.tsinghua.edu.cn/simple +ENV PIP_INDEX_URL=https://pypi.tuna.tsinghua.edu.cn/simple + +# Pre-cache the application dependencies +# Try with --locked first, fallback to without if lockfile is outdated +RUN 
--mount=type=cache,target=/root/.cache/uv \ + cd python && (uv sync --locked --no-install-project || uv sync --no-install-project) + +# Install the application dependencies +RUN --mount=type=cache,target=/root/.cache/uv \ + cd python && (uv sync --locked || uv sync) + +# Create logs directory +RUN mkdir -p /app/logs + +# Copy entrypoint script +COPY docker/entrypoint.sh /app/entrypoint.sh +# Normalize line endings to LF (handle CRLF from Windows) and make executable +RUN sed -i 's/\r$//' /app/entrypoint.sh && chmod +x /app/entrypoint.sh + +# Set entrypoint +ENTRYPOINT ["/app/entrypoint.sh"] + +EXPOSE 8000 + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \ + CMD curl -f http://localhost:8000/api/v1/healthz || exit 1 + +# Run the backend server +WORKDIR /app/python +CMD ["uv", "run", "-m", "valuecell.server.main"] + diff --git a/docker/Dockerfile.frontend b/docker/Dockerfile.frontend new file mode 100644 index 000000000..328863c0f --- /dev/null +++ b/docker/Dockerfile.frontend @@ -0,0 +1,40 @@ +# Use bun image from domestic mirror (docker.1ms.run) +FROM docker.1ms.run/oven/bun:1.3.0-slim + +# Configure apt to use Aliyun mirror (for Debian) +RUN sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list.d/debian.sources || \ + sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list || true + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + curl \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +# Copy frontend files +COPY frontend/package.json frontend/bun.lock ./frontend/ + +# Configure bun to use npm registry mirror (Taobao) +RUN echo "registry=https://registry.npmmirror.com" > /root/.npmrc && \ + echo "_authToken=" >> /root/.npmrc + +# Set environment variable for bun registry +ENV BUN_CONFIG_REGISTRY=https://registry.npmmirror.com + +# Install dependencies +RUN cd frontend && bun install --frozen-lockfile + +# Copy frontend source code +COPY frontend ./frontend + 
+EXPOSE 1420 + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --start-period=20s --retries=3 \ + CMD curl -f http://localhost:1420 || exit 1 + +# Run the frontend dev server +WORKDIR /app/frontend +CMD ["bun", "run", "dev"] + diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 000000000..61f534cef --- /dev/null +++ b/docker/README.md @@ -0,0 +1,183 @@ +# Docker Deployment Guide + +This project supports running frontend and backend services using Docker containers. + +## Quick Start + +### 1. Configure Environment Variables + +```bash +cp .env.example .env +``` + +Edit the `.env` file with your API keys and preferences. This configuration file is shared across all agents. See [Configuration Guide](../docs/CONFIGURATION_GUIDE.md) for details. + +> **Note**: Some runtime environment variables (like `API_HOST`, `API_PORT`, `CORS_ORIGINS`) are already configured in `docker-compose.yml`. + +### 2. Build and Start Services + +```bash +# Build and start all services +docker-compose up -d + +# View logs +docker-compose logs -f + +# Start backend only +docker-compose up -d backend + +# Start frontend only +docker-compose up -d frontend +``` + +### 3. Access Services + +- **Frontend**: http://localhost:1420 +- **Backend API**: http://localhost:8000 +- **API Documentation**: http://localhost:8000/docs + +### 4. 
Stop Services

```bash
# Stop all services
docker-compose down

# Stop and remove volumes
docker-compose down -v
```

## Service Description

### Backend Service

- **Port**: 8000
- **Image**: Based on `docker.1ms.run/astral/uv:python3.12-bookworm-slim`
- **Working Directory**: `/app/python`
- **Entrypoint**: `/app/entrypoint.sh` (automatically initializes database if needed)
- **Start Command**: `uv run -m valuecell.server.main`
- **PyPI Mirror**: Configured to use Tsinghua University mirror source
- **Database**: Automatically initialized on first startup if not exists

### Frontend Service

- **Port**: 1420
- **Image**: Based on `docker.1ms.run/oven/bun:1.3.0-slim`
- **Working Directory**: `/app/frontend`
- **Start Command**: `bun run dev`
- **NPM Mirror**: Configured to use Taobao mirror source

## Mirror Source Configuration

Dockerfiles have automatically configured the following mirror sources for faster downloads:

- **Docker Images**: Using `docker.1ms.run` mirror for base images (no additional Docker Desktop configuration needed)
- **APT (Debian)**: Alibaba Cloud mirror
- **PyPI (Python)**: Tsinghua University mirror
- **NPM (Node.js)**: Taobao mirror

> **Note**: The Dockerfiles use `docker.1ms.run` mirror for pulling base images, so you don't need to configure Docker Desktop registry mirrors separately.

## Data Persistence

The following directories/files are mounted to containers, and data will be persisted:

- `./python` → `/app/python` (backend code)
- `./logs` → `/app/logs` (log files)
- `./data` → `/app/data` (database and data files)
  - Database file: `./data/valuecell.db` (automatically created if not exists)
- `./lancedb` → `/app/lancedb` (LanceDB data)
- `./frontend` → `/app/frontend` (frontend code)

> **Note**: The database is automatically initialized on first startup if it doesn't exist. The entrypoint script checks and initializes the database before starting the server.
+ +## Development Mode + +In development mode, code changes are automatically reflected in containers (via volume mounts): + +```bash +# Start development environment +docker-compose up + +# View logs in another terminal +docker-compose logs -f frontend +docker-compose logs -f backend +``` + +## Production Deployment + +For production environments, it is recommended to: + +1. Modify port mappings in `docker-compose.yml` +2. Use environment variable files to manage configuration +3. Configure reverse proxy (such as Nginx) +4. Use Docker secrets to manage sensitive information +5. Consider using multi-stage builds to optimize image size + +## Troubleshooting + +### View Container Status + +```bash +docker-compose ps +``` + +### View Container Logs + +```bash +# All services +docker-compose logs + +# Specific service +docker-compose logs backend +docker-compose logs frontend + +# Real-time logs +docker-compose logs -f +``` + +### Enter Container for Debugging + +```bash +# Enter backend container +docker-compose exec backend bash + +# Enter frontend container +docker-compose exec frontend sh +``` + +### Rebuild Images + +```bash +# Force rebuild +docker-compose build --no-cache + +# Rebuild and start +docker-compose up -d --build +``` + +### Network Issues + +If encountering network connection issues: + +1. The Dockerfiles already use `docker.1ms.run` mirror for base images, which should provide good download speeds +2. If you still experience issues, try using a proxy: + ```bash + export HTTP_PROXY=http://your-proxy:port + export HTTPS_PROXY=http://your-proxy:port + docker-compose build + ``` + +## Environment Variables + +Environment variables can be configured via the `.env` file. See [Configuration Guide](../docs/CONFIGURATION_GUIDE.md) for details. + +> **Note**: Some runtime variables (`API_HOST`, `API_PORT`, `CORS_ORIGINS`) are configured in `docker-compose.yml`. 
+ +### Build-time Environment Variables + +These variables are already configured in Dockerfiles and used during image build: + +- `UV_INDEX_URL`: PyPI mirror address (configured in `Dockerfile.backend` as Tsinghua source) +- `BUN_CONFIG_REGISTRY`: NPM mirror address (configured in `Dockerfile.frontend` as Taobao source) + +> **Note**: `UV_INDEX_URL` and `BUN_CONFIG_REGISTRY` are build-time variables set in the Dockerfiles. You don't need to configure them in `docker-compose.yml` or `.env` files as they only affect the image build process, not the running containers. diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh new file mode 100644 index 000000000..3c43c1cf3 --- /dev/null +++ b/docker/entrypoint.sh @@ -0,0 +1,71 @@ +#!/bin/bash +# Docker entrypoint script for backend service +# This script ensures the database is initialized before starting the server + +set -e + +echo "==========================================" +echo "ValueCell Backend Entrypoint" +echo "==========================================" + +# Get database path from environment or use default +DB_PATH="${VALUECELL_SQLITE_DB:-sqlite:///app/valuecell.db}" + +# Extract file path from SQLite URL (remove sqlite:// prefix, preserve leading slash) +if [[ "$DB_PATH" == sqlite:///* ]]; then + DB_FILE="${DB_PATH#sqlite://}" +else + DB_FILE="$DB_PATH" +fi + +echo "Database path: $DB_PATH" +echo "Database file: $DB_FILE" + +# Check if database file exists and is a regular file +if [ -e "$DB_FILE" ]; then + if [ -d "$DB_FILE" ]; then + echo "WARNING: Database path exists but is a directory, not a file!" + echo "Removing directory and creating database file..." 
+ rm -rf "$DB_FILE" + elif [ -f "$DB_FILE" ]; then + echo "Database file exists: $DB_FILE" + # Check if database is valid SQLite file + if command -v sqlite3 &> /dev/null; then + if sqlite3 "$DB_FILE" "SELECT 1;" &> /dev/null; then + echo "Database file is valid SQLite database" + else + echo "WARNING: Database file exists but is not a valid SQLite database" + echo "Removing invalid file and will recreate..." + rm -f "$DB_FILE" + fi + fi + fi +fi + +# Create database directory if it doesn't exist +DB_DIR=$(dirname "$DB_FILE") +if [ "$DB_DIR" != "." ] && [ "$DB_DIR" != "/" ]; then + mkdir -p "$DB_DIR" + echo "Created database directory: $DB_DIR" +fi + +# Initialize database if it doesn't exist +if [ ! -f "$DB_FILE" ]; then + echo "Database file does not exist, initializing..." + cd /app/python + uv run -m valuecell.server.db.init_db || { + echo "ERROR: Database initialization failed" + exit 1 + } + echo "Database initialized successfully" +else + echo "Database file exists, skipping initialization" + # Run migration to ensure schema is up to date +fi + +echo "==========================================" +echo "Starting ValueCell Backend Server..." +echo "==========================================" + +# Execute the main command +exec "$@" diff --git a/docker/start-docker.sh b/docker/start-docker.sh new file mode 100644 index 000000000..058aac988 --- /dev/null +++ b/docker/start-docker.sh @@ -0,0 +1,56 @@ +#!/bin/bash +# Docker Quick Start Script + +set -e + +echo "==========================================" +echo "ValueCell Docker Startup Script" +echo "==========================================" + +# Check if Docker is installed +if ! command -v docker &> /dev/null; then + echo "Error: Docker not found. Please install Docker first." + exit 1 +fi + +# Check if Docker Compose is installed +if ! command -v docker-compose &> /dev/null && ! docker compose version &> /dev/null; then + echo "Error: Docker Compose not found. Please install Docker Compose first." 
+ exit 1 +fi + +# Check for .env file +if [ ! -f .env ]; then + echo "Warning: .env file not found. Using default configuration." + echo "It is recommended to create a .env file and configure necessary environment variables." +fi + +# Build and start services +echo "" +echo "Building Docker images..." +docker-compose build + +echo "" +echo "Starting services..." +docker-compose up -d + +echo "" +echo "Waiting for services to start..." +sleep 5 + +# Check service status +echo "" +echo "Service status:" +docker-compose ps + +echo "" +echo "==========================================" +echo "Services started!" +echo "Frontend: http://localhost:1420" +echo "Backend API: http://localhost:8000" +echo "API Documentation: http://localhost:8000/docs" +echo "==========================================" +echo "" +echo "View logs: docker-compose logs -f" +echo "Stop services: docker-compose down" +echo "" diff --git a/python/scripts/migrate_db.py b/python/scripts/migrate_db.py new file mode 100644 index 000000000..e58c7a544 --- /dev/null +++ b/python/scripts/migrate_db.py @@ -0,0 +1,186 @@ +""" +Database migration script to add missing columns to existing tables. + +This script handles schema updates for existing databases without losing data. 
+""" + +import logging +import sys +from pathlib import Path + +from sqlalchemy import inspect, text + +from valuecell.server.config.settings import get_settings +from valuecell.server.db.connection import get_database_manager + +logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" +) +logger = logging.getLogger(__name__) + + +def column_exists(engine, table_name: str, column_name: str) -> bool: + """Check if a column exists in a table.""" + inspector = inspect(engine) + columns = [col["name"] for col in inspector.get_columns(table_name)] + return column_name in columns + + +def add_column_if_not_exists(engine, table_name: str, column_name: str, column_type: str, nullable: bool = True): + """Add a column to a table if it doesn't exist.""" + if column_exists(engine, table_name, column_name): + logger.info(f"Column {table_name}.{column_name} already exists, skipping") + return True + + try: + nullable_clause = "" if nullable else " NOT NULL" + with engine.connect() as conn: + conn.execute( + text(f"ALTER TABLE {table_name} ADD COLUMN {column_name} {column_type}{nullable_clause}") + ) + conn.commit() + logger.info(f"Added column {table_name}.{column_name}") + return True + except Exception as e: + logger.error(f"Failed to add column {table_name}.{column_name}: {e}") + return False + + +def index_exists(engine, table_name: str, index_name: str) -> bool: + """Check if an index exists on a table.""" + inspector = inspect(engine) + indexes = [idx["name"] for idx in inspector.get_indexes(table_name)] + return index_name in indexes + + +def create_index_if_not_exists(engine, table_name: str, column_name: str, index_name: str = None): + """Create an index on a column if it doesn't exist.""" + if index_name is None: + index_name = f"ix_{table_name}_{column_name}" + + if index_exists(engine, table_name, index_name): + logger.info(f"Index {index_name} already exists, skipping") + return True + + try: + with engine.connect() as 
conn: + conn.execute( + text(f"CREATE INDEX IF NOT EXISTS {index_name} ON {table_name}({column_name})") + ) + conn.commit() + logger.info(f"Created index {index_name} on {table_name}.{column_name}") + return True + except Exception as e: + logger.error(f"Failed to create index {index_name}: {e}") + return False + + +def migrate_strategy_portfolio_views(): + """Migrate strategy_portfolio_views table to add missing columns.""" + db_manager = get_database_manager() + engine = db_manager.get_engine() + + # Check if table exists + inspector = inspect(engine) + if "strategy_portfolio_views" not in inspector.get_table_names(): + logger.warning("Table strategy_portfolio_views does not exist, skipping migration") + return True + + logger.info("Migrating strategy_portfolio_views table...") + + # Add missing columns + migrations = [ + ("total_realized_pnl", "NUMERIC(20, 8)", True), + ("gross_exposure", "NUMERIC(20, 8)", True), + ("net_exposure", "NUMERIC(20, 8)", True), + ] + + success = True + for column_name, column_type, nullable in migrations: + if not add_column_if_not_exists(engine, "strategy_portfolio_views", column_name, column_type, nullable): + success = False + + if success: + logger.info("Migration completed successfully") + else: + logger.error("Migration completed with errors") + + return success + + +def migrate_strategy_details(): + """Migrate strategy_details table to add missing columns.""" + db_manager = get_database_manager() + engine = db_manager.get_engine() + + # Check if table exists + inspector = inspect(engine) + if "strategy_details" not in inspector.get_table_names(): + logger.warning("Table strategy_details does not exist, skipping migration") + return True + + logger.info("Migrating strategy_details table...") + + # Columns to add with their types and whether they need an index + # (name, type, nullable, needs_index) + columns_to_add = [ + ("compose_id", "VARCHAR(200)", True, True), + ("instruction_id", "VARCHAR(200)", True, True), + 
("avg_exec_price", "NUMERIC(20, 8)", True, False), + ("realized_pnl", "NUMERIC(20, 8)", True, False), + ("realized_pnl_pct", "NUMERIC(10, 6)", True, False), + ("notional_entry", "NUMERIC(20, 8)", True, False), + ("notional_exit", "NUMERIC(20, 8)", True, False), + ("fee_cost", "NUMERIC(20, 8)", True, False), + ("entry_time", "TIMESTAMP", True, False), + ("exit_time", "TIMESTAMP", True, False), + ] + + success = True + for column_name, column_type, nullable, needs_index in columns_to_add: + # Add column if it doesn't exist + if not add_column_if_not_exists(engine, "strategy_details", column_name, column_type, nullable): + success = False + + # Create index if column exists and needs index + if column_exists(engine, "strategy_details", column_name) and needs_index: + index_name = f"ix_strategy_details_{column_name}" + if not create_index_if_not_exists(engine, "strategy_details", column_name, index_name): + logger.warning(f"Failed to create index on {column_name}, but column exists") + + if success: + logger.info("Migration completed successfully") + else: + logger.error("Migration completed with errors") + + return success + + +def main(): + """Run database migrations.""" + logger.info("Starting database migration...") + logger.info("=" * 50) + + try: + # Migrate strategy_portfolio_views + if not migrate_strategy_portfolio_views(): + logger.error("Migration failed") + sys.exit(1) + + # Migrate strategy_details + if not migrate_strategy_details(): + logger.error("Migration failed") + sys.exit(1) + + logger.info("=" * 50) + logger.info("All migrations completed successfully!") + sys.exit(0) + + except Exception as e: + logger.exception(f"Migration failed with error: {e}") + sys.exit(1) + + +if __name__ == "__main__": + main() +