Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions electro/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

from . import types_ as types
from .flow_manager import global_flow_manager
from .routes import files
from .toolkit.tortoise_orm import get_tortoise_config

app = FastAPI(
Expand All @@ -23,6 +24,9 @@ async def process_message(message: types.Message) -> list[types.Message] | None:
return await global_flow_manager.on_message(message)


# Register routes
app.include_router(files.router)

# region Register Tortoise
register_tortoise(
app,
Expand Down
180 changes: 180 additions & 0 deletions electro/routes/files.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,180 @@
"""API routes for file handling."""

import uuid
from typing import List, Optional
from fastapi import APIRouter, UploadFile, File as FastAPIFile, HTTPException, Depends
from fastapi.responses import StreamingResponse
from pydantic import BaseModel

from ..toolkit.file_storage.models import File
from ..toolkit.file_storage.storage_services import LocalFileStorage
from ..settings import Settings

# All file endpoints live under /api/files; mounted in app.py via
# app.include_router(files.router).
router = APIRouter(prefix="/api/files", tags=["files"])

# Response models
class FileMetadata(BaseModel):
    """File metadata response model.

    Returned by the upload, single-file metadata, and list endpoints.
    """
    id: str  # UUID of the File row, serialized as a string
    filename: str  # original client-supplied filename
    size_bytes: int  # size measured server-side during upload
    content_type: Optional[str]  # MIME type, when the client provided one
    download_url: str  # URL produced by File.get_download_url()
    created_at: str  # ISO-8601 timestamp string

class FileList(BaseModel):
    """File list response model (one page of results plus paging info)."""
    total: int  # total number of files across all pages
    page: int  # 1-based page number that was requested
    page_size: int  # requested page size (not the count actually returned)
    files: List[FileMetadata]  # the files on this page

# Dependencies
async def get_storage_service():
    """FastAPI dependency resolving the configured storage backend.

    Returns:
        The storage service instance for ``FILE_STORAGE_SERVICE``.

    Raises:
        HTTPException: 500 when an unsupported backend is configured.
            (The original raised a bare ``ValueError``, which FastAPI
            surfaces as an opaque 500 with no JSON detail.)
    """
    settings = Settings.get_current()
    if settings.FILE_STORAGE_SERVICE == "local":
        return LocalFileStorage()
    # TODO: Add other storage services (s3, azure)
    raise HTTPException(
        status_code=500,
        detail=f"Unsupported storage service: {settings.FILE_STORAGE_SERVICE}",
    )

@router.post("/upload", response_model=FileMetadata)
async def upload_file(
    file: UploadFile = FastAPIFile(...),
    storage_service: LocalFileStorage = Depends(get_storage_service)
):
    """Upload a file, enforcing the configured size limit.

    Returns:
        FileMetadata for the stored file, including its download URL.

    Raises:
        HTTPException: 413 when the upload exceeds MAX_UPLOAD_SIZE,
            500 on storage or database failure.
    """
    settings = Settings.get_current()

    # Measure the upload by streaming through it in chunks. The original
    # also accumulated every chunk in a list (buffering the whole payload
    # in memory) even though it re-reads the file after seek(0) below, and
    # only checked the limit after reading everything; abort early instead.
    file_size = 0
    chunk_size = 8192  # 8KB chunks
    while True:
        chunk = await file.read(chunk_size)
        if not chunk:
            break
        file_size += len(chunk)
        if file_size > settings.MAX_UPLOAD_SIZE:
            raise HTTPException(
                status_code=413,
                detail=f"File too large. Maximum size is {settings.MAX_UPLOAD_SIZE} bytes"
            )

    # Reset file pointer so the storage service reads from the start.
    await file.seek(0)

    try:
        # Upload file to the configured storage backend.
        object_key = await storage_service.upload_file(
            file.file,
            file.filename,
            content_type=file.content_type
        )

        # Persist metadata so the file can be listed and downloaded later.
        db_file = await File.create(
            id=uuid.uuid4(),
            filename=file.filename,
            object_key=object_key,
            size_bytes=file_size,
            content_type=file.content_type,
            storage_service=settings.FILE_STORAGE_SERVICE
        )

        return FileMetadata(
            id=str(db_file.id),
            filename=db_file.filename,
            size_bytes=db_file.size_bytes,
            content_type=db_file.content_type,
            download_url=await db_file.get_download_url(),
            created_at=db_file.created_at.isoformat()
        )
    except HTTPException:
        # Let deliberate HTTP errors propagate unchanged. (The original
        # caught them below and converted the 413 into a generic 500.)
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@router.get("/download/{file_id}")
async def download_file(
    file_id: str,
    storage_service: LocalFileStorage = Depends(get_storage_service)
):
    """Stream a stored file back to the client as an attachment."""
    record = await File.get_or_none(id=file_id)
    if record is None:
        raise HTTPException(status_code=404, detail="File not found")

    try:
        payload = await storage_service.download_file(record.object_key)
        response_headers = {
            "Content-Disposition": f'attachment; filename="{record.filename}"',
            "Content-Length": str(record.size_bytes),
        }
        return StreamingResponse(
            payload,
            media_type=record.content_type,
            headers=response_headers,
        )
    except Exception as e:
        # Storage-level failures surface as an internal server error.
        raise HTTPException(status_code=500, detail=str(e))

@router.get("/{file_id}", response_model=FileMetadata)
async def get_file_metadata(file_id: str):
    """Return stored metadata for a single file by its id."""
    record = await File.get_or_none(id=file_id)
    if record is None:
        raise HTTPException(status_code=404, detail="File not found")

    return FileMetadata(
        id=str(record.id),
        filename=record.filename,
        size_bytes=record.size_bytes,
        content_type=record.content_type,
        download_url=await record.get_download_url(),
        created_at=record.created_at.isoformat(),
    )

@router.get("", response_model=FileList)
async def list_files(
    page: int = 1,
    page_size: int = 50,
    sort_by: str = "created_at",
    sort_order: str = "desc"
):
    """List files with pagination.

    Args:
        page: 1-based page number.
        page_size: Results per page (1-100).
        sort_by: One of "created_at", "filename", "size_bytes".
        sort_order: "asc" or "desc".

    Raises:
        HTTPException: 400 for any out-of-range or unknown parameter.
    """
    if page < 1:
        raise HTTPException(status_code=400, detail="Page must be >= 1")
    if page_size < 1 or page_size > 100:
        raise HTTPException(status_code=400, detail="Page size must be between 1 and 100")

    # Validate sort parameters
    valid_sort_fields = {"created_at", "filename", "size_bytes"}
    if sort_by not in valid_sort_fields:
        # Join sorted field names: interpolating the set directly produced a
        # nondeterministically ordered, brace-wrapped message like
        # "{'filename', 'created_at', ...}".
        raise HTTPException(
            status_code=400,
            detail=f"Sort field must be one of: {', '.join(sorted(valid_sort_fields))}",
        )
    if sort_order not in {"asc", "desc"}:
        raise HTTPException(status_code=400, detail="Sort order must be 'asc' or 'desc'")

    total = await File.all().count()
    # Tortoise uses a leading "-" on the field name for descending order.
    ordering = f"{'-' if sort_order == 'desc' else ''}{sort_by}"
    files = await File.all().order_by(ordering).offset((page - 1) * page_size).limit(page_size)

    return FileList(
        total=total,
        page=page,
        page_size=page_size,
        files=[
            FileMetadata(
                id=str(file.id),
                filename=file.filename,
                size_bytes=file.size_bytes,
                content_type=file.content_type,
                download_url=await file.get_download_url(),
                created_at=file.created_at.isoformat(),
            )
            for file in files
        ],
    )
17 changes: 17 additions & 0 deletions electro/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

from pydantic import PostgresDsn, RedisDsn
from pydantic_settings import BaseSettings, SettingsConfigDict
from typing import List, Optional

from .toolkit.images_storage.storages_enums import StoragesIDs

Expand Down Expand Up @@ -94,6 +95,22 @@ class Settings(BaseSettings):
GO_BACK_COMMAND: str = "_go_back"
RELOAD_COMMAND: str = "_reload"

# File Storage Settings
FILE_STORAGE_SERVICE: str = "local"  # "local", "s3", or "azure"
MAX_UPLOAD_SIZE: int = 100 * 1024 * 1024  # 100MB, enforced by the upload route
# NOTE(review): ALLOWED_FILE_TYPES is declared but not visibly enforced by
# the upload route in this change — confirm before relying on it.
ALLOWED_FILE_TYPES: List[str] = ["image/*", "application/pdf", "text/*"]
# presumably the directory LocalFileStorage writes into — TODO confirm
LOCAL_STORAGE_PATH: str = "uploads"

# S3 Settings (for future use)
AWS_ACCESS_KEY_ID: Optional[str] = None
AWS_SECRET_ACCESS_KEY: Optional[str] = None
AWS_BUCKET_NAME: Optional[str] = None
AWS_REGION: Optional[str] = None

# Azure Settings (for future use)
AZURE_CONNECTION_STRING: Optional[str] = None
AZURE_CONTAINER_NAME: Optional[str] = None

# Validate GO_BACK_COMMAND
if GO_BACK_COMMAND.startswith(BOT_COMMAND_PREFIX):
raise ValueError(
Expand Down
6 changes: 6 additions & 0 deletions electro/toolkit/file_storage/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
"""File storage module for handling file uploads and downloads."""

from .storage_services import BaseFileStorageService
from .models import File

__all__ = ["BaseFileStorageService", "File"]
41 changes: 41 additions & 0 deletions electro/toolkit/file_storage/models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
"""Database models for file storage."""

from tortoise import fields
from tortoise.models import Model
from datetime import datetime
from typing import Optional

class File(Model):
    """File metadata model (table ``files``)."""

    id = fields.UUIDField(pk=True)
    filename = fields.CharField(max_length=255)  # original client-supplied name
    object_key = fields.CharField(max_length=512, unique=True)  # key in the storage backend
    size_bytes = fields.BigIntField()
    content_type = fields.CharField(max_length=255, null=True)  # MIME type, if known
    storage_service = fields.CharField(max_length=50)  # e.g., "local", "s3", "azure"
    metadata = fields.JSONField(default=dict)  # free-form extra attributes
    # Fixed: Tortoise's keyword arguments are auto_now_add / auto_now;
    # the original auto_add_now / auto_add are not valid field options.
    created_at = fields.DatetimeField(auto_now_add=True)
    updated_at = fields.DatetimeField(auto_now=True)

    class Meta:
        """Model metadata."""
        table = "files"

    def __str__(self) -> str:
        """String representation of the file."""
        return f"{self.filename} ({self.id})"

    async def get_download_url(self) -> str:
        """Get the download URL for this file."""
        # TODO: Implement URL generation based on storage service
        return f"/api/files/download/{self.id}"

    @property
    def size_formatted(self) -> str:
        """Get human-readable file size (e.g. ``"1.5 MB"``).

        Works on a local copy of the size: the original divided
        ``self.size_bytes`` in place, silently corrupting the model
        field on every access.
        """
        size = float(self.size_bytes)
        for unit in ("B", "KB", "MB", "GB"):
            if size < 1024:
                return f"{size:.1f} {unit}"
            size /= 1024
        return f"{size:.1f} TB"
8 changes: 8 additions & 0 deletions electro/toolkit/file_storage/storage_services/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
"""Storage service implementations."""

from ._base_storage_service import BaseFileStorageService
from .local_storage import LocalFileStorage
# from .s3_storage import S3FileStorage # TODO: Implement S3 storage
# from .azure_storage import AzureFileStorage # TODO: Implement Azure storage

__all__ = ["BaseFileStorageService", "LocalFileStorage"]
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
"""Base storage service for file handling."""

from abc import ABC, abstractmethod
from io import BytesIO
from typing import BinaryIO, Dict, Any, Optional
from datetime import datetime

class BaseFileStorageService(ABC):
    """Abstract interface every file storage backend must implement.

    Concrete backends (local disk, S3, Azure, ...) subclass this and
    provide all five operations; files are addressed by an opaque
    string ``object_key`` returned from :meth:`upload_file`.
    """

    @abstractmethod
    async def upload_file(
        self,
        file: BinaryIO,
        filename: str,
        content_type: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None
    ) -> str:
        """Store *file* in the backend.

        Args:
            file: Readable binary file-like object to persist.
            filename: Original name of the file.
            content_type: MIME type, when known.
            metadata: Extra key/value data to keep alongside the file.

        Returns:
            str: The object key under which the file was stored.
        """
        raise NotImplementedError

    @abstractmethod
    async def download_file(self, object_key: str) -> BytesIO:
        """Fetch the file stored under *object_key*.

        Args:
            object_key: Key previously returned by :meth:`upload_file`.

        Returns:
            BytesIO: In-memory buffer with the file contents.
        """
        raise NotImplementedError

    @abstractmethod
    async def get_file_metadata(self, object_key: str) -> Dict[str, Any]:
        """Return backend metadata for the file under *object_key*.

        Args:
            object_key: Key of the file to inspect.

        Returns:
            Dict[str, Any]: Metadata such as size and content type.
        """
        raise NotImplementedError

    @abstractmethod
    async def delete_file(self, object_key: str) -> None:
        """Remove the file stored under *object_key*.

        Args:
            object_key: Key of the file to delete.
        """
        raise NotImplementedError

    @abstractmethod
    async def check_file_exists(self, object_key: str) -> bool:
        """Report whether a file is stored under *object_key*.

        Args:
            object_key: Key of the file to look up.

        Returns:
            bool: True when the file exists, False otherwise.
        """
        raise NotImplementedError
Loading