7 changes: 6 additions & 1 deletion .env.example
@@ -1 +1,6 @@
OPENAI_API_KEY=your_openai_api_key
OPENAI_API_KEY=your_openai_api_key
LANGSMITH_TRACING=true
LANGSMITH_ENDPOINT=https://api.smith.langchain.com
LANGSMITH_API_KEY=your_langsmith_api_key
LANGSMITH_PROJECT=suntrace
GOOGLE_API_KEY=your_google_api_key
2 changes: 2 additions & 0 deletions .gcloudignore
@@ -0,0 +1,2 @@
!data/
!data/**
59 changes: 59 additions & 0 deletions .github/workflows/deploy.yml
@@ -0,0 +1,59 @@
name: Build & Deploy to Cloud Run

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

permissions:
  contents: read
  id-token: write

jobs:
  deploy:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Authenticate to GCP via Service Account key JSON
        uses: google-github-actions/auth@v2
        with:
          credentials_json: '${{ secrets.GOOGLE_CREDENTIALS }}'

      - name: Set up gcloud SDK
        uses: google-github-actions/setup-gcloud@v2
        with:
          project_id: ${{ secrets.GCP_PROJECT_ID }}

      - name: Configure Docker for Artifact Registry
        run: |
          gcloud auth configure-docker ${{ secrets.GCP_REGION }}-docker.pkg.dev --quiet

      # Fetch the geojson folder from GCS into ./data
      - name: Download GeoJSON data from GCS
        run: |
          mkdir -p data
          gsutil -m cp -r gs://${{ secrets.GCS_BUCKET }}/geojson/data/* data/

      - name: Build Docker image
        run: |
          IMAGE=${{ secrets.GCP_REGION }}-docker.pkg.dev/${{ secrets.GCP_PROJECT_ID }}/${{ secrets.GCP_PROJECT_REPO }}/${{ secrets.APP_NAME }}:${{ github.sha }}
          docker build -t $IMAGE .

      - name: Push Docker image
        run: |
          IMAGE=${{ secrets.GCP_REGION }}-docker.pkg.dev/${{ secrets.GCP_PROJECT_ID }}/${{ secrets.GCP_PROJECT_REPO }}/${{ secrets.APP_NAME }}:${{ github.sha }}
          docker push $IMAGE

      - name: Deploy to Cloud Run
        uses: google-github-actions/deploy-cloudrun@v2
        with:
          service: ${{ secrets.APP_NAME }}
          image: ${{ secrets.GCP_REGION }}-docker.pkg.dev/${{ secrets.GCP_PROJECT_ID }}/${{ secrets.GCP_PROJECT_REPO }}/${{ secrets.APP_NAME }}:${{ github.sha }}
          region: ${{ secrets.GCP_REGION }}
          project_id: ${{ secrets.GCP_PROJECT_ID }}
1 change: 1 addition & 0 deletions .gitignore
@@ -1,4 +1,5 @@
# Ignore venv
env/
venv/
.venv/
# Ignore Python cache files
34 changes: 23 additions & 11 deletions Dockerfile
@@ -1,31 +1,43 @@
# Use slim Python 3.12 image
FROM python:3.12-slim

# Set environment variables
# Don’t write .pyc files and force stdout/stderr to be unbuffered
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1
PYTHONUNBUFFERED=1 \
# Tell rasterio/geopandas where GDAL lives
GDAL_CONFIG=/usr/bin/gdal-config \
CPLUS_INCLUDE_PATH=/usr/include/gdal \
C_INCLUDE_PATH=/usr/include/gdal

# Set working directory
WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
# Install build tools + GDAL + GEOS + PROJ headers
RUN apt-get update && \
apt-get install -y --no-install-recommends \
build-essential \
gcc \
libffi-dev \
libssl-dev \
python3-dev \
gdal-bin \
libgdal-dev \
libgeos-dev \
libproj-dev \
&& rm -rf /var/lib/apt/lists/*

# Install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
RUN pip install --upgrade pip wheel && \
pip install --no-cache-dir -r requirements.txt

# Copy the app code
COPY . .

COPY ./start.sh /app/start.sh

# Expose the port FastAPI will run on
EXPOSE 8000
ENV PORT 8080
# EXPOSE 8000

# Run the app using Uvicorn
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]

RUN chmod +x /app/start.sh
# CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
ENTRYPOINT ["/app/start.sh"]
35 changes: 33 additions & 2 deletions README.md
@@ -55,13 +55,30 @@ python main.py
### 4. Run in Docker

```sh
export OPENAI_API_KEY=your_openai_key
docker build -t suntrace .
docker run -p 8000:8000 suntrace
docker run --rm -d \
  -p 8080:8080 \
  -e OPENAI_API_KEY="${OPENAI_API_KEY}" \
  --name suntrace \
  suntrace:latest
```
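
If you want the container to pick up all of the keys listed in `.env.example` (the LangSmith and Google keys) rather than just `OPENAI_API_KEY`, one option is Docker's `--env-file` flag. This is a sketch and assumes a populated `.env` in the project root:

```sh
# Pass every variable defined in .env into the container
docker run --rm -d \
  -p 8080:8080 \
  --env-file .env \
  --name suntrace \
  suntrace:latest
```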

View the container logs:

```sh
docker logs -f suntrace
```

#### With docker compose

```sh
docker-compose up -d --build
```

### 5. Access Frontend

Open [http://localhost:8000](http://localhost:8000) in your browser.
Open [http://localhost:8080](http://localhost:8080) in your browser.


## Testing
@@ -78,6 +95,20 @@ Create a `.env` file for secrets (e.g., OpenAI API key):
OPENAI_API_KEY=your_openai_key
```
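
The `.env.example` updated in this PR also lists LangSmith and Google keys; a fuller `.env` mirroring it would look like the following (all values are placeholders):

```sh
OPENAI_API_KEY=your_openai_api_key
LANGSMITH_TRACING=true
LANGSMITH_ENDPOINT=https://api.smith.langchain.com
LANGSMITH_API_KEY=your_langsmith_api_key
LANGSMITH_PROJECT=suntrace
GOOGLE_API_KEY=your_google_api_key
```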

## Deployment

Make sure you have the [**gcloud CLI**](https://cloud.google.com/sdk/docs/install-sdk) installed and set up.

The app is deployed to [**Google Cloud Run**](https://cloud.google.com/run?hl=en).

To deploy the application, run the commands below:

```sh
chmod +x bin/deploy
chmod +x start.sh
./bin/deploy
```
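
To confirm the service is up, a command like the one below should print the public URL. The service name and region are taken from `bin/deploy` (`suntrace`, `europe-west1`); adjust them if your values differ:

```sh
# Print the deployed Cloud Run service URL
gcloud run services describe suntrace \
  --region europe-west1 \
  --platform managed \
  --format 'value(status.url)'
```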

## Data Requirements

Place required geospatial files in the `data/` directory. See [tests/TESTING.md](tests/TESTING.md) for details.
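
The CI workflow downloads these files from a GCS bucket before building the image; you can do the same locally with `gsutil` (the bucket name below is a placeholder for the `GCS_BUCKET` secret):

```sh
# Mirror the CI step: copy the GeoJSON data from GCS into ./data
mkdir -p data
gsutil -m cp -r gs://<your-gcs-bucket>/geojson/data/* data/
```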
19 changes: 19 additions & 0 deletions bin/deploy
@@ -0,0 +1,19 @@
#!/usr/bin/env bash
set -euo pipefail

export APP=suntrace
export PROJECT_ID=sb-gcp-project-01
export REGION=europe-west1
export REPO=suntrace-repo
export TAG=${REGION}-docker.pkg.dev/${PROJECT_ID}/${REPO}/${APP}

# 1. Build & push through Cloud Build
gcloud builds submit --tag $TAG


# 2. Deploy to Cloud Run
gcloud run deploy $APP \
  --image $TAG \
  --region $REGION \
  --platform managed \
  --allow-unauthenticated
11 changes: 11 additions & 0 deletions docker-compose.yml
@@ -0,0 +1,11 @@
services:
  suntrace:
    container_name: suntrace
    # image: suntrace:latest
    build:
      context: .
      dockerfile: Dockerfile
    ports:
      - "8080:8080"
    env_file:
      - .env
2 changes: 1 addition & 1 deletion requirements.txt
@@ -2,7 +2,7 @@
numpy==2.3.1
geopandas==1.1.1
dotenv==0.9.9
openai==1.58.1
openai==1.99.1
rasterio==1.4.3
earthengine_api==1.6.0
folium==0.20.0
4 changes: 4 additions & 0 deletions start.sh
@@ -0,0 +1,4 @@
#!/bin/bash

# Start the FastAPI application with Uvicorn
uvicorn main:app --host 0.0.0.0 --port ${PORT} --workers 1
15 changes: 11 additions & 4 deletions tests/conftest.py
@@ -1,8 +1,9 @@
import os
import sys
import pytest
from pathlib import Path

import pytest

# Add src directory to Python path
project_root = Path(__file__).parent.parent
src_path = project_root
@@ -13,36 +14,42 @@
if str(project_root) not in sys.path:
    sys.path.insert(0, str(project_root))


@pytest.fixture(scope="session")
def project_root_path():
    """Return the project root path."""
    return project_root


@pytest.fixture(scope="session")
def data_dir_path(project_root_path):
    """Return the data directory path."""
    return project_root_path / "data"


@pytest.fixture(scope="session")
def sample_data_paths(data_dir_path):
    """Return paths to sample data files."""
    return {
        "buildings": data_dir_path / "lamwo_buildings_V3.gpkg",
        "minigrids": data_dir_path / "updated_candidate_minigrids_merged.gpkg",
        "tile_stats": data_dir_path / "Lamwo_Tile_Stats_EE.csv",
        "plain_tiles": data_dir_path / "lamwo_sentinel_composites" / "lamwo_grid.geojson",
        "plain_tiles": data_dir_path
        / "lamwo_sentinel_composites"
        / "lamwo_grid.geojson",
        "sample_region": data_dir_path / "sample_region_mudu" / "mudu_village.gpkg",
    }


@pytest.fixture(scope="session")
def check_data_files(sample_data_paths):
    """Check if required data files exist and skip tests if not."""
    missing_files = []
    for name, path in sample_data_paths.items():
        if not path.exists():
            missing_files.append(f"{name}: {path}")

    if missing_files:
        pytest.skip(f"Missing data files: {', '.join(missing_files)}")

    return sample_data_paths