Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions backend/.env.production.example
Original file line number Diff line number Diff line change
Expand Up @@ -29,3 +29,8 @@ FRONTEND_URL=https://example.com
VAPID_PUBLIC_KEY=
VAPID_PRIVATE_KEY=
VAPID_SUBJECT=mailto:admin@example.com

# R2 Backup (optional)
# Set this to enable backups to Cloudflare R2
# To configure, run: sudo ./deploy/setup-r2-backup.sh
R2_BUCKET=
72 changes: 71 additions & 1 deletion deploy/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,76 @@ This will:

## Backup Strategy

### Local Backups (Legacy)

- Daily SQLite database backups at `/srv/dogbook/backups/`
- Keeps last 30 days of backups
- Images are NOT backed up (stored in `/srv/dogbook/data/images/`)
- Run by `dogbook-backup.timer`

### R2 Cloud Backups (Recommended)

Full backup of database AND images to Cloudflare R2 with tiered retention:

- **Daily backups**: kept for 7 days
- **Weekly backups** (Sundays): kept for 12 weeks (~3 months)
- **Monthly backups** (1st of month): kept for 12 months (1 year)
- **Images**: mirrored incrementally (only new/changed files are uploaded; note that files deleted locally are also removed from the backup on the next sync)

**Files:**
- `r2-backup.sh` - Main backup script
- `r2-restore.sh` - Restore from backup
- `setup-r2-backup.sh` - One-time setup script
- `dogbook-r2-backup.service` - Systemd service
- `dogbook-r2-backup.timer` - Runs daily at 3 AM

**Setup R2 Backups:**

```bash
# Run the setup script (configures rclone and installs timer)
sudo ./deploy/setup-r2-backup.sh
```

You'll need:
1. Cloudflare R2 bucket created
2. R2 API token with read/write access
3. Account ID (from dashboard URL)

**Manual Commands:**

```bash
# Check backup status
systemctl status dogbook-r2-backup.timer

# Manually trigger a backup
sudo systemctl start dogbook-r2-backup.service

# View backup logs
journalctl -u dogbook-r2-backup.service

# List all backups
sudo -u dogbook /srv/dogbook/deploy/r2-backup.sh list
```

**Restore from Backup:**

```bash
# List available backups
sudo /srv/dogbook/deploy/r2-restore.sh list

# Restore latest database backup
sudo /srv/dogbook/deploy/r2-restore.sh latest

# Restore database from specific date
sudo /srv/dogbook/deploy/r2-restore.sh db 2024-01-15

# Restore images
sudo /srv/dogbook/deploy/r2-restore.sh images

# Full restore (database + images)
sudo /srv/dogbook/deploy/r2-restore.sh all
```

**Estimated Storage Usage:**
- Database: ~30-90 MB (31 copies with tiered retention)
- Images: ~300 MB - 1.5 GB (stored once, synced incrementally)
- Total: ~400 MB - 2 GB initially, growing with usage
18 changes: 18 additions & 0 deletions deploy/dogbook-r2-backup.service
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
[Unit]
Description=Backup Dogbook Database and Images to R2
# Wait for real network connectivity before uploading to R2.
After=network-online.target
Wants=network-online.target

[Service]
# oneshot: the unit is "activating" while the script runs, then exits.
Type=oneshot
User=dogbook
ExecStart=/srv/dogbook/deploy/r2-backup.sh backup
StandardOutput=journal
StandardError=journal

# Allow up to 30 minutes for the backup (in case of large image sync)
TimeoutStartSec=1800

# Retry on failure
# NOTE(review): Restart= for Type=oneshot units is only accepted by
# systemd >= 244; older systemd rejects the unit — confirm target distro.
Restart=on-failure
RestartSec=60
13 changes: 13 additions & 0 deletions deploy/dogbook-r2-backup.timer
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Triggers dogbook-r2-backup.service once a day.
# Enable with: systemctl enable --now dogbook-r2-backup.timer
[Unit]
Description=Daily Dogbook R2 Backup Timer

[Timer]
# Run at 3:00 AM daily (off-peak hours)
OnCalendar=*-*-* 03:00:00
# Run immediately if last run was missed (e.g., server was down)
Persistent=true
# Add random delay up to 15 minutes to avoid thundering herd
RandomizedDelaySec=900

[Install]
WantedBy=timers.target
198 changes: 198 additions & 0 deletions deploy/r2-backup.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,198 @@
#!/bin/bash
# R2 Backup Script for Dogbook
# Backs up SQLite database and images to Cloudflare R2
#
# Retention policy:
#   - Daily backups: kept for 7 days
#   - Weekly backups (Sundays): kept for 3 months (~12 weeks)
#   - Monthly backups (1st of month): kept for 1 year (~12 months)
#
# Images are synced incrementally (no duplicates - only uploads new/changed files)
#
# Usage: r2-backup.sh [backup|list]
# Requires: rclone (with an "r2" remote configured), sqlite3, gzip
# Required env: R2_BUCKET (loaded from /srv/dogbook/data/.env when present)

# Abort on errors, unset variables, and failures anywhere in a pipeline.
set -euo pipefail

# Load environment variables; set -a exports everything the .env defines.
ENV_FILE="/srv/dogbook/data/.env"
if [ -f "$ENV_FILE" ]; then
    set -a
    source "$ENV_FILE"
    set +a
fi

# Configuration
DATA_DIR="/srv/dogbook/data"
DB_FILE="$DATA_DIR/keystone.db"
IMAGES_DIR="$DATA_DIR/images"
TMP_DIR="/tmp/dogbook-backup"   # scratch space for the compressed DB copy
R2_BUCKET="${R2_BUCKET:-}"      # default to empty so set -u does not trip below

# Validate required environment variables
if [ -z "$R2_BUCKET" ]; then
    echo "ERROR: R2_BUCKET environment variable is not set" >&2
    echo "Add R2_BUCKET=your-bucket-name to $ENV_FILE" >&2
    exit 1
fi

# Date calculations (computed once, so a run crossing midnight stays consistent)
TODAY=$(date +%Y-%m-%d)
DAY_OF_WEEK=$(date +%u) # 1=Monday, 7=Sunday
DAY_OF_MONTH=$(date +%d) # zero-padded, e.g. "01"

# Logging
# Print a timestamped informational message to stdout.
log() {
    printf '[%s] %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$1"
}

# Print a timestamped error message to stderr.
error() {
    printf '[%s] ERROR: %s\n' "$(date '+%Y-%m-%d %H:%M:%S')" "$1" >&2
}

# Verify everything the backup needs before doing any work.
# Exits non-zero with a diagnostic on stderr if a requirement is missing.
# Globals read: DB_FILE
check_prerequisites() {
    # rclone performs all uploads to R2.
    if ! command -v rclone &> /dev/null; then
        error "rclone is not installed. Install with: curl https://rclone.org/install.sh | sudo bash"
        exit 1
    fi

    # sqlite3 is required by backup_database for the consistent .backup copy.
    if ! command -v sqlite3 &> /dev/null; then
        error "sqlite3 is not installed. Install with: sudo apt install sqlite3"
        exit 1
    fi

    if ! rclone listremotes | grep -q "^r2:$"; then
        error "rclone remote 'r2' is not configured. Run: rclone config"
        exit 1
    fi

    if [ ! -f "$DB_FILE" ]; then
        error "Database file not found: $DB_FILE"
        exit 1
    fi
}

# Create a safe SQLite backup using .backup command
#
# Takes a point-in-time snapshot of $DB_FILE, gzips it, and uploads it to:
#   - db/daily/   (every run)
#   - latest/     (every run, fixed name for easy restore)
#   - db/weekly/  (only on Sundays, DAY_OF_WEEK == 7)
#   - db/monthly/ (only on the 1st of the month)
# Globals read: TMP_DIR, DB_FILE, TODAY, DAY_OF_WEEK, DAY_OF_MONTH, R2_BUCKET
backup_database() {
    log "Backing up SQLite database..."

    mkdir -p "$TMP_DIR"
    local backup_file="$TMP_DIR/keystone-$TODAY.db"
    local compressed_file="$backup_file.gz"

    # Use SQLite's .backup command for a consistent backup even while the
    # application holds the database open (a plain cp could be torn).
    sqlite3 "$DB_FILE" ".backup '$backup_file'"

    # Compress the backup; -f overwrites leftovers from a previous failed run.
    gzip -f "$backup_file"

    log "Database backup created: $compressed_file ($(du -h "$compressed_file" | cut -f1))"

    # Upload to daily folder
    log "Uploading daily backup..."
    rclone copyto "$compressed_file" "r2:$R2_BUCKET/db/daily/keystone-$TODAY.db.gz"

    # Also keep a 'latest' copy for easy restore
    rclone copyto "$compressed_file" "r2:$R2_BUCKET/latest/keystone.db.gz"

    # Weekly backup (Sunday; date +%u reports 7 for Sunday)
    if [ "$DAY_OF_WEEK" -eq 7 ]; then
        log "Creating weekly backup (Sunday)..."
        rclone copyto "$compressed_file" "r2:$R2_BUCKET/db/weekly/keystone-$TODAY.db.gz"
    fi

    # Monthly backup (1st of month)
    if [ "$DAY_OF_MONTH" -eq "01" ]; then
        log "Creating monthly backup (1st of month)..."
        rclone copyto "$compressed_file" "r2:$R2_BUCKET/db/monthly/keystone-$TODAY.db.gz"
    fi

    # Clean up temp file.
    # NOTE(review): under set -e this line is skipped if any upload above
    # fails, leaving the archive in $TMP_DIR — consider a trap for cleanup.
    rm -f "$compressed_file"
}

# Sync images to R2 (incremental - only uploads new/changed files)
#
# Mirrors $IMAGES_DIR to r2:$R2_BUCKET/images/. Silently skips when the
# images directory does not exist yet (fresh install).
#
# NOTE(review): `rclone sync` makes the remote an exact mirror, so files
# deleted locally are also deleted from R2 on the next run. If the backup
# should protect against accidental local deletion, `rclone copy` (additive)
# would be safer — confirm which behavior is intended.
sync_images() {
    if [ ! -d "$IMAGES_DIR" ]; then
        log "Images directory not found, skipping image sync"
        return
    fi

    log "Syncing images to R2 (incremental)..."

    # Use rclone sync with checksum to avoid re-uploading unchanged files
    # --checksum: Compare by checksum instead of mod-time/size (more reliable)
    # --transfers 4: upload up to four files in parallel
    rclone sync "$IMAGES_DIR" "r2:$R2_BUCKET/images/" \
        --checksum \
        --transfers 4 \
        --stats-one-line \
        -v

    log "Image sync complete"
}

# Apply the tiered retention policy by deleting backups older than each
# tier's window: daily > 7 days, weekly > 90 days, monthly > 365 days.
# Deletion failures (e.g. an empty folder) are tolerated: the `|| true`
# keeps a best-effort cleanup from aborting the whole backup run.
apply_retention() {
    log "Applying retention policy..."

    local spec folder min_age label rest
    for spec in \
        "daily:7d:last 7 days" \
        "weekly:90d:last 12 weeks" \
        "monthly:365d:last 12 months"; do
        folder="${spec%%:*}"
        rest="${spec#*:}"
        min_age="${rest%%:*}"
        label="${rest#*:}"

        log "Cleaning $folder backups (keeping $label)..."
        rclone delete "r2:$R2_BUCKET/db/$folder/" \
            --min-age "$min_age" \
            -v 2>&1 | grep -v "^$" || true
    done

    log "Retention policy applied"
}

# List current backups stored in R2, one section per retention tier,
# followed by an aggregate count/size of the images mirror.
# Globals read: R2_BUCKET
list_backups() {
    log "Current backups in R2:"
    echo ""
    echo "=== Daily Backups ==="
    rclone ls "r2:$R2_BUCKET/db/daily/" 2>/dev/null || echo "(none)"
    echo ""
    echo "=== Weekly Backups ==="
    rclone ls "r2:$R2_BUCKET/db/weekly/" 2>/dev/null || echo "(none)"
    echo ""
    echo "=== Monthly Backups ==="
    rclone ls "r2:$R2_BUCKET/db/monthly/" 2>/dev/null || echo "(none)"
    echo ""
    echo "=== Images ==="
    # Declare and assign separately: `local v=$(cmd)` returns the status of
    # `local` (always 0), silently masking rclone failures under pipefail.
    local image_count image_size
    image_count=$(rclone ls "r2:$R2_BUCKET/images/" 2>/dev/null | wc -l) || image_count=0
    image_size=$(rclone size "r2:$R2_BUCKET/images/" 2>/dev/null | grep "Total size" | awk '{print $3, $4}') || image_size=""
    echo "Files: $image_count, Size: ${image_size:-0}"
}

# Main execution: dispatch on the requested action (defaults to "backup").
# Unknown actions print usage to stdout and exit 1.
main() {
    local command="${1:-backup}"

    if [[ "$command" == "backup" ]]; then
        log "Starting Dogbook R2 backup..."
        check_prerequisites
        backup_database
        sync_images
        apply_retention
        log "Backup complete!"
    elif [[ "$command" == "list" ]]; then
        check_prerequisites
        list_backups
    else
        echo "Usage: $0 [backup|list]"
        echo " backup - Run full backup (default)"
        echo " list - List current backups"
        exit 1
    fi
}

# Entry point: forward all CLI arguments to main.
main "$@"
Loading
Loading