Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions backend/migrations/003_create_tokens_table.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
-- Migration: Create tokens table
-- Caches on-chain token metadata (symbol, name, decimals) keyed by the
-- token's contract address. Populated by the token metadata worker.
CREATE TABLE IF NOT EXISTS tokens (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    address VARCHAR(255) UNIQUE NOT NULL,
    symbol VARCHAR(32),
    name VARCHAR(128),
    decimals INTEGER,
    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW()
);
-- No separate index on address: in Postgres the UNIQUE constraint above
-- already creates a btree index, so an extra idx_tokens_address would be
-- a redundant duplicate that only slows down writes.
10 changes: 10 additions & 0 deletions backend/migrations/004_create_organization_webhooks.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
-- Migration: Create organization_webhooks table
-- One row per webhook endpoint registered for an organization; the
-- indexing service POSTs claim events to every URL stored here.
CREATE TABLE IF NOT EXISTS organization_webhooks (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-- NOTE(review): no FOREIGN KEY to the organizations table, so orphaned
-- webhook rows are possible after an organization is deleted; confirm
-- whether REFERENCES organizations(id) ON DELETE CASCADE should be added.
organization_id UUID NOT NULL,
webhook_url VARCHAR(512) NOT NULL,
created_at TIMESTAMP DEFAULT NOW(),
updated_at TIMESTAMP DEFAULT NOW()
);
CREATE INDEX IF NOT EXISTS idx_org_webhooks_org_id ON organization_webhooks(organization_id);
-- NOTE(review): an index on webhook_url only pays off if lookups by URL
-- exist; none are visible in this change -- verify it is needed.
CREATE INDEX IF NOT EXISTS idx_org_webhooks_url ON organization_webhooks(webhook_url);
15 changes: 15 additions & 0 deletions backend/src/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,21 @@ app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(swaggerSpecs));
// Database connection and models
const { sequelize } = require('./database/connection');
const models = require('./models');
const { OrganizationWebhook } = models;

/**
 * POST /api/admin/webhooks — register a webhook URL for an organization.
 * The indexing service later POSTs claim events to every URL registered
 * for the claim's organization.
 *
 * Body: { organization_id: string (UUID), webhook_url: string (http/https URL) }
 * Responses: 201 with the created row; 400 on missing or invalid input;
 * 500 on unexpected failure (details are logged server-side, not leaked).
 */
app.post('/api/admin/webhooks', async (req, res) => {
  try {
    const { organization_id, webhook_url } = req.body;
    if (!organization_id || !webhook_url) {
      return res.status(400).json({ success: false, error: 'organization_id and webhook_url are required' });
    }
    // Validate up front: the server dials these URLs later, so an
    // unchecked value is an SSRF vector and a guaranteed delivery failure.
    let parsed;
    try {
      parsed = new URL(webhook_url);
    } catch {
      return res.status(400).json({ success: false, error: 'webhook_url must be a valid URL' });
    }
    if (parsed.protocol !== 'http:' && parsed.protocol !== 'https:') {
      return res.status(400).json({ success: false, error: 'webhook_url must use http or https' });
    }
    const webhook = await OrganizationWebhook.create({ organization_id, webhook_url });
    res.status(201).json({ success: true, data: webhook });
  } catch (error) {
    // Log the full error server-side; do not leak error.message (stack
    // hints, SQL details) to the client as the original did.
    console.error('Error registering webhook:', error);
    res.status(500).json({ success: false, error: 'Internal server error' });
  }
});

// Services
const indexingService = require('./services/indexingService');
Expand Down
12 changes: 12 additions & 0 deletions backend/src/models/index.js
Original file line number Diff line number Diff line change
@@ -1,18 +1,30 @@
const { sequelize } = require('../database/connection');

// Models that export an already-initialised class directly.
const ClaimsHistory = require('./claimsHistory');
const Vault = require('./vault');
const SubSchedule = require('./subSchedule');
const TVL = require('./tvl');
const Beneficiary = require('./beneficiary');
const Organization = require('./organization');

// Models exported as { Model, initModel } pairs: they must be initialised
// explicitly against the shared connection before first use.
const { Token, initTokenModel } = require('./token');
const { OrganizationWebhook, initOrganizationWebhookModel } = require('./organizationWebhook');



initTokenModel(sequelize);
initOrganizationWebhookModel(sequelize);


// Single registry of every model plus the shared connection itself,
// consumed via require('../models') throughout the backend.
const models = {
ClaimsHistory,
Vault,
SubSchedule,
TVL,
Beneficiary,
Organization,
Token,
OrganizationWebhook,
sequelize,
};

Expand Down
41 changes: 41 additions & 0 deletions backend/src/models/organizationWebhook.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
const { DataTypes, Model } = require('sequelize');

/**
 * Sequelize model for the organization_webhooks table (migration 004).
 * One row per webhook endpoint registered for an organization; the
 * indexing service POSTs claim events to each registered URL.
 */
class OrganizationWebhook extends Model {}

/**
 * Initialise the OrganizationWebhook model against a Sequelize connection.
 * Must be called exactly once (see models/index.js) before use.
 *
 * @param {import('sequelize').Sequelize} sequelize - shared DB connection
 */
function initOrganizationWebhookModel(sequelize) {
  OrganizationWebhook.init(
    {
      id: {
        type: DataTypes.UUID,
        defaultValue: DataTypes.UUIDV4,
        primaryKey: true,
      },
      organization_id: {
        type: DataTypes.UUID,
        allowNull: false,
      },
      webhook_url: {
        type: DataTypes.STRING(512),
        allowNull: false,
      },
    },
    {
      sequelize,
      tableName: 'organization_webhooks',
      // Migration 004 names the timestamp columns created_at/updated_at.
      // Declaring explicit camelCase createdAt/updatedAt attributes (as the
      // original did) maps to columns that do not exist in that table; let
      // Sequelize manage the timestamps and map names via `underscored`.
      timestamps: true,
      underscored: true,
      indexes: [
        { fields: ['organization_id'] },
        { fields: ['webhook_url'] },
      ],
    }
  );
}

module.exports = { OrganizationWebhook, initOrganizationWebhookModel };
44 changes: 44 additions & 0 deletions backend/src/models/token.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
const { DataTypes, Model } = require('sequelize');

/**
 * Sequelize model for the tokens table (migration 003): cached on-chain
 * token metadata (symbol, name, decimals) keyed by contract address.
 * Populated by the token metadata worker.
 */
class Token extends Model {}

/**
 * Initialise the Token model against a Sequelize connection.
 * Must be called exactly once (see models/index.js) before use.
 *
 * @param {import('sequelize').Sequelize} sequelize - shared DB connection
 */
function initTokenModel(sequelize) {
  Token.init(
    {
      id: {
        type: DataTypes.UUID,
        defaultValue: DataTypes.UUIDV4,
        primaryKey: true,
      },
      address: {
        type: DataTypes.STRING,
        allowNull: false,
        unique: true,
      },
      symbol: {
        type: DataTypes.STRING(32),
      },
      name: {
        type: DataTypes.STRING(128),
      },
      decimals: {
        type: DataTypes.INTEGER,
      },
    },
    {
      sequelize,
      tableName: 'tokens',
      // Migration 003 names the timestamp columns created_at/updated_at.
      // The original's explicit camelCase createdAt/updatedAt attributes
      // mapped to non-existent columns; `underscored` fixes the mapping.
      timestamps: true,
      underscored: true,
      // No extra index on address: the UNIQUE constraint already creates one.
    }
  );
}

module.exports = { Token, initTokenModel };
16 changes: 16 additions & 0 deletions backend/src/services/indexingService.js
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,22 @@ class IndexingService {

// Emit internal claim event for WebSocket gateway
claimEventEmitter.emit('claim', claim.toJSON());

// Fire webhook POST for DAOs
const { OrganizationWebhook } = require('../models');
const axios = require('axios');
// Find webhooks for the organization (if vault has organization_id)
if (claim.organization_id) {
const webhooks = await OrganizationWebhook.findAll({ where: { organization_id: claim.organization_id } });
for (const webhook of webhooks) {
try {
await axios.post(webhook.webhook_url, claim.toJSON());
console.log(`Webhook fired: ${webhook.webhook_url}`);
} catch (err) {
console.error(`Webhook failed: ${webhook.webhook_url}`, err);
}
}
}
return claim;
} catch (error) {
console.error('Error processing claim:', error);
Expand Down
62 changes: 62 additions & 0 deletions backend/src/services/tokenMetadataWorker.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
const { Token } = require('../models/token');
const Vault = require('../models/vault');
const axios = require('axios');

const SOROBAN_RPC_URL = process.env.SOROBAN_RPC_URL || 'https://soroban-rpc.testnet.stellar.org';

/**
 * Worker that detects token addresses referenced by vaults but missing
 * from the tokens table, fetches their metadata, and persists it.
 */
class TokenMetadataWorker {
  /**
   * @param {import('sequelize').Sequelize} sequelize - shared connection
   *   (kept for interface parity; queries go through the models).
   */
  constructor(sequelize) {
    this.sequelize = sequelize;
  }

  /**
   * Scan all vaults for token addresses not yet present in the tokens
   * table and store their metadata. Failures for individual tokens are
   * logged and skipped so one bad address cannot stall the whole run.
   */
  async detectAndFetchNewTokens() {
    // 1. Unique, non-empty token addresses referenced by vaults.
    const vaults = await Vault.findAll({ attributes: ['token_address'] });
    const uniqueAddresses = [
      ...new Set(vaults.map((v) => v.token_address).filter(Boolean)),
    ];

    // 2. Load all known addresses in ONE query. The original ran a
    //    Token.findOne per address — an N+1 round-trip pattern.
    const known = await Token.findAll({ attributes: ['address'] });
    const knownAddresses = new Set(known.map((t) => t.address));

    // 3. Fetch and store metadata for each genuinely new address.
    for (const address of uniqueAddresses) {
      if (knownAddresses.has(address)) continue;
      try {
        const meta = await this.fetchTokenMetadata(address);
        if (meta) {
          await Token.create({
            address,
            symbol: meta.symbol,
            name: meta.name,
            decimals: meta.decimals,
          });
          console.log(`Token metadata stored for ${address}`);
        }
      } catch (err) {
        console.error(`Failed to fetch/store metadata for ${address}:`, err);
      }
    }
  }

  /**
   * Fetch token metadata from the Soroban RPC endpoint.
   * NOTE(review): `/getTokenMetadata` is a placeholder — the original
   * carried the same "replace with actual Soroban RPC call" TODO.
   *
   * @param {string} address - token contract address
   * @returns {Promise<{symbol: string, name: string, decimals: number}|null>}
   *   null when the request fails or the response is incomplete.
   */
  async fetchTokenMetadata(address) {
    try {
      const { data } = await axios.post(`${SOROBAN_RPC_URL}/getTokenMetadata`, { address });
      const { symbol, name, decimals } = data;
      if (symbol && name && typeof decimals === 'number') {
        return { symbol, name, decimals };
      }
      return null;
    } catch (err) {
      // Best-effort lookup: log (the original swallowed this silently)
      // and return null so the caller simply skips this token.
      console.error(`Metadata fetch failed for ${address}:`, err.message);
      return null;
    }
  }
}

module.exports = { TokenMetadataWorker };
10 changes: 10 additions & 0 deletions backend/src/workers/tokenMetaWorker.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
const { sequelize } = require('../models');
const { TokenMetadataWorker } = require('../services/tokenMetadataWorker');

/**
 * One-shot entry point: run the token metadata worker once, then exit.
 * Exits 0 on success and 1 on failure. The original had no error
 * handling, so a rejection became an unhandled promise rejection and
 * the process never signalled failure to its scheduler.
 */
async function runWorker() {
  try {
    const worker = new TokenMetadataWorker(sequelize);
    await worker.detectAndFetchNewTokens();
    process.exit(0);
  } catch (err) {
    console.error('Token metadata worker failed:', err);
    process.exit(1);
  }
}

runWorker();
43 changes: 43 additions & 0 deletions backup_postgres.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
#!/bin/bash
# Automated Postgres backup: dump the database, compress it, upload to S3
# with server-side encryption, and prune backups older than the retention
# window both locally and in the bucket.

set -euo pipefail

# Config
PG_DB="vestingvault"
PG_USER="postgres"
PG_HOST="localhost"
BACKUP_DIR="/var/backups/vestingvault"
S3_BUCKET="s3://vestingvault-backups"
RETENTION_DAYS=30
DATE=$(date +"%Y-%m-%d_%H-%M-%S")
DUMP_FILE="$BACKUP_DIR/backup_$DATE.sql"
ARCHIVE_FILE="$DUMP_FILE.gz"

# Ensure backup directory exists
mkdir -p "$BACKUP_DIR"

# Dump Postgres database; remove any partial dump on failure so a broken
# file is never compressed and uploaded as a "good" backup.
# NOTE(review): assumes credentials come from ~/.pgpass or PGPASSWORD.
if ! pg_dump -h "$PG_HOST" -U "$PG_USER" "$PG_DB" > "$DUMP_FILE"; then
    rm -f "$DUMP_FILE"
    echo "pg_dump failed for $PG_DB" >&2
    exit 1
fi

# Compress the dump
gzip "$DUMP_FILE"

# Upload to S3 (requires AWS CLI configured) with server-side encryption.
aws s3 cp "$ARCHIVE_FILE" "$S3_BUCKET/" --sse AES256

# Cleanup old local backups.
find "$BACKUP_DIR" -name "*.gz" -mtime +"$RETENTION_DAYS" -delete

# Cleanup old S3 backups. The cutoff is loop-invariant, so compute it once.
# `read -r` and quoting keep filenames with backslashes/spaces intact;
# grep -oP (PCRE) requires GNU grep.
CUTOFF=$(date -d "$RETENTION_DAYS days ago" +%s)
aws s3 ls "$S3_BUCKET/" | awk '{print $4}' | while read -r file; do
    [ -n "$file" ] || continue
    FILE_DATE=$(printf '%s\n' "$file" | grep -oP '\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}' || true)
    if [[ -n "$FILE_DATE" ]]; then
        # GNU date cannot parse the underscored "YYYY-MM-DD_HH-MM-SS" form
        # the original passed it; compare on the date part, which is
        # day-level precision and sufficient for a 30-day retention policy.
        FILE_TIMESTAMP=$(date -d "${FILE_DATE%%_*}" +%s)
        if (( FILE_TIMESTAMP < CUTOFF )); then
            aws s3 rm "$S3_BUCKET/$file"
        fi
    fi
done

# Log
echo "Backup completed and uploaded to S3: $ARCHIVE_FILE"