-- Migration 003: tokens table — persisted metadata cache for Stellar token
-- contracts (address, symbol, name, decimals) discovered by the metadata worker.
CREATE TABLE IF NOT EXISTS tokens (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    address VARCHAR(255) UNIQUE NOT NULL,
    symbol VARCHAR(32),
    name VARCHAR(128),
    decimals INTEGER,
    created_at TIMESTAMP DEFAULT NOW(),
    updated_at TIMESTAMP DEFAULT NOW()
);
-- NOTE: the UNIQUE constraint on address already creates a unique index in
-- PostgreSQL, so a separate idx_tokens_address index would be redundant and
-- has been removed.
const { DataTypes, Model } = require('sequelize');

/**
 * Token — metadata for a Stellar token contract (address, symbol, name,
 * decimals). Backed by the `tokens` table from migration 003.
 */
class Token extends Model {}

/**
 * Initialize the Token model on the given Sequelize instance.
 *
 * Fix: the migration creates snake_case timestamp columns
 * (`created_at` / `updated_at`), but the original model declared camelCase
 * `createdAt` / `updatedAt` attributes with no column mapping, which would
 * target non-existent columns at runtime. `timestamps: true` +
 * `underscored: true` lets Sequelize manage the timestamps under the
 * snake_case column names the migration actually defines.
 *
 * @param {import('sequelize').Sequelize} sequelize - connected instance.
 * @returns {void}
 */
function initTokenModel(sequelize) {
  Token.init(
    {
      id: {
        type: DataTypes.UUID,
        defaultValue: DataTypes.UUIDV4,
        primaryKey: true,
      },
      address: {
        type: DataTypes.STRING,
        allowNull: false,
        unique: true,
      },
      symbol: {
        type: DataTypes.STRING(32),
      },
      name: {
        type: DataTypes.STRING(128),
      },
      decimals: {
        type: DataTypes.INTEGER,
      },
    },
    {
      sequelize,
      tableName: 'tokens',
      timestamps: true,
      underscored: true,
      indexes: [{ fields: ['address'] }],
    }
  );
}

module.exports = { Token, initTokenModel };
const { Token } = require('../models/token');
const Vault = require('../models/vault');
const axios = require('axios');

const SOROBAN_RPC_URL =
  process.env.SOROBAN_RPC_URL || 'https://soroban-rpc.testnet.stellar.org';

/**
 * Worker that detects token addresses referenced by vaults and persists
 * their metadata (symbol, name, decimals) into the tokens table.
 */
class TokenMetadataWorker {
  /**
   * @param {import('sequelize').Sequelize} sequelize - connected instance
   *   (kept for parity with other workers; not used directly here).
   */
  constructor(sequelize) {
    this.sequelize = sequelize;
  }

  /**
   * Find token addresses used by vaults that are not yet in the tokens
   * table, fetch their metadata, and store it. Best-effort per token:
   * one failing address does not abort the rest.
   *
   * @returns {Promise<void>}
   */
  async detectAndFetchNewTokens() {
    // Unique, non-empty token addresses referenced by vaults.
    const vaults = await Vault.findAll({ attributes: ['token_address'] });
    const uniqueAddresses = [
      ...new Set(vaults.map((v) => v.token_address)),
    ].filter(Boolean);
    if (uniqueAddresses.length === 0) return;

    // One query for all known addresses instead of a findOne per address
    // (the original did N+1 lookups).
    const known = await Token.findAll({
      where: { address: uniqueAddresses },
      attributes: ['address'],
    });
    const knownAddresses = new Set(known.map((t) => t.address));

    for (const address of uniqueAddresses) {
      if (knownAddresses.has(address)) continue;
      try {
        const meta = await this.fetchTokenMetadata(address);
        if (meta) {
          await Token.create({
            address,
            symbol: meta.symbol,
            name: meta.name,
            decimals: meta.decimals,
          });
          console.log(`Token metadata stored for ${address}`);
        } else {
          // Explicitly surface the skip instead of silently moving on.
          console.warn(`No metadata available for ${address}; skipping`);
        }
      } catch (err) {
        console.error(`Failed to fetch/store metadata for ${address}:`, err);
      }
    }
  }

  /**
   * Fetch token metadata from the Soroban RPC endpoint.
   *
   * NOTE(review): `/getTokenMetadata` looks like a placeholder path —
   * confirm against the real Soroban RPC API before production use.
   *
   * @param {string} address - token contract address.
   * @returns {Promise<{symbol: string, name: string, decimals: number}|null>}
   *   metadata, or null when unavailable or malformed.
   */
  async fetchTokenMetadata(address) {
    try {
      const response = await axios.post(
        `${SOROBAN_RPC_URL}/getTokenMetadata`,
        { address },
        // Timeout so a dead RPC node cannot hang the worker indefinitely.
        { timeout: 10000 }
      );
      const { symbol, name, decimals } = response.data ?? {};
      if (symbol && name && typeof decimals === 'number') {
        return { symbol, name, decimals };
      }
      return null;
    } catch (err) {
      // Best-effort: log the failure (the original swallowed it silently)
      // and signal "no metadata" to the caller.
      console.error(`Metadata fetch failed for ${address}:`, err.message);
      return null;
    }
  }
}

module.exports = { TokenMetadataWorker };
#!/bin/bash
# Automated Postgres backup: dump, compress, upload to S3 (SSE), and prune
# copies older than the retention window both locally and in S3.
set -euo pipefail

# Config
PG_DB="vestingvault"
PG_USER="postgres"
PG_HOST="localhost"
BACKUP_DIR="/var/backups/vestingvault"
S3_BUCKET="s3://vestingvault-backups"
RETENTION_DAYS=30
DATE=$(date +"%Y-%m-%d_%H-%M-%S")
DUMP_FILE="$BACKUP_DIR/backup_$DATE.sql"
ARCHIVE_FILE="$DUMP_FILE.gz"

# Ensure backup directory exists
mkdir -p "$BACKUP_DIR"

# Dump Postgres database. Credentials should come from ~/.pgpass or
# PGPASSWORD in the environment — never hard-coded here.
pg_dump -h "$PG_HOST" -U "$PG_USER" "$PG_DB" > "$DUMP_FILE"

# Compress the dump
gzip "$DUMP_FILE"

# Upload to S3 with server-side encryption (requires configured AWS CLI)
aws s3 cp "$ARCHIVE_FILE" "$S3_BUCKET/" --sse AES256

# Cleanup old local backups
find "$BACKUP_DIR" -name "*.gz" -mtime +"$RETENTION_DAYS" -exec rm {} \;

# Cleanup old S3 backups.
# NOTE(review): an S3 lifecycle rule would be more robust than client-side
# pruning; kept here to preserve the script's self-contained behavior.
THIRTY_DAYS_AGO=$(date -d "$RETENTION_DAYS days ago" +%s)
aws s3 ls "$S3_BUCKET/" | awk '{print $4}' | while IFS= read -r file; do
  # `|| true` so a filename without a timestamp doesn't kill the loop
  # subshell under `set -e` (grep exits 1 on no match).
  FILE_DATE=$(echo "$file" | grep -oP '\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}' || true)
  if [[ -n "$FILE_DATE" ]]; then
    # Convert "YYYY-mm-dd_HH-MM-SS" to "YYYY-mm-dd HH:MM:SS" — GNU date
    # cannot parse the raw filename form (the original `date -d "$FILE_DATE"`
    # failed on the hyphenated time portion, so pruning never worked).
    PARSEABLE="${FILE_DATE%%_*} $(echo "${FILE_DATE#*_}" | tr '-' ':')"
    FILE_TIMESTAMP=$(date -d "$PARSEABLE" +%s 2>/dev/null) || continue
    if (( FILE_TIMESTAMP < THIRTY_DAYS_AGO )); then
      aws s3 rm "$S3_BUCKET/$file"
    fi
  fi
done

# Log
echo "Backup completed and uploaded to S3: $ARCHIVE_FILE"
// Register a webhook URL for an organization so claim events can be pushed
// to it by the indexing service.
// NOTE(review): this admin route has no authentication/authorization guard
// visible here — confirm it is mounted behind admin middleware before exposure.
app.post('/api/admin/webhooks', async (req, res) => {
  try {
    const { organization_id, webhook_url } = req.body;
    if (!organization_id || !webhook_url) {
      return res
        .status(400)
        .json({ success: false, error: 'organization_id and webhook_url are required' });
    }

    // Only accept well-formed http(s) URLs. Without this, arbitrary schemes
    // (file:, gopher:, internal hosts) could be stored and later fetched by
    // the server when the webhook fires — an SSRF vector.
    let parsed = null;
    try {
      parsed = new URL(webhook_url);
    } catch {
      parsed = null;
    }
    if (!parsed || !['http:', 'https:'].includes(parsed.protocol)) {
      return res
        .status(400)
        .json({ success: false, error: 'webhook_url must be a valid http(s) URL' });
    }

    const webhook = await OrganizationWebhook.create({ organization_id, webhook_url });
    return res.status(201).json({ success: true, data: webhook });
  } catch (error) {
    console.error('Error registering webhook:', error);
    // Log the detail server-side but do not leak internal error messages
    // (the original returned error.message to the client).
    return res.status(500).json({ success: false, error: 'Internal server error' });
  }
});
const { DataTypes, Model } = require('sequelize');

/**
 * OrganizationWebhook — a webhook URL registered by a DAO/organization,
 * called by the indexing service when claim events occur.
 * Backed by the `organization_webhooks` table from migration 004.
 */
class OrganizationWebhook extends Model {}

/**
 * Initialize the OrganizationWebhook model on the given Sequelize instance.
 *
 * Fix: migration 004 creates snake_case timestamp columns
 * (`created_at` / `updated_at`), but the original model declared camelCase
 * `createdAt` / `updatedAt` attributes with no column mapping — a runtime
 * column mismatch. `timestamps: true` + `underscored: true` maps Sequelize's
 * managed timestamps onto the snake_case columns that actually exist.
 *
 * @param {import('sequelize').Sequelize} sequelize - connected instance.
 * @returns {void}
 */
function initOrganizationWebhookModel(sequelize) {
  OrganizationWebhook.init(
    {
      id: {
        type: DataTypes.UUID,
        defaultValue: DataTypes.UUIDV4,
        primaryKey: true,
      },
      organization_id: {
        type: DataTypes.UUID,
        allowNull: false,
      },
      webhook_url: {
        type: DataTypes.STRING(512),
        allowNull: false,
      },
    },
    {
      sequelize,
      tableName: 'organization_webhooks',
      timestamps: true,
      underscored: true,
      // Index set mirrors migration 004 so sync() agrees with the schema.
      indexes: [
        { fields: ['organization_id'] },
        { fields: ['webhook_url'] },
      ],
    }
  );
}

module.exports = { OrganizationWebhook, initOrganizationWebhookModel };
+ const { OrganizationWebhook } = require('../models'); + const axios = require('axios'); + // Find webhooks for the organization (if vault has organization_id) + if (claim.organization_id) { + const webhooks = await OrganizationWebhook.findAll({ where: { organization_id: claim.organization_id } }); + for (const webhook of webhooks) { + try { + await axios.post(webhook.webhook_url, claim.toJSON()); + console.log(`Webhook fired: ${webhook.webhook_url}`); + } catch (err) { + console.error(`Webhook failed: ${webhook.webhook_url}`, err); + } + } + } return claim; } catch (error) { console.error('Error processing claim:', error);