diff --git a/SHARP/database/init.sql b/SHARP/database/init.sql index f9cb53c..dfc157d 100644 --- a/SHARP/database/init.sql +++ b/SHARP/database/init.sql @@ -196,4 +196,19 @@ CREATE TABLE IF NOT EXISTS used_hashcash_tokens ( token TEXT PRIMARY KEY, expires_at TIMESTAMPTZ NOT NULL ); -CREATE INDEX IF NOT EXISTS idx_used_hashcash_tokens_expires_at ON used_hashcash_tokens(expires_at); \ No newline at end of file +CREATE INDEX IF NOT EXISTS idx_used_hashcash_tokens_expires_at ON used_hashcash_tokens(expires_at); + +DROP TYPE IF EXISTS migration_status; + +CREATE TYPE migration_status AS ENUM ( + 'scheduled', -- Initial state + 'running', -- Migration in progress + 'failed', -- Migration failed + 'migrated' -- Successfully migrated +); + +CREATE TABLE IF NOT EXISTS migrations ( + id TEXT PRIMARY KEY, + status migration_status DEFAULT 'scheduled', + modfied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); diff --git a/SHARP/database/migrations/00_migration_table.sql b/SHARP/database/migrations/00_migration_table.sql new file mode 100644 index 0000000..c89138a --- /dev/null +++ b/SHARP/database/migrations/00_migration_table.sql @@ -0,0 +1,14 @@ +DROP TYPE IF EXISTS migration_status; + +CREATE TYPE migration_status AS ENUM ( + 'scheduled', -- Initial state + 'running', -- Migration in progress + 'failed', -- Migration failed + 'migrated' -- Successfully migrated +); + +CREATE TABLE IF NOT EXISTS migrations ( + id TEXT PRIMARY KEY, + status migration_status DEFAULT 'scheduled', + modfied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); diff --git a/SHARP/database/migrations/README.md b/SHARP/database/migrations/README.md new file mode 100644 index 0000000..16bfe93 --- /dev/null +++ b/SHARP/database/migrations/README.md @@ -0,0 +1,5 @@ +# Database Migrations + +Add here the database migrations you want to run, and also adjust the `../init.sql` file to include those changes. + +After that, also add them to the `MIGRATIONS` array, so that they are applied automatically. 
diff --git a/SHARP/main.js b/SHARP/main.js index 8ba2b32..294b94c 100644 --- a/SHARP/main.js +++ b/SHARP/main.js @@ -5,6 +5,8 @@ import postgres from 'postgres' import { resolveSrv, verifySharpDomain } from './dns-utils.js' import { validateAuthToken } from './middleware/auth.js' import { createHash } from 'crypto' +import path from "path" +import fs from "fs" const SHARP_PORT = +process.env.SHARP_PORT || 5000 const HTTP_PORT = +process.env.HTTP_PORT || SHARP_PORT + 1 @@ -15,69 +17,6 @@ const MAX_USERNAME_LENGTH = 20; const sql = postgres(process.env.DATABASE_URL) -// Cleanup for pending emails -setInterval(async () => { - try { - await sql` - UPDATE emails - SET status = 'failed', - error_message = 'Timed out while pending' - WHERE status = 'pending' - AND sent_at < NOW() - INTERVAL '30 seconds' - `; - } catch (error) { - console.error('Error updating stale pending emails:', error); - } -}, 10000); - -// Cleanup for expired emails -setInterval(async () => { - try { - const toDelete = await sql` - WITH RECURSIVE to_delete AS ( - SELECT id - FROM emails - WHERE expires_at < NOW() - AND expires_at IS NOT NULL - UNION ALL - SELECT e.id - FROM emails e - JOIN to_delete td ON e.reply_to_id = td.id - ) - SELECT id FROM to_delete - `; - - if (toDelete.length > 0) { - const ids = toDelete.map(r => r.id); - await sql` - DELETE FROM attachments - WHERE email_id = ANY(${ids}) - `; - await sql` - DELETE FROM emails - WHERE id = ANY(${ids}) - `; - } - } catch (error) { - console.error('Error cleaning up expired emails:', error); - } -}, 10000); - -// Cleanup for used hashcash tokens -setInterval(async () => { - try { - const result = await sql` - DELETE FROM used_hashcash_tokens - WHERE expires_at < NOW() - `; - if (result.count > 0) { - console.log(`Cleaned up ${result.count} expired hashcash tokens.`); - } - } catch (error) { - console.error('Error cleaning up used hashcash tokens:', error); - } -}, 3600000); // Run every hour - const PROTOCOL_VERSION = 'SHARP/1.3' const 
KEYWORDS = { @@ -517,12 +456,6 @@ async function processScheduledEmails() { } } -processScheduledEmails(); -setInterval(processScheduledEmails, 60000); - -const app = express() -app.use(cors(), express.json()) - function parseHashcashDate(dateString) { const year = parseInt(dateString.substring(0, 2), 10) + 2000; const month = parseInt(dateString.substring(2, 4), 10) - 1; // Month is 0-indexed @@ -617,255 +550,551 @@ function checkVocabulary(text, iq) { return { isValid: true, limit: maxWordLength }; } -app.post('/send', validateAuthToken, async (req, res) => { - let logEntry; - let emailId; - try { - const { hashcash, ...emailData } = req.body; - - let fp, tp; - try { - fp = parseSharpAddress(emailData.from); - tp = parseSharpAddress(emailData.to); - } catch { - return res.status(400).json({ - success: false, - message: 'Invalid SHARP address format' - }); +const MIGRATIONS_INIT_MIGRATION = {file:"00_migration_table.sql", id: "00_migration_table"} // this needs to be run first, as it creates the migrations table, it is also not noted in the migrations table itself, the existence of the table is the hypotetical entry in it + +/** + * @description Migration Structure + * @typedef {object} Migration + * @property {string} [description] - description for that migration, only visible in error messages + * @property {string} file - the filename inside 'migrations/', you also can use subfolders by specifying e.g. 'subfolder/file.sql' + * @property {string} id - the id, by which this migration is stored in the migrations table, doesn't have to be the same as file without suffix, but it is recommended to set it to that, HAS to be UNIQUE + * @property {function} [needsMigration] - if set this function is called asynchronously to check if a migration is needed. 
This is onyl needed for older migrations, where some users / people who self hosted this ran migrations manually, so running them again automatically will fail + */ + +const MIGRATIONS = [ + { + description: "add two new fields to the users table", + file: "5-20-2025.sql", + id: "5-20-2025", + needsMigration : async ()=>{ + const tables = await sql` + SELECT * + FROM information_schema.columns + WHERE table_schema = 'public' + AND table_name='users' + AND column_name= 'ip'; + ` + + // no ip column is present + return tables.length === 0; } + }, + { + description: "add table for new hashcash features", + file: "5-21.2025.sql", + id: "5-21.2025", + needsMigration : async ()=>{ + const tablePresent = await tableExists("used_hashcash_tokens") + + // table 'used_hashcash_tokens' is not present + return !tablePresent + } + }, - if (!req.turnstileVerified) { - return res.status(403).json({ - success: false, - message: 'Turnstile verification failed. Please try again.' - }); +] + +async function getMigrationStatus(id){ + + const migration = await sql` + SELECT * FROM migrations + WHERE id = ${id} + `; + + if(migration.length == 0){ + return undefined; + } + + return migration[0].status; + +} + +async function createMigrationStatus(id){ + await sql` + INSERT INTO migrations (id, status, modfied_at) + VALUES (${id}, 'scheduled', NOW()) + `; +} + +async function setMigrationStatus(id, status){ + await sql` + UPDATE migrations + SET status = ${status}, + modfied_at = NOW() + WHERE id = ${id} + `; +} + +async function runSingleMigration(migrationsDir, migration, updateStatus = true){ + + const setMigrationStatusHelper = async (id, status) => { + if(!updateStatus){ + return; } - const spamScore = await calculateSpamScore(hashcash, emailData.to); - let status = 'pending'; + await setMigrationStatus(id, status) + + } + + const {file, id, needsMigration, description} = migration; + + // undefined means, no entry with that id was found + const migrationStatus = updateStatus ? 
await getMigrationStatus(id) : null + + + if(migrationStatus === undefined){ + await createMigrationStatus(id) + }else if(migrationStatus === null){ + // do nothing, updateStatus is set to false + }else if(migrationStatus !== "scheduled"){ + // migration was already executed + return "skipped" + } + + setMigrationStatusHelper(id, "running"); + + const finalFile = path.join(migrationsDir, file) + + try{ + + // if we didn't have a status and there is a function that checks if the migration is needed, run it + if( (migrationStatus === undefined || migrationStatus === null) && needsMigration !== undefined && typeof needsMigration === "function"){ + + const result = await needsMigration(); + + // if no migration is needed, set it as migrated and return + if(!result){ + await setMigrationStatusHelper(id, "migrated"); + + return "ok"; + } - if (!hashcash || spamScore >= HASHCASH_THRESHOLDS.REJECT) { - return res.status(429).json({ - success: false, - message: `Insufficient proof of work or invalid/reused token. Required: ${HASHCASH_THRESHOLDS.TRIVIAL} bits. 
Score: ${spamScore}.` - }); } - if (spamScore > 0) { - status = 'spam'; + if(!fs.existsSync(finalFile)){ + throw new Error("Migrations file that was manually specified doesn't exist'") } - if (emailData.scheduled_at && status !== 'spam') { - status = 'scheduled'; + await sql.file(finalFile) + + await setMigrationStatusHelper(id, "migrated") + + return "ok" + }catch(err){ + console.warn(`An error occurred while running migration with id: '${id}' and description: ${description}`, err) + + await setMigrationStatusHelper(id, "failed") + return "error"; + } + +} + +async function tableExists(table, schema = "public"){ + + const tables = await sql` + SELECT * + FROM information_schema.tables + WHERE table_schema = ${schema} + AND table_name = ${table} + `; + + return tables.length !== 0 + +} + +async function createMigrationTableIfNotExists(migrationsDir){ + + const migrationTableExists = await tableExists('migrations') + + + if(migrationTableExists){ + return true + } + + const result = await runSingleMigration(migrationsDir,MIGRATIONS_INIT_MIGRATION, false) + + return (result !== "error") +} + +async function runMigrations(){ + + const migrationsDir = path.resolve(__dirname, "database", "migrations"); + + if(!fs.existsSync(migrationsDir)){ + console.warn("Migrations folder not included in image correctly, aborting migrations") + return; + } + + console.log("Running migrations") + + const succ = await createMigrationTableIfNotExists(migrationsDir); + + if(!succ){ + console.warn("An error occured, while creating the migrations table, aborting migrations") + return; + } + + const results = {"ok": 0, "error": 0, "skipped": 0} + + for(const migration of MIGRATIONS){ + const result = await runSingleMigration(migrationsDir, migration) + results[result] += 1 + } + + console.log(`Migrations Result: Total: ${MIGRATIONS.length} Ok: ${results.ok} Error: ${results.error} Skipped: ${results.skipped}`) +} + +function startIntervals(){ + // Cleanup for pending emails + 
setInterval(async () => { + try { + await sql` + UPDATE emails + SET status = 'failed', + error_message = 'Timed out while pending' + WHERE status = 'pending' + AND sent_at < NOW() - INTERVAL '30 seconds' + `; + } catch (error) { + console.error('Error updating stale pending emails:', error); } + }, 10000); - const { from, to, subject, body, content_type = 'text/plain', - html_body, scheduled_at, reply_to_id, thread_id, - attachments = [], expires_at = null, self_destruct = false } = emailData; + // Cleanup for expired emails + setInterval(async () => { + try { + const toDelete = await sql` + WITH RECURSIVE to_delete AS ( + SELECT id + FROM emails + WHERE expires_at < NOW() + AND expires_at IS NOT NULL + UNION ALL + SELECT e.id + FROM emails e + JOIN to_delete td ON e.reply_to_id = td.id + ) + SELECT id FROM to_delete + `; - if (fp.username !== req.user.username || fp.domain !== req.user.domain) { - return res.status(403).json({ - success: false, - message: 'You can only send emails from your own address.' 
- }); + if (toDelete.length > 0) { + const ids = toDelete.map(r => r.id); + await sql` + DELETE FROM attachments + WHERE email_id = ANY(${ids}) + `; + await sql` + DELETE FROM emails + WHERE id = ANY(${ids}) + `; + } + } catch (error) { + console.error('Error cleaning up expired emails:', error); } + }, 10000); - if (fp.domain !== DOMAIN) { - return res.status(403).json({ - success: false, - message: `This server does not relay mail for the domain ${fp.domain}` - }); + // Cleanup for used hashcash tokens + setInterval(async () => { + try { + const result = await sql` + DELETE FROM used_hashcash_tokens + WHERE expires_at < NOW() + `; + if (result.count > 0) { + console.log(`Cleaned up ${result.count} expired hashcash tokens.`); + } + } catch (error) { + console.error('Error cleaning up used hashcash tokens:', error); } + }, 3600000); // Run every hour + + + processScheduledEmails(); + setInterval(processScheduledEmails, 60000); + +} + +// one-shot delay helper; setTimeout (not setInterval) so no timer leaks after resolve +async function sleep(ms){ + return new Promise(resolve=>setTimeout(resolve, ms)) +} - if (emailData.content_type === 'text/plain' && emailData.body) { - const users = await sql`SELECT iq FROM users WHERE username = ${req.user.username}`; - const userIQ = users[0]?.iq; - const { isValid, limit } = checkVocabulary(emailData.body, userIQ); - if (!isValid) { +async function waitForSqlConnection(){ + console.log("Waiting for a database connection") + while(true){ + try { + // the connection gets created on the first query, that is executed, so do that here + const result = await sql`SELECT NOW()`; + return + } catch (_) { + await sleep(1000) + } + + } + +} + +async function main(){ + await waitForSqlConnection(); + + await runMigrations() + + startIntervals() + + const app = express() + app.use(cors(), express.json()) + + + app.post('/send', validateAuthToken, async (req, res) => { + let logEntry; + let emailId; + try { + const { hashcash, ...emailData } = req.body; + + let fp, tp; + try { + fp = parseSharpAddress(emailData.from); + tp = 
parseSharpAddress(emailData.to); + } catch { return res.status(400).json({ success: false, - message: `Message contains words longer than the allowed ${limit} characters for your IQ level (${userIQ}). Please simplify.` + message: 'Invalid SHARP address format' }); } - } - if (hashcash && spamScore < HASHCASH_THRESHOLDS.REJECT) { - try { - const hashcashDate = parseHashcashDate(hashcash.split(':')[2]); + if (!req.turnstileVerified) { + return res.status(403).json({ + success: false, + message: 'Turnstile verification failed. Please try again.' + }); + } - const tokenExpiry = new Date(hashcashDate.getTime() + 24 * 60 * 60 * 1000); - await sql`INSERT INTO used_hashcash_tokens (token, expires_at) VALUES (${hashcash}, ${tokenExpiry}) ON CONFLICT (token) DO NOTHING`; - } catch (e) { - console.error(`Failed to log used hashcash token ${hashcash} for /send:`, e); - // proceed with email sending + const spamScore = await calculateSpamScore(hashcash, emailData.to); + let status = 'pending'; + + if (!hashcash || spamScore >= HASHCASH_THRESHOLDS.REJECT) { + return res.status(429).json({ + success: false, + message: `Insufficient proof of work or invalid/reused token. Required: ${HASHCASH_THRESHOLDS.TRIVIAL} bits. 
Score: ${spamScore}.` + }); } - } - const attachmentKeys = attachments.map(att => att.key).filter(Boolean); + if (spamScore > 0) { + status = 'spam'; + } - if (scheduled_at && status === 'scheduled') { - logEntry = await logEmail(from, fp.domain, to, tp.domain, subject, body, content_type, html_body, status, scheduled_at, reply_to_id, thread_id, expires_at, self_destruct); - emailId = logEntry[0]?.id; - if (emailId && attachmentKeys.length > 0) { - await sql`UPDATE attachments SET email_id = ${emailId}, status = ${status} WHERE key = ANY(${attachmentKeys})`; + if (emailData.scheduled_at && status !== 'spam') { + status = 'scheduled'; } - return res.json({ success: true, scheduled: true, id: emailId }); - } - if (tp.domain === DOMAIN) { - if (!await verifyUser(tp.username, tp.domain)) { - return res.status(404).json({ success: false, message: 'Recipient user not found on this server' }); + const { from, to, subject, body, content_type = 'text/plain', + html_body, scheduled_at, reply_to_id, thread_id, + attachments = [], expires_at = null, self_destruct = false } = emailData; + + if (fp.username !== req.user.username || fp.domain !== req.user.domain) { + return res.status(403).json({ + success: false, + message: 'You can only send emails from your own address.' + }); } - const finalStatus = status === 'pending' ? 
'sent' : status; - logEntry = await logEmail(from, fp.domain, to, tp.domain, subject, body, content_type, html_body, finalStatus, null, reply_to_id, thread_id, expires_at, self_destruct); - emailId = logEntry[0]?.id; - if (emailId && attachmentKeys.length > 0) { - await sql`UPDATE attachments SET email_id = ${emailId}, status = ${finalStatus} WHERE key = ANY(${attachmentKeys})`; + + if (fp.domain !== DOMAIN) { + return res.status(403).json({ + success: false, + message: `This server does not relay mail for the domain ${fp.domain}` + }); } - return res.json({ success: true, id: emailId }); - } - logEntry = await logEmail( - from, fp.domain, to, tp.domain, subject, body, - content_type, html_body, status, scheduled_at, - reply_to_id, thread_id, expires_at, self_destruct - ); - emailId = logEntry[0]?.id; + if (emailData.content_type === 'text/plain' && emailData.body) { + const users = await sql`SELECT iq FROM users WHERE username = ${req.user.username}`; + const userIQ = users[0]?.iq; + const { isValid, limit } = checkVocabulary(emailData.body, userIQ); + if (!isValid) { + return res.status(400).json({ + success: false, + message: `Message contains words longer than the allowed ${limit} characters for your IQ level (${userIQ}). 
Please simplify.` + }); + } + } - if (emailId && attachmentKeys.length > 0) { - console.log(`[Remote] Linking ${attachmentKeys.length} attachments to email ID ${emailId}:`, attachmentKeys); - await sql` - UPDATE attachments - SET email_id = ${emailId}, - status = 'sending' - WHERE key = ANY(${attachmentKeys}) - `; - } + if (hashcash && spamScore < HASHCASH_THRESHOLDS.REJECT) { + try { + const hashcashDate = parseHashcashDate(hashcash.split(':')[2]); - // don't attempt remote delivery, just store as spam - if (status === 'spam') { - if (emailId) { - await sql`UPDATE emails SET status='spam' WHERE id=${emailId}`; - if (attachmentKeys.length > 0) { - await sql`UPDATE attachments SET status='spam' WHERE email_id = ${emailId}`; + const tokenExpiry = new Date(hashcashDate.getTime() + 24 * 60 * 60 * 1000); + await sql`INSERT INTO used_hashcash_tokens (token, expires_at) VALUES (${hashcash}, ${tokenExpiry}) ON CONFLICT (token) DO NOTHING`; + } catch (e) { + console.error(`Failed to log used hashcash token ${hashcash} for /send:`, e); + // proceed with email sending } } - return res.json({ success: true, id: emailId, message: "Email marked as spam due to low PoW or Turnstile policy." 
}); - } + const attachmentKeys = attachments.map(att => att.key).filter(Boolean); - try { - const result = await Promise.race([ - sendEmailToRemoteServer({ - from, to, subject, body, content_type, html_body, - attachments: attachmentKeys, - hashcash: hashcash - }), - new Promise((_, r) => setTimeout(() => { - r(new Error('Connection timed out')) - }, 10000)) - ]) - - if (result.responses?.some(r => r.type === 'ERROR')) { + if (scheduled_at && status === 'scheduled') { + logEntry = await logEmail(from, fp.domain, to, tp.domain, subject, body, content_type, html_body, status, scheduled_at, reply_to_id, thread_id, expires_at, self_destruct); + emailId = logEntry[0]?.id; + if (emailId && attachmentKeys.length > 0) { + await sql`UPDATE attachments SET email_id = ${emailId}, status = ${status} WHERE key = ANY(${attachmentKeys})`; + } + return res.json({ success: true, scheduled: true, id: emailId }); + } + + if (tp.domain === DOMAIN) { + if (!await verifyUser(tp.username, tp.domain)) { + return res.status(404).json({ success: false, message: 'Recipient user not found on this server' }); + } + const finalStatus = status === 'pending' ? 
'sent' : status; + logEntry = await logEmail(from, fp.domain, to, tp.domain, subject, body, content_type, html_body, finalStatus, null, reply_to_id, thread_id, expires_at, self_destruct); + emailId = logEntry[0]?.id; + if (emailId && attachmentKeys.length > 0) { + await sql`UPDATE attachments SET email_id = ${emailId}, status = ${finalStatus} WHERE key = ANY(${attachmentKeys})`; + } + return res.json({ success: true, id: emailId }); + } + + logEntry = await logEmail( + from, fp.domain, to, tp.domain, subject, body, + content_type, html_body, status, scheduled_at, + reply_to_id, thread_id, expires_at, self_destruct + ); + emailId = logEntry[0]?.id; + + if (emailId && attachmentKeys.length > 0) { + console.log(`[Remote] Linking ${attachmentKeys.length} attachments to email ID ${emailId}:`, attachmentKeys); + await sql` + UPDATE attachments + SET email_id = ${emailId}, + status = 'sending' + WHERE key = ANY(${attachmentKeys}) + `; + } + + // don't attempt remote delivery, just store as spam + if (status === 'spam') { if (emailId) { - await sql`UPDATE emails SET status='rejected', error_message = ${result.responses.find(r => r.type === 'ERROR')?.message || 'Remote server rejected'} WHERE id=${emailId}`; + await sql`UPDATE emails SET status='spam' WHERE id=${emailId}`; if (attachmentKeys.length > 0) { - await sql`UPDATE attachments SET status='rejected' WHERE key = ANY(${attachmentKeys})`; + await sql`UPDATE attachments SET status='spam' WHERE email_id = ${emailId}`; } } - return res.status(400).json({ success: false, message: 'Remote server rejected the email' }) + return res.json({ success: true, id: emailId, message: "Email marked as spam due to low PoW or Turnstile policy." }); } - if (emailId) { - // Update status to 'sent' only upon successful remote delivery, - // even if it was initially marked as 'spam' by the sender. - // The recipient server will make its own final determination. 
- await sql`UPDATE emails SET status='sent', sent_at = NOW() WHERE id=${emailId}`; - if (attachmentKeys.length > 0) { - await sql`UPDATE attachments SET status='sent' WHERE key = ANY(${attachmentKeys})`; + + try { + const result = await Promise.race([ + sendEmailToRemoteServer({ + from, to, subject, body, content_type, html_body, + attachments: attachmentKeys, + hashcash: hashcash + }), + new Promise((_, r) => setTimeout(() => { + r(new Error('Connection timed out')) + }, 10000)) + ]) + + if (result.responses?.some(r => r.type === 'ERROR')) { + if (emailId) { + await sql`UPDATE emails SET status='rejected', error_message = ${result.responses.find(r => r.type === 'ERROR')?.message || 'Remote server rejected'} WHERE id=${emailId}`; + if (attachmentKeys.length > 0) { + await sql`UPDATE attachments SET status='rejected' WHERE key = ANY(${attachmentKeys})`; + } + } + return res.status(400).json({ success: false, message: 'Remote server rejected the email' }) } + + if (emailId) { + // Update status to 'sent' only upon successful remote delivery, + // even if it was initially marked as 'spam' by the sender. + // The recipient server will make its own final determination. 
+ await sql`UPDATE emails SET status='sent', sent_at = NOW() WHERE id=${emailId}`; + if (attachmentKeys.length > 0) { + await sql`UPDATE attachments SET status='sent' WHERE key = ANY(${attachmentKeys})`; + } + } + return res.json({ ...result, id: emailId }); + } catch (e) { + if (emailId) { + await sql`UPDATE emails SET status='failed', error_message=${e.message} WHERE id=${emailId}`; + if (attachmentKeys.length > 0) { + await sql`UPDATE attachments SET status='failed' WHERE key = ANY(${attachmentKeys})`; + } + } + throw e; } - return res.json({ ...result, id: emailId }); } catch (e) { + console.error('Request failed:', e); if (emailId) { - await sql`UPDATE emails SET status='failed', error_message=${e.message} WHERE id=${emailId}`; - if (attachmentKeys.length > 0) { - await sql`UPDATE attachments SET status='failed' WHERE key = ANY(${attachmentKeys})`; - } - } - throw e; - } - } catch (e) { - console.error('Request failed:', e); - if (emailId) { - const checkStatus = await sql`SELECT status FROM emails WHERE id=${emailId}`; - if (checkStatus.length > 0 && !['failed', 'rejected', 'spam'].includes(checkStatus[0].status)) { - await sql`UPDATE emails SET status='failed', error_message=${e.message} WHERE id=${emailId}`; - const attachmentKeys = req.body.attachments?.map(att => att.key).filter(Boolean) || []; - if (attachmentKeys.length > 0) { - await sql`UPDATE attachments SET status='failed' WHERE email_id = ${emailId}`; + const checkStatus = await sql`SELECT status FROM emails WHERE id=${emailId}`; + if (checkStatus.length > 0 && !['failed', 'rejected', 'spam'].includes(checkStatus[0].status)) { + await sql`UPDATE emails SET status='failed', error_message=${e.message} WHERE id=${emailId}`; + const attachmentKeys = req.body.attachments?.map(att => att.key).filter(Boolean) || []; + if (attachmentKeys.length > 0) { + await sql`UPDATE attachments SET status='failed' WHERE email_id = ${emailId}`; + } } } - } - return res.status(400).json({ success: false, message: 
e.message }) - } -}) - -app.get('/server/health', (_, res) => - res.json({ - status: 'ok', - protocol: PROTOCOL_VERSION, - domain: DOMAIN, - hashcash: { - minBits: HASHCASH_THRESHOLDS.TRIVIAL, - recommendedBits: HASHCASH_THRESHOLDS.GOOD + return res.status(400).json({ success: false, message: e.message }) } }) -) - -net - .createServer(socket => { - const MAX_BUFFER_SIZE = 10 * 1024 * 1024; // 10MB total buffer limit - const remoteAddress = `${socket.remoteAddress}:${socket.remotePort}`; - const state = { step: 'HELLO', buffer: '', hashcash: null }; - - socket.on('data', d => { - if (state.buffer.length + d.length > MAX_BUFFER_SIZE) { - console.error(`Buffer overflow attempt from ${remoteAddress}`); - sendError(socket, 'Maximum message size exceeded'); - socket.destroy(); - return; + + app.get('/server/health', (_, res) => + res.json({ + status: 'ok', + protocol: PROTOCOL_VERSION, + domain: DOMAIN, + hashcash: { + minBits: HASHCASH_THRESHOLDS.TRIVIAL, + recommendedBits: HASHCASH_THRESHOLDS.GOOD } + }) + ) + + net + .createServer(socket => { + const MAX_BUFFER_SIZE = 10 * 1024 * 1024; // 10MB total buffer limit + const remoteAddress = `${socket.remoteAddress}:${socket.remotePort}`; + const state = { step: 'HELLO', buffer: '', hashcash: null }; + + socket.on('data', d => { + if (state.buffer.length + d.length > MAX_BUFFER_SIZE) { + console.error(`Buffer overflow attempt from ${remoteAddress}`); + sendError(socket, 'Maximum message size exceeded'); + socket.destroy(); + return; + } - state.buffer += d; - let idx; - while ((idx = state.buffer.indexOf('\n')) > -1) { - const line = state.buffer.slice(0, idx).replace(/\r$/, ''); - state.buffer = state.buffer.slice(idx + 1); - if (line.trim().length > 0) { - handleSharpMessage(socket, line, state); + state.buffer += d; + let idx; + while ((idx = state.buffer.indexOf('\n')) > -1) { + const line = state.buffer.slice(0, idx).replace(/\r$/, ''); + state.buffer = state.buffer.slice(idx + 1); + if (line.trim().length > 0) { + 
handleSharpMessage(socket, line, state); + } } - } - }); + }); - socket.on('error', (err) => { - console.error(`Socket error from ${remoteAddress}:`, err); - }); - socket.on('end', () => { - console.log(`Connection ended from ${remoteAddress}`); - }); - socket.on('close', (hadError) => { - console.log(`Connection closed from ${remoteAddress}. Had error: ${hadError}`); - }); - }) - .listen(SHARP_PORT, () => { - console.log( - `SHARP TCP server listening on port ${SHARP_PORT} ` + - `(HTTP on ${HTTP_PORT})` - ) - console.log(`Server address format: user#${DOMAIN}:${SHARP_PORT}`) + socket.on('error', (err) => { + console.error(`Socket error from ${remoteAddress}:`, err); + }); + socket.on('end', () => { + console.log(`Connection ended from ${remoteAddress}`); + }); + socket.on('close', (hadError) => { + console.log(`Connection closed from ${remoteAddress}. Had error: ${hadError}`); + }); + }) + .listen(SHARP_PORT, () => { + console.log( + `SHARP TCP server listening on port ${SHARP_PORT} ` + + `(HTTP on ${HTTP_PORT})` + ) + console.log(`Server address format: user#${DOMAIN}:${SHARP_PORT}`) + }) + + app.listen(HTTP_PORT, () => { + console.log(`HTTP server listening on port ${HTTP_PORT}`) }) +} -app.listen(HTTP_PORT, () => { - console.log(`HTTP server listening on port ${HTTP_PORT}`) -}) +main()