diff --git a/README.md b/README.md index ca2e7d2b..df946621 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ With Proxmox 9's native OCI container support, the easiest installation method i ```bash # Pull and run the container from GHCR -pct create ghcr.io/mieweb/opensource-server:latest \ + pct create ghcr.io/mieweb/opensource-server:latest \ --hostname opensource-server \ --net0 name=eth0,bridge=vmbr0,ip=dhcp \ --features nesting=1 \ diff --git a/create-a-container/bin/oci-build-push-pull.js b/create-a-container/bin/oci-build-push-pull.js new file mode 100644 index 00000000..e2254bc7 --- /dev/null +++ b/create-a-container/bin/oci-build-push-pull.js @@ -0,0 +1,254 @@ +#!/usr/bin/env node +/** + * oci-build-push-pull.js + * + * Combined OCI image build, push, and pull job. + * + * This utility: + * 1. Builds site-specific OCI images using Docker with --build-arg DOMAIN + * 2. Pushes site images to a local registry + * 3. Pulls pre-built OCI images (Debian 13, Rocky 9) to all Proxmox nodes + * 4. Pulls site-specific images to all Proxmox nodes + * + * Environment variables: + * - LOCAL_REGISTRY (default: localhost:5000) + * - OCI_REPO (default: opensource-server) + * - BUILD_CONTEXT (default: /opt/opensource-server) + * - DOCKERFILE_PATH (default: /opt/opensource-server/templates/debian.Dockerfile) + * - IMAGE_TAG_SUFFIX (default: latest) + * - OCI_IMAGE_TAG (default: latest, for pre-built images) + */ + +const { spawn } = require('child_process'); +const db = require('../models'); +const ProxmoxApi = require('../utils/proxmox-api'); + +/** + * Sanitize domain/site name into valid Docker tag. + * Converts to lowercase, replaces invalid characters with hyphens, and limits length. 
+ * @param {string} s - Input domain or site name + * @returns {string} Sanitized tag suitable for Docker image naming + */ +function sanitizeTag(s) { + return (s || 'site').toLowerCase().replace(/[^a-z0-9._-]/g, '-').replace(/-+/g, '-').slice(0, 128); +} + +/** + * Execute a command and stream output to console, returning a promise. + * @param {string} cmd - Command to execute (e.g., 'docker') + * @param {string[]} args - Command arguments + * @param {object} [opts={}] - Additional options for spawn (e.g., cwd, env) + * @returns {Promise} Resolves on success, rejects if command exits with non-zero code + */ +function runCommandStreamed(cmd, args, opts = {}) { + return new Promise((resolve, reject) => { + const p = spawn(cmd, args, Object.assign({ stdio: 'inherit' }, opts)); + p.on('error', reject); + p.on('close', code => { + if (code === 0) resolve(); + else reject(new Error(`${cmd} ${args.join(' ')} exited with code ${code}`)); + }); + }); +} + +/** + * Get list of pre-built OCI images to pull from registry. + * Reads environment variables to determine registry and image tags. + * @returns {Array<{name: string, registry: string, image: string, tag: string}>} List of pre-built image specifications + */ +// No pre-built images are used; we only operate on site-built images. + +/** + * Build and push a site-specific OCI image using Docker. 
+ * @param {object} site - Site database object with id, name, domain, internalDomain properties + * @param {string} registry - Container registry URL (e.g., localhost:5000) + * @param {string} repoBase - Repository base path in registry (e.g., opensource-server) + * @param {string} buildContext - Docker build context path + * @param {string} dockerfilePath - Path to Dockerfile to use for build + * @param {string} [tagSuffix='latest'] - Image tag suffix (appended after domain) + * @returns {Promise<{imageRef: string, domain: string}>} Built image reference and domain used + * @throws {Error} If docker build or push fails + */ +async function buildAndPushImageForSite(site, registry, repoBase, buildContext, dockerfilePath, tagSuffix = 'latest') { + const domain = site.internalDomain || site.domain || site.name || `site-${site.id}`; + const sanitized = sanitizeTag(domain); + const imageRef = `${registry}/${repoBase}/${sanitized}:${tagSuffix}`; + + console.log(`[oci-build-push-pull] Building image for site ${site.id} (${domain}) -> ${imageRef}`); + + // docker build --build-arg DOMAIN=${domain} -f -t + await runCommandStreamed('docker', [ + 'build', + '--build-arg', `DOMAIN=${domain}`, + '-f', dockerfilePath, + '-t', imageRef, + buildContext + ]); + + console.log(`[oci-build-push-pull] Pushing image ${imageRef} to registry ${registry}`); + await runCommandStreamed('docker', ['push', imageRef]); + + return { imageRef, domain }; +} + +/** + * Main job execution: orchestrate three phases of OCI image management. + * Phase 1: Build site-specific images from Dockerfile and push to registry. + * Phase 2: Prepare list of all images (site + pre-built) to pull. + * Phase 3: Pull all images to all Proxmox nodes concurrently. + * + * Configuration may be supplied via CLI args (preferred) or environment variables as fallbacks. 
+ * @param {object} [opts] + * @param {string} [opts.registry] - Container registry (overrides LOCAL_REGISTRY env) + * @param {string} [opts.repoBase] - Repository base path in registry (overrides OCI_REPO env) + * @param {string} [opts.buildContext] - Docker build context path (overrides BUILD_CONTEXT env) + * @param {string} [opts.dockerfilePath] - Path to Dockerfile (overrides DOCKERFILE_PATH env) + * @param {string} [opts.tagSuffix] - Image tag suffix (overrides IMAGE_TAG_SUFFIX env) + * @returns {Promise} Resolves on completion, calls process.exit(0) or process.exit(1) + */ +async function run(opts = {}) { + const registry = opts.registry || process.env.LOCAL_REGISTRY || 'localhost:5000'; + const repoBase = opts.repoBase || process.env.OCI_REPO || 'opensource-server'; + const buildContext = opts.buildContext || process.env.BUILD_CONTEXT || '/opt/opensource-server'; + const dockerfilePath = opts.dockerfilePath || process.env.DOCKERFILE_PATH || '/opt/opensource-server/templates/debian.Dockerfile'; + const tagSuffix = opts.tagSuffix || process.env.IMAGE_TAG_SUFFIX || 'latest'; + + try { + await db.sequelize.authenticate(); + console.log('[oci-build-push-pull] Database connected'); + + // ========== PHASE 1: Build and push site-specific images ========== + console.log('[oci-build-push-pull] ========== PHASE 1: Build & Push Site Images =========='); + + const sites = await db.Site.findAll(); + const siteImages = []; + + if (!sites || sites.length === 0) { + console.warn('[oci-build-push-pull] No sites found in DB; skipping site image builds'); + } else { + console.log(`[oci-build-push-pull] Found ${sites.length} site(s) to build`); + + // Run all site builds in parallel and collect results + const buildPromises = sites.map(site => buildAndPushImageForSite(site, registry, repoBase, buildContext, dockerfilePath, tagSuffix)); + const buildResults = await Promise.allSettled(buildPromises); + + buildResults.forEach((res, idx) => { + const site = sites[idx]; + if 
(res.status === 'fulfilled') { + siteImages.push(res.value.imageRef); + } else { + console.error(`[oci-build-push-pull] Error building/pushing for site ${site.id}: ${res.reason && res.reason.message ? res.reason.message : res.reason}`); + } + }); + + console.log(`[oci-build-push-pull] Successfully built and pushed ${siteImages.length}/${sites.length} site images`); + } + + // ========== PHASE 2: Prepare all images to pull ========== + // We only pull site-built images; pre-built templates are not handled here. + const allImagesToPull = [...siteImages]; + console.log(`[oci-build-push-pull] Will pull ${allImagesToPull.length} site image(s):`); + allImagesToPull.forEach(img => console.log(` - ${img}`)); + + // ========== PHASE 3: Pull all images to all nodes ========== + console.log('[oci-build-push-pull] ========== PHASE 3: Pull Images to Nodes =========='); + + const nodes = await db.Node.findAll(); + if (!nodes || nodes.length === 0) { + console.warn('[oci-build-push-pull] No Proxmox nodes found in DB; skipping pull operations'); + console.log('[oci-build-push-pull] Job completed successfully'); + process.exit(0); + } + + console.log(`[oci-build-push-pull] Found ${nodes.length} Proxmox node(s)`); + + let totalSuccess = 0; + let totalFailure = 0; + + for (const imageRef of allImagesToPull) { + console.log(`[oci-build-push-pull] Pulling image: ${imageRef}`); + + const pulls = nodes.map(async (node) => { + if (!node.apiUrl || !node.tokenId || !node.secret) { + console.warn(`[oci-build-push-pull] Node ${node.name} missing API credentials, skipping pull`); + return false; + } + + try { + const api = new ProxmoxApi(node.apiUrl, node.tokenId, node.secret, { + httpsAgent: { rejectUnauthorized: node.tlsVerify !== false } + }); + + const targetStorage = await api.chooseStorageForVztmpl(node.name, node.defaultStorage); + if (!targetStorage) { + console.warn(`[oci-build-push-pull] No suitable storage on node ${node.name}, skipping`); + return false; + } + + 
console.log(`[oci-build-push-pull] Instructing node ${node.name} to pull ${imageRef} into storage ${targetStorage}`); + await api.pullImageAndWait(node.name, imageRef, targetStorage); + return true; + } catch (err) { + console.error(`[oci-build-push-pull] Failed to pull ${imageRef} on ${node.name}: ${err.message}`); + return false; + } + }); + + const results = await Promise.allSettled(pulls); + const success = results.filter(r => r.status === 'fulfilled' && r.value === true).length; + const failed = results.length - success; + + totalSuccess += success; + totalFailure += failed; + + console.log(`[oci-build-push-pull] Image ${imageRef} pulled to ${success}/${nodes.length} nodes (${failed} failures)`); + } + + // ========== Final Summary ========== + console.log('[oci-build-push-pull] ========== Job Summary =========='); + console.log(`[oci-build-push-pull] Site images built: ${siteImages.length}`); + console.log(`[oci-build-push-pull] Total pull operations: ${totalSuccess + totalFailure}`); + console.log(`[oci-build-push-pull] Successful pulls: ${totalSuccess}`); + console.log(`[oci-build-push-pull] Failed pulls: ${totalFailure}`); + + if (totalFailure === 0 || totalSuccess > 0) { + console.log('[oci-build-push-pull] OCI build, push, and pull job completed successfully'); + process.exit(0); + } else { + throw new Error('All pull operations failed'); + } + } catch (err) { + console.error('[oci-build-push-pull] Fatal error:', err.message); + process.exit(1); + } +} + +// Simple CLI arg parser supporting `--key=value` and `--key value` forms +function parseCliArgs() { + const argv = process.argv.slice(2); + const out = {}; + for (let i = 0; i < argv.length; i++) { + const a = argv[i]; + if (!a.startsWith('--')) continue; + const eq = a.indexOf('='); + if (eq !== -1) { + const key = a.slice(2, eq); + const val = a.slice(eq + 1); + out[key] = val; + } else { + const key = a.slice(2); + const next = argv[i + 1]; + if (next && !next.startsWith('--')) { + out[key] = next; 
+        i++;
+      } else {
+        out[key] = 'true';
+      }
+    }
+  }
+  return out;
+}
+
+// Execute the job when this file is loaded by the scheduler, using CLI args if provided
+const parsedOptions = parseCliArgs();
+run(parsedOptions);
diff --git a/create-a-container/job-runner.js b/create-a-container/job-runner.js
index 644b7c97..5e88b8de 100644
--- a/create-a-container/job-runner.js
+++ b/create-a-container/job-runner.js
@@ -1,6 +1,7 @@
 #!/usr/bin/env node
 /**
  * job-runner.js
+ * - Checks ScheduledJobs and creates pending Jobs when schedule conditions are met
  * - Polls the Jobs table for pending jobs
  * - Claims a job (transactionally), sets status to 'running'
  * - Spawns the configured command and streams stdout/stderr into JobStatuses
@@ -9,6 +10,7 @@
 const { spawn } = require('child_process');
 const path = require('path');
+const parser = require('cron-parser');
 const db = require('./models');
 
 const POLL_INTERVAL_MS = parseInt(process.env.JOB_RUNNER_POLL_MS || '2000', 10);
@@ -17,6 +19,59 @@ const WORKDIR = process.env.JOB_RUNNER_CWD || process.cwd();
 let shuttingDown = false;
 // Map of jobId -> child process for active/running jobs
 const activeChildren = new Map();
+// Track last scheduled job execution time to avoid duplicate runs
+const lastScheduledExecution = new Map();
+
+async function shouldScheduledJobRun(scheduledJob) {
+  try {
+    const now = new Date();
+    const interval = parser.parseExpression(scheduledJob.schedule, { currentDate: now });
+    const lastExecution = lastScheduledExecution.get(scheduledJob.id);
+
+    // Get the most recent occurrence of the schedule (at or before now)
+    const prevExecution = interval.prev().toDate();
+    const currentMinute = new Date(now.getFullYear(), now.getMonth(), now.getDate(), now.getHours(), now.getMinutes());
+    const prevMinute = new Date(prevExecution.getFullYear(), prevExecution.getMonth(), prevExecution.getDate(), prevExecution.getHours(), prevExecution.getMinutes());
+
+    // Run when the schedule last fired within the current minute and we have not yet run it
+    if (currentMinute.getTime() === prevMinute.getTime()) {
+      if (!lastExecution || lastExecution.getTime() < currentMinute.getTime()) {
+        return true;
+      }
+    }
+    return false;
+  } catch (err) {
+    console.error(`Error parsing schedule for job ${scheduledJob.id}: ${err.message}`);
+    return false;
+  }
+}
+
+async function processScheduledJobs() {
+  try {
+    const scheduledJobs = await db.ScheduledJob.findAll();
+
+    for (const scheduledJob of scheduledJobs) {
+      if (await shouldScheduledJobRun(scheduledJob)) {
+        console.log(`JobRunner: Creating job from scheduled job ${scheduledJob.id}: ${scheduledJob.schedule}`);
+
+        try {
+          await db.Job.create({
+            command: scheduledJob.command,
+            status: 'pending',
+            createdBy: `ScheduledJob#${scheduledJob.id}`
+          });
+
+          // Mark that we've executed this scheduled job at this time
+          lastScheduledExecution.set(scheduledJob.id, new Date());
+        } catch (err) {
+          console.error(`Error creating job from scheduled job ${scheduledJob.id}:`, err);
+        }
+      }
+    }
+  } catch (err) {
+    console.error('Error processing scheduled jobs:', err);
+  }
+}
 
 async function claimPendingJob() {
   const sequelize = db.sequelize;
@@ -139,6 +194,10 @@ async function shutdownAndCancelJobs(signal) {
 async function loop() {
   if (shuttingDown) return;
   try {
+    // Check for scheduled jobs that should run (run async so it doesn't block the loop)
+    processScheduledJobs().catch(err => console.error('processScheduledJobs error', err));
+
+    // Check for pending jobs
     const job = await claimPendingJob();
     if (job) {
       // Run job but don't block polling loop; we will wait for job to update
diff --git a/create-a-container/migrations/20251203000000-create-scheduled-jobs.js b/create-a-container/migrations/20251203000000-create-scheduled-jobs.js
new file mode 100644
index 00000000..4e97fb47
--- /dev/null
+++ b/create-a-container/migrations/20251203000000-create-scheduled-jobs.js
@@ -0,0 +1,34 @@
+'use strict';
+/** @type {import('sequelize-cli').Migration} */
+module.exports = {
+  async 
up(queryInterface, Sequelize) { + await queryInterface.createTable('ScheduledJobs', { + id: { + allowNull: false, + autoIncrement: true, + primaryKey: true, + type: Sequelize.INTEGER + }, + schedule: { + type: Sequelize.STRING(255), + allowNull: false, + comment: 'Cron-style schedule expression (e.g., "0 2 * * *" for daily at 2 AM)' + }, + command: { + type: Sequelize.STRING(2000), + allowNull: false + }, + createdAt: { + allowNull: false, + type: Sequelize.DATE + }, + updatedAt: { + allowNull: false, + type: Sequelize.DATE + } + }); + }, + async down(queryInterface, Sequelize) { + await queryInterface.dropTable('ScheduledJobs'); + } +}; diff --git a/create-a-container/migrations/20251203000001-add-default-storage-to-nodes.js b/create-a-container/migrations/20251203000001-add-default-storage-to-nodes.js new file mode 100644 index 00000000..ee66ef60 --- /dev/null +++ b/create-a-container/migrations/20251203000001-add-default-storage-to-nodes.js @@ -0,0 +1,14 @@ +'use strict'; +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.addColumn('Nodes', 'defaultStorage', { + type: Sequelize.STRING(255), + allowNull: true, + comment: 'Default storage target for container templates and images' + }); + }, + async down(queryInterface, Sequelize) { + await queryInterface.removeColumn('Nodes', 'defaultStorage'); + } +}; diff --git a/create-a-container/models/node.js b/create-a-container/models/node.js index 7181d9d9..126c0864 100644 --- a/create-a-container/models/node.js +++ b/create-a-container/models/node.js @@ -45,6 +45,11 @@ module.exports = (sequelize, DataTypes) => { tlsVerify: { type: DataTypes.BOOLEAN, allowNull: true + }, + defaultStorage: { + type: DataTypes.STRING(255), + allowNull: true, + comment: 'Default storage target for container templates and images' } }, { sequelize, diff --git a/create-a-container/models/scheduled-job.js b/create-a-container/models/scheduled-job.js new file mode 
100644 index 00000000..c684b113 --- /dev/null +++ b/create-a-container/models/scheduled-job.js @@ -0,0 +1,24 @@ +'use strict'; +const { Model } = require('sequelize'); +module.exports = (sequelize, DataTypes) => { + class ScheduledJob extends Model { + static associate(models) { + // ScheduledJob can be associated with created Jobs if needed + } + } + ScheduledJob.init({ + schedule: { + type: DataTypes.STRING(255), + allowNull: false, + comment: 'Cron-style schedule expression (e.g., "0 2 * * *" for daily at 2 AM)' + }, + command: { + type: DataTypes.STRING(2000), + allowNull: false + } + }, { + sequelize, + modelName: 'ScheduledJob' + }); + return ScheduledJob; +}; diff --git a/create-a-container/package.json b/create-a-container/package.json index 3c0d6e24..b923259c 100644 --- a/create-a-container/package.json +++ b/create-a-container/package.json @@ -13,6 +13,7 @@ "argon2": "^0.44.0", "axios": "^1.12.2", "connect-flash": "^0.1.1", + "cron-parser": "^4.1.0", "dotenv": "^17.2.3", "ejs": "^3.1.10", "express": "^5.2.1", diff --git a/create-a-container/seeders/20251203000000-seed-oci-build-job.js b/create-a-container/seeders/20251203000000-seed-oci-build-job.js new file mode 100644 index 00000000..d59258b9 --- /dev/null +++ b/create-a-container/seeders/20251203000000-seed-oci-build-job.js @@ -0,0 +1,21 @@ +'use strict'; + +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.bulkInsert('ScheduledJobs', [ + { + schedule: '0 2 * * *', + command: 'node create-a-container/bin/oci-build-push-pull.js', + createdAt: new Date(), + updatedAt: new Date() + } + ], {}); + }, + + async down(queryInterface, Sequelize) { + await queryInterface.bulkDelete('ScheduledJobs', { + command: { [Sequelize.Op.like]: '%oci-build-push-pull%' } + }, {}); + } +}; diff --git a/create-a-container/templates/debian.Dockerfile b/create-a-container/templates/debian.Dockerfile new file mode 100644 index 00000000..7db2dcb3 
--- /dev/null +++ b/create-a-container/templates/debian.Dockerfile @@ -0,0 +1,31 @@ +# Debian OCI image template using Proxmox minimal LXC rootfs +# +# This multi-stage Dockerfile downloads Proxmox's minimal Debian LXC template +# (tar.zst) and unpacks it into the final image rootfs. This produces a +# filesystem layout suitable for OCI/LXC usage and avoids depending on a Debian +# base image that may differ from Proxmox's optimized template. + +FROM debian:13 AS builder +ARG URL="http://download.proxmox.com/images/system/debian-13-standard_13.1-2_amd64.tar.zst" +ARG DOMAIN +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + tar \ + zstd \ + ca-certificates && \ + rm -rf /var/lib/apt/lists/* && \ + mkdir -p /rootfs/usr/local/bin && \ + curl -fsSL "$URL" | tar --zstd -x -C /rootfs && \ + curl -fsSL https://pown.sh/ -o /tmp/pown.sh && \ + chmod +x /tmp/pown.sh && \ + cp /tmp/pown.sh /rootfs/usr/local/bin/pown.sh && \ + chmod +x /rootfs/usr/local/bin/pown.sh && \ + chroot /rootfs /usr/local/bin/pown.sh "$DOMAIN" + +# Final image uses the unpacked rootfs +FROM scratch +COPY --from=builder /rootfs / + +# Optional: allow customizations at build-time (example: run site installer) +ARG DOMAIN +RUN true diff --git a/create-a-container/utils/proxmox-api.js b/create-a-container/utils/proxmox-api.js index 5a8c6690..5ca5dc1e 100644 --- a/create-a-container/utils/proxmox-api.js +++ b/create-a-container/utils/proxmox-api.js @@ -234,6 +234,86 @@ class ProxmoxApi { return response.data.data; } + /** + * Pull an OCI image into node storage using Proxmox's pull-image endpoint + * @param {string} nodeName + * @param {string} image - full image ref (registry/repo:tag) + * @param {string} storage + * @returns {Promise} - UPID task id + */ + async pullImage(nodeName, image, storage) { + // Use global fetch. 
+
+    const url = `${this.baseUrl}/api2/json/nodes/${encodeURIComponent(nodeName)}/pull-image`;
+    const headers = Object.assign({}, this.options && this.options.headers ? this.options.headers : {});
+    headers['Content-Type'] = 'application/json';
+
+    // Use httpsAgent from options if provided; consumer is responsible for creating it
+    const agent = this.options && this.options.httpsAgent ? this.options.httpsAgent : undefined;
+
+    const resp = await fetch(url, {
+      method: 'POST',
+      headers,
+      body: JSON.stringify({ image, storage }),
+      // NOTE(review): Node's built-in (undici) fetch ignores 'agent'; TLS options may not apply — verify
+      agent
+    });
+
+    const body = await resp.json().catch(() => null);
+    if (!resp.ok) {
+      const errMsg = body && (body.error || JSON.stringify(body)) ? (body.error || JSON.stringify(body)) : resp.statusText;
+      throw new Error(`Proxmox pull-image failed: ${resp.status} ${errMsg}`);
+    }
+
+    return body && body.data ? body.data : null;
+  }
+
+  /**
+   * Choose a suitable storage for container templates on a node.
+   * @param {string} nodeName
+   * @param {string|null} preferredStorage
+   * @returns {Promise<string|null>} - storage name or null if none found
+   */
+  async chooseStorageForVztmpl(nodeName, preferredStorage = null) {
+    const storages = await this.datastores(nodeName, 'vztmpl');
+    if (!storages || storages.length === 0) return null;
+    if (preferredStorage) {
+      const found = storages.find(s => s.storage === preferredStorage);
+      if (found) return found.storage;
+    }
+    return storages[0].storage;
+  }
+
+  /**
+   * Pull an image and wait for task completion. 
+ * @param {string} nodeName + * @param {string} image + * @param {string} storage + * @param {object} [opts] + * @param {number} [opts.pollIntervalMs] + * @param {number} [opts.maxWaitMs] + * @returns {Promise} - true if succeeded + */ + async pullImageAndWait(nodeName, image, storage, opts = {}) { + const upid = await this.pullImage(nodeName, image, storage); + const pollIntervalMs = opts.pollIntervalMs || parseInt(process.env.PULL_POLL_MS || '5000', 10); + const maxWaitMs = opts.maxWaitMs || parseInt(process.env.PULL_MAX_WAIT_MS || '600000', 10); + const start = Date.now(); + let statusObj = null; + + while (Date.now() - start < maxWaitMs) { + statusObj = await this.taskStatus(nodeName, upid); + if (statusObj && statusObj.status === 'stopped') break; + await new Promise(r => setTimeout(r, pollIntervalMs)); + } + + if (!statusObj) throw new Error(`Could not retrieve status for task ${upid}`); + if (statusObj.exitstatus && statusObj.exitstatus !== 'OK') { + throw new Error(`Task ${upid} failed with exitstatus=${statusObj.exitstatus}`); + } + return true; + } + /** * Delete a container * @param {string} nodeName diff --git a/create-a-container/views/nodes/form.ejs b/create-a-container/views/nodes/form.ejs index 79cad2be..5d65897a 100644 --- a/create-a-container/views/nodes/form.ejs +++ b/create-a-container/views/nodes/form.ejs @@ -98,6 +98,19 @@
Whether to verify TLS certificates when connecting to this node
+
+ + +
Default storage target for container templates and images (optional)
+
+