From 7d38be7b55e3bcebe2c0b9911df2f99f16f46c2a Mon Sep 17 00:00:00 2001
From: Carter Myers <206+cmyers@users.noreply.github.mieweb.com>
Date: Wed, 3 Dec 2025 16:00:04 -0700
Subject: [PATCH 1/5] Add scheduled job support and OCI image pull job
Introduces ScheduledJob model, migrations, and seeder for cron-based job scheduling. Updates job-runner to process scheduled jobs and create pending jobs when schedule conditions are met. Adds support for defaultStorage on nodes, including migration, model, and form update. Implements oci-build-job utility to pull OCI container images to Proxmox nodes using API credentials and storage configuration.
---
create-a-container/job-runner.js | 59 +++++
.../20251203000000-create-scheduled-jobs.js | 34 +++
...1203000001-add-default-storage-to-nodes.js | 14 ++
create-a-container/models/node.js | 5 +
create-a-container/models/scheduled-job.js | 24 ++
create-a-container/package.json | 1 +
.../20251203000000-seed-oci-build-job.js | 21 ++
create-a-container/utils/oci-build-job.js | 227 ++++++++++++++++++
create-a-container/views/nodes/form.ejs | 13 +
9 files changed, 398 insertions(+)
create mode 100644 create-a-container/migrations/20251203000000-create-scheduled-jobs.js
create mode 100644 create-a-container/migrations/20251203000001-add-default-storage-to-nodes.js
create mode 100644 create-a-container/models/scheduled-job.js
create mode 100644 create-a-container/seeders/20251203000000-seed-oci-build-job.js
create mode 100644 create-a-container/utils/oci-build-job.js
diff --git a/create-a-container/job-runner.js b/create-a-container/job-runner.js
index 644b7c97..4c63874d 100644
--- a/create-a-container/job-runner.js
+++ b/create-a-container/job-runner.js
@@ -1,6 +1,7 @@
#!/usr/bin/env node
/**
* job-runner.js
+ * - Checks ScheduledJobs and creates pending Jobs when schedule conditions are met
* - Polls the Jobs table for pending jobs
* - Claims a job (transactionally), sets status to 'running'
* - Spawns the configured command and streams stdout/stderr into JobStatuses
@@ -9,6 +10,7 @@
const { spawn } = require('child_process');
const path = require('path');
+const parser = require('cron-parser');
const db = require('./models');
const POLL_INTERVAL_MS = parseInt(process.env.JOB_RUNNER_POLL_MS || '2000', 10);
@@ -17,6 +19,59 @@ const WORKDIR = process.env.JOB_RUNNER_CWD || process.cwd();
let shuttingDown = false;
// Map of jobId -> child process for active/running jobs
const activeChildren = new Map();
+// Track last scheduled job execution time to avoid duplicate runs
+const lastScheduledExecution = new Map();
+
+async function shouldScheduledJobRun(scheduledJob) {
+ try {
+ const interval = parser.parseExpression(scheduledJob.schedule);
+ const now = new Date();
+ const lastExecution = lastScheduledExecution.get(scheduledJob.id);
+
+ // Get the next occurrence from the schedule
+ const nextExecution = interval.next().toDate();
+ const currentMinute = new Date(now.getFullYear(), now.getMonth(), now.getDate(), now.getHours(), now.getMinutes());
+ const nextMinute = new Date(nextExecution.getFullYear(), nextExecution.getMonth(), nextExecution.getDate(), nextExecution.getHours(), nextExecution.getMinutes());
+
+ // If the next scheduled time is now and we haven't executed in this minute
+ if (currentMinute.getTime() === nextMinute.getTime()) {
+ if (!lastExecution || lastExecution.getTime() < currentMinute.getTime()) {
+ return true;
+ }
+ }
+ return false;
+ } catch (err) {
+ console.error(`Error parsing schedule for job ${scheduledJob.id}: ${err.message}`);
+ return false;
+ }
+}
+
+async function processScheduledJobs() { // invoked once per poll tick of the job-runner loop
+  try {
+    const scheduledJobs = await db.ScheduledJob.findAll(); // re-read every tick so schedule edits take effect without a restart
+
+    for (const scheduledJob of scheduledJobs) {
+      if (await shouldScheduledJobRun(scheduledJob)) {
+        console.log(`JobRunner: Creating job from scheduled job ${scheduledJob.id}: ${scheduledJob.schedule}`);
+
+        try {
+          await db.Job.create({
+            command: scheduledJob.command,
+            status: 'pending',
+            createdBy: `ScheduledJob#${scheduledJob.id}` // provenance marker: which schedule spawned this job
+          });
+
+          // Mark that we've executed this scheduled job at this time
+          lastScheduledExecution.set(scheduledJob.id, new Date()); // NOTE(review): in-memory only — a runner restart or a second runner instance could re-create the job within the same minute; confirm this is acceptable
+        } catch (err) {
+          console.error(`Error creating job from scheduled job ${scheduledJob.id}:`, err);
+        }
+      }
+    }
+  } catch (err) {
+    console.error('Error processing scheduled jobs:', err);
+  }
+}
async function claimPendingJob() {
const sequelize = db.sequelize;
@@ -139,6 +194,10 @@ async function shutdownAndCancelJobs(signal) {
async function loop() {
if (shuttingDown) return;
try {
+ // Check for scheduled jobs that should run
+ await processScheduledJobs();
+
+ // Check for pending jobs
const job = await claimPendingJob();
if (job) {
// Run job but don't block polling loop; we will wait for job to update
diff --git a/create-a-container/migrations/20251203000000-create-scheduled-jobs.js b/create-a-container/migrations/20251203000000-create-scheduled-jobs.js
new file mode 100644
index 00000000..4e97fb47
--- /dev/null
+++ b/create-a-container/migrations/20251203000000-create-scheduled-jobs.js
@@ -0,0 +1,34 @@
+'use strict';
+/** @type {import('sequelize-cli').Migration} */
+module.exports = {
+  async up(queryInterface, Sequelize) {
+    await queryInterface.createTable('ScheduledJobs', { // one row per recurring job definition read by job-runner
+      id: {
+        allowNull: false,
+        autoIncrement: true,
+        primaryKey: true,
+        type: Sequelize.INTEGER
+      },
+      schedule: {
+        type: Sequelize.STRING(255),
+        allowNull: false,
+        comment: 'Cron-style schedule expression (e.g., "0 2 * * *" for daily at 2 AM)'
+      },
+      command: {
+        type: Sequelize.STRING(2000), // shell command handed verbatim to the job-runner when the schedule fires
+        allowNull: false
+      },
+      createdAt: {
+        allowNull: false,
+        type: Sequelize.DATE
+      },
+      updatedAt: {
+        allowNull: false,
+        type: Sequelize.DATE
+      }
+    });
+  },
+  async down(queryInterface, Sequelize) {
+    await queryInterface.dropTable('ScheduledJobs'); // destructive: drops all scheduled job rows
+  }
+};
diff --git a/create-a-container/migrations/20251203000001-add-default-storage-to-nodes.js b/create-a-container/migrations/20251203000001-add-default-storage-to-nodes.js
new file mode 100644
index 00000000..ee66ef60
--- /dev/null
+++ b/create-a-container/migrations/20251203000001-add-default-storage-to-nodes.js
@@ -0,0 +1,14 @@
+'use strict';
+/** @type {import('sequelize-cli').Migration} */
+module.exports = {
+  async up(queryInterface, Sequelize) {
+    await queryInterface.addColumn('Nodes', 'defaultStorage', {
+      type: Sequelize.STRING(255),
+      allowNull: true, // nullable so existing Nodes rows migrate without needing a value
+      comment: 'Default storage target for container templates and images'
+    });
+  },
+  async down(queryInterface, Sequelize) {
+    await queryInterface.removeColumn('Nodes', 'defaultStorage');
+  }
+};
diff --git a/create-a-container/models/node.js b/create-a-container/models/node.js
index 7181d9d9..126c0864 100644
--- a/create-a-container/models/node.js
+++ b/create-a-container/models/node.js
@@ -45,6 +45,11 @@ module.exports = (sequelize, DataTypes) => {
tlsVerify: {
type: DataTypes.BOOLEAN,
allowNull: true
+ },
+ defaultStorage: {
+ type: DataTypes.STRING(255),
+ allowNull: true,
+ comment: 'Default storage target for container templates and images'
}
}, {
sequelize,
diff --git a/create-a-container/models/scheduled-job.js b/create-a-container/models/scheduled-job.js
new file mode 100644
index 00000000..c684b113
--- /dev/null
+++ b/create-a-container/models/scheduled-job.js
@@ -0,0 +1,24 @@
+'use strict';
+const { Model } = require('sequelize');
+module.exports = (sequelize, DataTypes) => {
+ class ScheduledJob extends Model {
+ static associate(models) {
+ // ScheduledJob can be associated with created Jobs if needed
+ }
+ }
+ ScheduledJob.init({
+ schedule: {
+ type: DataTypes.STRING(255),
+ allowNull: false,
+ comment: 'Cron-style schedule expression (e.g., "0 2 * * *" for daily at 2 AM)'
+ },
+ command: {
+ type: DataTypes.STRING(2000),
+ allowNull: false
+ }
+ }, {
+ sequelize,
+ modelName: 'ScheduledJob'
+ });
+ return ScheduledJob;
+};
diff --git a/create-a-container/package.json b/create-a-container/package.json
index 3c0d6e24..b923259c 100644
--- a/create-a-container/package.json
+++ b/create-a-container/package.json
@@ -13,6 +13,7 @@
"argon2": "^0.44.0",
"axios": "^1.12.2",
"connect-flash": "^0.1.1",
+ "cron-parser": "^4.1.0",
"dotenv": "^17.2.3",
"ejs": "^3.1.10",
"express": "^5.2.1",
diff --git a/create-a-container/seeders/20251203000000-seed-oci-build-job.js b/create-a-container/seeders/20251203000000-seed-oci-build-job.js
new file mode 100644
index 00000000..ecfd95eb
--- /dev/null
+++ b/create-a-container/seeders/20251203000000-seed-oci-build-job.js
@@ -0,0 +1,21 @@
+'use strict';
+
+/** @type {import('sequelize-cli').Migration} */
+module.exports = {
+  async up(queryInterface, Sequelize) {
+    await queryInterface.bulkInsert('ScheduledJobs', [ // NOTE(review): re-running this seeder inserts a duplicate row — no uniqueness guard
+      {
+        schedule: '0 2 * * *', // daily at 02:00 server-local time
+        command: 'node -e "require(\'./utils/oci-build-job\').run()"', // relative require — assumes runner CWD is create-a-container/; confirm JOB_RUNNER_CWD
+        createdAt: new Date(),
+        updatedAt: new Date()
+      }
+    ], {});
+  },
+
+  async down(queryInterface, Sequelize) {
+    await queryInterface.bulkDelete('ScheduledJobs', {
+      command: { [Sequelize.Op.like]: '%oci-build-job%' }
+    }, {});
+  }
+};
diff --git a/create-a-container/utils/oci-build-job.js b/create-a-container/utils/oci-build-job.js
new file mode 100644
index 00000000..bbf753b6
--- /dev/null
+++ b/create-a-container/utils/oci-build-job.js
@@ -0,0 +1,227 @@
+#!/usr/bin/env node
+/**
+ * oci-build-job.js
+ *
+ * This utility is called by ScheduledJob to pull and configure OCI LXC container images
+ * (Debian 13 and Rocky 9) for Proxmox 9+.
+ *
+ * It reads configuration from the database (Nodes model) to get API URLs and tokens,
+ * then pulls OCI images from a container registry and makes them available in Proxmox storage.
+ */
+
+const axios = require('axios');
+const db = require('../models');
+const ProxmoxApi = require('./proxmox-api');
+
+/**
+ * Get list of available OCI images to pull
+ */
+function getOciImages() {
+  return [
+    {
+      name: 'debian13',
+      registry: process.env.OCI_REGISTRY || 'ghcr.io', // single registry env override applies to every image
+      image: 'mieweb/opensource-server/debian13',
+      tag: process.env.OCI_IMAGE_TAG || 'latest' // one tag env var shared by all images
+    },
+    {
+      name: 'rocky9',
+      registry: process.env.OCI_REGISTRY || 'ghcr.io',
+      image: 'mieweb/opensource-server/rocky9',
+      tag: process.env.OCI_IMAGE_TAG || 'latest'
+    }
+  ];
+}
+
+/**
+ * Parse OCI image reference into components
+ */
+function parseImageReference(imageSpec) {
+ // imageSpec format: registry/image:tag
+ const parts = imageSpec.split('/');
+ const registry = parts[0];
+ const remaining = parts.slice(1).join('/');
+ const [imagePath, tag] = remaining.split(':');
+
+ return { registry, imagePath, tag: tag || 'latest' };
+}
+
+/**
+ * Pull an OCI image to a Proxmox node
+ */
+async function pullImageToNode(node, imageSpec) {
+ console.log(`[OCI Build] Pulling image ${imageSpec} to node ${node.name}`);
+
+ if (!node.apiUrl || !node.tokenId || !node.secret) {
+ console.warn(`[OCI Build] Warning: Node ${node.name} missing API credentials, skipping`);
+ return false;
+ }
+
+ try {
+ const api = new ProxmoxApi(node.apiUrl, node.tokenId, node.secret, {
+ httpsAgent: { rejectUnauthorized: node.tlsVerify !== false }
+ });
+
+ // Get list of storages on the node
+ const storages = await api.datastores(node.name, 'vztmpl');
+
+ // Choose storage (prefer defaultStorage if set, otherwise use first available)
+ let targetStorage = null;
+ if (node.defaultStorage) {
+ targetStorage = storages.find(s => s.storage === node.defaultStorage);
+ }
+ if (!targetStorage && storages.length > 0) {
+ targetStorage = storages[0];
+ }
+
+ if (!targetStorage) {
+ console.warn(`[OCI Build] No suitable storage found on node ${node.name}, skipping`);
+ return false;
+ }
+
+ console.log(`[OCI Build] Using storage ${targetStorage.storage} on ${node.name}`);
+
+ // Call the Proxmox API to pull the OCI image
+ // This uses the pct pull-image API endpoint available in Proxmox 9+
+ const pullResponse = await axios.post(
+ `${node.apiUrl}/api2/json/nodes/${encodeURIComponent(node.name)}/pull-image`,
+ {
+ image: imageSpec,
+ storage: targetStorage.storage
+ },
+ {
+ headers: {
+ 'Authorization': `PVEAPIToken=${node.tokenId}=${node.secret}`
+ },
+ httpsAgent: { rejectUnauthorized: node.tlsVerify !== false }
+ }
+ );
+
+ // The response contains a task ID that we should monitor
+ const upid = pullResponse.data.data;
+ console.log(`[OCI Build] Image pull started on ${node.name}, task: ${upid}`);
+
+ // Optionally wait for task completion
+ await waitForTaskCompletion(node, upid);
+
+ console.log(`[OCI Build] Successfully pulled ${imageSpec} to ${node.name}`);
+ return true;
+ } catch (err) {
+ console.error(`[OCI Build] Error pulling image to ${node.name}: ${err.message}`);
+ return false;
+ }
+}
+
+/**
+ * Wait for a Proxmox task to complete
+ */
+async function waitForTaskCompletion(node, upid, maxWaitMs = 600000) {
+ const startTime = Date.now();
+ const pollIntervalMs = 5000;
+
+ try {
+ const api = new ProxmoxApi(node.apiUrl, node.tokenId, node.secret, {
+ httpsAgent: { rejectUnauthorized: node.tlsVerify !== false }
+ });
+
+ while (Date.now() - startTime < maxWaitMs) {
+ try {
+ const taskStatus = await api.taskStatus(node.name, upid);
+
+ if (taskStatus.status === 'stopped') {
+ if (taskStatus.exitstatus === 'OK') {
+ console.log(`[OCI Build] Task ${upid} completed successfully`);
+ return true;
+ } else {
+ throw new Error(`Task failed: ${taskStatus.exitstatus}`);
+ }
+ }
+
+ // Task still running, wait and retry
+ await new Promise(resolve => setTimeout(resolve, pollIntervalMs));
+ } catch (err) {
+ if (err.response?.status === 404) {
+ // Task might have completed and been cleaned up
+ return true;
+ }
+ throw err;
+ }
+ }
+
+ throw new Error(`Task did not complete within ${maxWaitMs}ms`);
+ } catch (err) {
+ console.warn(`[OCI Build] Could not verify task completion: ${err.message}. Continuing...`);
+ return true; // Don't fail, the task might still complete
+ }
+}
+
+/**
+ * Main job execution
+ */
+async function run() {
+  try {
+    console.log('[OCI Build] Starting OCI container image pull job');
+
+    // Ensure database connection
+    await db.sequelize.authenticate(); // fail fast if the database is unreachable
+    console.log('[OCI Build] Database connected');
+
+    // Get all nodes
+    const nodes = await db.Node.findAll(); // every configured Proxmox node receives every image
+    if (!nodes || nodes.length === 0) {
+      throw new Error('No Proxmox nodes configured in database');
+    }
+
+    console.log(`[OCI Build] Found ${nodes.length} node(s) to update`);
+
+    // Get list of images to pull
+    const images = getOciImages();
+    console.log(`[OCI Build] Will pull ${images.length} image(s): ${images.map(i => i.name).join(', ')}`);
+
+    // Pull each image to each node
+    let successCount = 0;
+    let failureCount = 0;
+
+    for (const image of images) {
+      const imageRef = `${image.registry}/${image.image}:${image.tag}`;
+      console.log(`\n[OCI Build] Processing image: ${imageRef}`);
+
+      for (const node of nodes) {
+        try {
+          const success = await pullImageToNode(node, imageRef); // sequential on purpose? pulls are not parallelized across nodes
+          if (success) {
+            successCount++;
+          } else {
+            failureCount++;
+          }
+        } catch (err) {
+          console.error(`[OCI Build] Exception pulling to ${node.name}: ${err.message}`);
+          failureCount++;
+        }
+      }
+    }
+
+    console.log(`\n[OCI Build] Job completed - ${successCount} successful, ${failureCount} failed`);
+
+    if (failureCount === 0) {
+      console.log('[OCI Build] OCI image pull job completed successfully');
+      process.exit(0); // NOTE(review): run() exits the whole process — fine when spawned as a job, surprising for importers
+    } else if (successCount > 0) {
+      console.log('[OCI Build] OCI image pull job completed with some failures');
+      process.exit(0); // Partial success is acceptable
+    } else {
+      throw new Error('All image pulls failed');
+    }
+  } catch (err) {
+    console.error('[OCI Build] Fatal error:', err.message);
+    process.exit(1);
+  }
+}
+
+module.exports = { run };
+
+// If called directly as a script
+if (require.main === module) {
+  run();
+}
+
diff --git a/create-a-container/views/nodes/form.ejs b/create-a-container/views/nodes/form.ejs
index 79cad2be..5d65897a 100644
--- a/create-a-container/views/nodes/form.ejs
+++ b/create-a-container/views/nodes/form.ejs
@@ -98,6 +98,19 @@
Whether to verify TLS certificates when connecting to this node
+
+
+
+
Default storage target for container templates and images (optional)
+
+
Cancel