diff --git a/tools/dev/opencode-dev-test-helpers.ts b/tools/dev/opencode-dev-test-helpers.ts
new file mode 100644
index 0000000..0a9ab4b
--- /dev/null
+++ b/tools/dev/opencode-dev-test-helpers.ts
@@ -0,0 +1,144 @@
+import fs from 'fs';
+import path from 'path';
+
+import { parse as parseJsonC, modify as modifyJsonC, applyEdits } from 'jsonc-parser';
+
+/**
+ * Read a JSONC file, returning the parsed value and the raw source text.
+ */
+export function readJsonc(file: string): { json: any; raw: string } {
+  const raw = fs.readFileSync(file, 'utf8');
+  const errors: any[] = [];
+  const json = parseJsonC(raw, errors, { allowTrailingComma: true });
+  if (errors.length) throw new Error(`Failed to parse JSONC at ${file}: ${JSON.stringify(errors)}`);
+  return { json, raw };
+}
+
+/**
+ * Write the `plugin` array back to a JSONC file using minimal edits so comments are preserved.
+ */
+export function writeJsonc(file: string, originalRaw: string | null, obj: any) {
+  const base = originalRaw || '';
+  const edits = modifyJsonC(base, ['plugin'], obj.plugin || [], {
+    formattingOptions: { insertSpaces: true, tabSize: 2 },
+  } as any);
+  const newText = applyEdits(base, edits);
+  fs.writeFileSync(file, newText, 'utf8');
+}
+
+/**
+ * Create a symlink from linkPath to target, falling back to a recursive copy when symlinking fails.
+ */
+export async function createSymlink(target: string, linkPath: string, forceFail = false) {
+  try {
+    // Simulate symlink failure when forceFail is true
+    if (forceFail) throw new Error('simulated symlink failure');
+    try {
+      await fs.promises.lstat(linkPath);
+      await fs.promises.rm(linkPath, { recursive: true });
+    } catch {}
+    await fs.promises.symlink(target, linkPath, 'junction');
+  } catch (err) {
+    // fallback to copy
+    await copyDir(target, linkPath);
+  }
+}
+
+/**
+ * Recursively copy a directory tree from src to dest.
+ */
+export async function copyDir(src: string, dest: string) {
+  await fs.promises.mkdir(dest, { recursive: true });
+  const entries = await fs.promises.readdir(src, { withFileTypes: true });
+  for (const e of entries) {
+    const srcPath = path.join(src, e.name);
+    const destPath = path.join(dest, e.name);
+    if (e.isDirectory()) await copyDir(srcPath, destPath);
+    else await fs.promises.copyFile(srcPath, destPath);
+  }
+}
+
+/**
+ * Return the most recent mtime (in ms) of any file under dir, or 0 if nothing can be read.
+ */
+export function getLatestMtime(dir: string): number {
+  let latest = 0;
+  try {
+    const stack = [dir];
+    while (stack.length) {
+      const cur = stack.pop() as string;
+      const entries = fs.readdirSync(cur, { withFileTypes: true });
+      for (const e of entries) {
+        const p = path.join(cur, e.name);
+        if (e.isDirectory()) stack.push(p);
+        else {
+          try {
+            const s = fs.statSync(p);
+            const m = s.mtimeMs;
+            if (m > latest) latest = m;
+          } catch {}
+        }
+      }
+    }
+  } catch {}
+  return latest;
+}
+
+// network helpers for tests (mirror production logic)
+import net from 'net';
+
+/**
+ * Probe the host/port of disposeUrl with a raw TCP connection and report whether something is listening.
+ */
+export async function isServerListening(disposeUrl: string, timeoutMs = 500): Promise<boolean> {
+  try {
+    const u = new URL(disposeUrl);
+    const port = u.port ? Number(u.port) : u.protocol === 'https:' ? 443 : 80;
+    const host = u.hostname;
+    return await new Promise<boolean>((resolve) => {
+      const socket = net.connect({ host, port }, () => {
+        socket.destroy();
+        resolve(true);
+      });
+      socket.on('error', () => {
+        try {
+          socket.destroy();
+        } catch {}
+        resolve(false);
+      });
+      socket.setTimeout(timeoutMs, () => {
+        try {
+          socket.destroy();
+        } catch {}
+        resolve(false);
+      });
+    });
+  } catch (err) {
+    return false;
+  }
+}
+
+/**
+ * POST to the dispose URL with a timeout and simple retry/backoff; returns true once a 2xx response is received.
+ */
+export async function tryDispose(url: string, timeoutMs = 2000, retries = 2): Promise<boolean> {
+  if (!url) return false;
+  for (let attempt = 0; attempt <= retries; attempt++) {
+    try {
+      const controller = new AbortController();
+      const id = setTimeout(() => controller.abort(), timeoutMs);
+      const res = await fetch(url, {
+        method: 'POST',
+        signal: controller.signal,
+        headers: { 'content-type': 'application/json' },
+        body: '{}',
+      });
+      clearTimeout(id);
+      if (res.ok) return true;
+    } catch (err) {
+      // swallow errors and retry after a short backoff
+    }
+    await new Promise((r) => setTimeout(r, 200 * (attempt + 1)));
+  }
+  return false;
+}
diff --git a/tools/dev/opencode-dev.test.ts b/tools/dev/opencode-dev.test.ts
new file mode 100644
index 0000000..a4cbfab
--- /dev/null
+++ b/tools/dev/opencode-dev.test.ts
@@ -0,0 +1,129 @@
+import fs from 'fs';
+import { tmpdir } from 'os';
+import path from 'path';
+
+import { describe, it, expect, afterEach, beforeEach } from 'bun:test';
+
+
+import {
+  readJsonc as readJsoncUtil,
+  writeJsonc as writeJsoncUtil,
+  createSymlink as createSymlinkUtil,
+  getLatestMtime as getLatestMtimeUtil,
+} from './opencode-dev-test-helpers';
+
+const WORK = path.join(tmpdir(), 'opencode-dev-test');
+
+beforeEach(() => {
+  try {
+    fs.rmSync(WORK, { recursive: true, force: true });
+  } catch {}
+  fs.mkdirSync(WORK, { recursive: true });
+});
+
+afterEach(() => {
+  try {
+    fs.rmSync(WORK, { recursive: true, force: true });
+  } catch {}
+});
+
+describe('JSONC edit preserves comments', () => {
+  it('reads and writes JSONC keeping comments', () => {
+    const file = path.join(WORK, 'opencode.jsonc');
+    const raw = `// top comment\n{\n // plugins section\n "plugin": [\n // existing entry\n "file:///old/index.js"\n ]\n}\n`;
+    fs.writeFileSync(file, raw, 'utf8');
+    const { json, raw: before } = readJsoncUtil(file);
+    expect(Array.isArray(json.plugin)).toBe(true);
+    // add a plugin
+    json.plugin.push('file:///new/index.js');
+    writeJsoncUtil(file, before, json);
+    const afterRaw = fs.readFileSync(file, 'utf8');
+    expect(afterRaw).toContain('// top comment');
+    expect(afterRaw).toContain('// plugins section');
+    expect(afterRaw).toContain('file:///new/index.js');
+  });
+});
+
+describe('symlink fallback copy behavior', () => {
+  it('falls back to copy when symlink fails', async () => {
+    const src = path.join(WORK, 'src');
+    const dest = path.join(WORK, 'dest');
+    fs.mkdirSync(src, { recursive: true });
+    fs.writeFileSync(path.join(src, 'index.js'), 'console.log(1)');
+    // Simulate symlink failure by making fs.symlink throw via helper
+    await expect(createSymlinkUtil(src, dest, true)).resolves.toBeUndefined();
+    expect(fs.existsSync(path.join(dest, 'index.js'))).toBe(true);
+  });
+});
+
+describe('getLatestMtime detects changes', () => {
+  it('returns increasing mtime after file change', async () => {
+    const dir = path.join(WORK, 'd');
+    fs.mkdirSync(dir, { recursive: true });
+    const f = path.join(dir, 'a.txt');
+    fs.writeFileSync(f, 'a');
+    const t1 = getLatestMtimeUtil(dir);
+    await new Promise((r) => setTimeout(r, 50));
+    fs.writeFileSync(f, 'b');
+    const t2 = getLatestMtimeUtil(dir);
expect(t2).toBeGreaterThanOrEqual(t1); + }); +}); + +// Tests for opencode-dev network helpers +import net from 'net'; +import http from 'http'; + +import { isServerListening as isServerListeningUtil, tryDispose as tryDisposeUtil } from './opencode-dev-test-helpers'; + +describe('network helpers', () => { + it('isServerListening returns true for a listening TCP port', async () => { + const server = net.createServer((s) => s.end()); + await new Promise((resolve) => server.listen(0, '127.0.0.1', () => resolve())); + const addr = server.address() as net.AddressInfo; + const url = `http://127.0.0.1:${addr.port}/instance/dispose`; + const ok = await isServerListeningUtil(url, 500); + expect(ok).toBe(true); + server.close(); + }); + + it('isServerListening returns false for closed port', async () => { + // pick a high port that's likely free; bind and close to get the port, then test + const server = net.createServer(); + await new Promise((resolve) => server.listen(0, '127.0.0.1', () => resolve())); + const addr = server.address() as net.AddressInfo; + const port = addr.port; + server.close(); + const url = `http://127.0.0.1:${port}/instance/dispose`; + const ok = await isServerListeningUtil(url, 200); + expect(ok).toBe(false); + }); + + it('tryDispose succeeds against POST endpoint and fails on timeout', async () => { + // start a small HTTP server that responds to POST + const server = http.createServer((req, res) => { + if (req.method === 'POST' && req.url === '/instance/dispose') { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end('{}'); + } else { + res.writeHead(404); + res.end(); + } + }); + await new Promise((resolve) => server.listen(0, '127.0.0.1', () => resolve())); + const addr = server.address() as net.AddressInfo; + const url = `http://127.0.0.1:${addr.port}/instance/dispose`; + const ok = await tryDisposeUtil(url, 1000, 1); + expect(ok).toBe(true); + server.close(); + + // now test timeout/path failure - point to a port with no listener + const server2 = net.createServer(); + await new Promise((resolve) => server2.listen(0, '127.0.0.1', () => resolve())); + const port2 = (server2.address() as net.AddressInfo).port; + server2.close(); + const badUrl = `http://127.0.0.1:${port2}/instance/dispose`; + const ok2 = await tryDisposeUtil(badUrl, 200, 0); + expect(ok2).toBe(false); + }); +}); diff --git a/tools/dev/opencode-dev.ts b/tools/dev/opencode-dev.ts new file mode 100644 index 0000000..bb20ecc --- /dev/null +++ b/tools/dev/opencode-dev.ts @@ -0,0 +1,432 @@ +#!/usr/bin/env bunx tsx +import { spawn, ChildProcess } from 'child_process'; +import fs from 'fs'; +import net from 'net'; +import path from 'path'; + +type Opts = { + plugins: string[]; + symlinkRoot: string; + apply: boolean; + revert: boolean; + workspaceRoot: string; + disposeEnabled: boolean; + disposeUrl: string; +}; + +function parseArgs(): Opts { + const argv = process.argv.slice(2); + const plugins: string[] = []; + let symlinkRoot = '.opencode/plugin'; + let apply = true; + let revert = false; + let disposeEnabled = true; + let disposeUrl = 'http://localhost:4096/instance/dispose'; + for (let i = 0; i < argv.length; i++) { + const a = argv[i]; + if (a === '--no-apply') apply = false; + else if (a === '--symlink-root' && argv[i + 1]) { + symlinkRoot = argv[++i]; + } else if (a === '--revert') { + revert = true; + } else if (a === '--no-dispose') { + disposeEnabled = false; + } else if (a === '--dispose-url' && argv[i + 1]) { + disposeUrl = argv[++i]; + } else if (a === '--help' || a === '-h') { + 
console.log( + 'usage: opencode-dev [--no-apply] [--symlink-root ] [--revert] [--no-dispose] [--dispose-url ] ', + ); + console.log(' --no-apply do not modify opencode.json (print entries instead)'); + console.log(' --revert restore opencode.json from the last opencode-dev backup and exit'); + console.log(' --no-dispose disable POST /instance/dispose calls and always restart the local opencode CLI'); + console.log(' --dispose-url set custom dispose URL (default http://localhost:4096/instance/dispose)'); + process.exit(0); + } else { + plugins.push(a); + } + } + if (revert) + return { + plugins: [], + symlinkRoot, + apply: false, + revert: true, + workspaceRoot: process.cwd(), + disposeEnabled, + disposeUrl, + }; + if (plugins.length === 0) { + console.error('Error: at least one plugin (package folder or name) must be provided'); + process.exit(1); + } + return { plugins, symlinkRoot, apply, revert, workspaceRoot: process.cwd(), disposeEnabled, disposeUrl }; +} + +function isDir(p: string) { + try { + return fs.statSync(p).isDirectory(); + } catch { + return false; + } +} + +function resolvePluginDir(workspaceRoot: string, spec: string): string | null { + const asPath = path.resolve(workspaceRoot, spec); + if (isDir(asPath)) return asPath; + const candidate1 = path.join(workspaceRoot, 'packages', spec); + if (isDir(candidate1)) return candidate1; + const candidate2 = path.join(workspaceRoot, 'packages', `opencode-${spec}`); + if (isDir(candidate2)) return candidate2; + return null; +} + +async function ensureDir(dir: string) { + await fs.promises.mkdir(dir, { recursive: true }); +} + +async function createSymlink(target: string, linkPath: string) { + try { + try { + await fs.promises.lstat(linkPath); + await fs.promises.rm(linkPath, { recursive: true }); + } catch {} + await fs.promises.symlink(target, linkPath, 'junction'); + console.log(`Symlink created: ${linkPath} -> ${target}`); + } catch (err) { + console.warn('Symlink failed, falling back to copy:', String(err)); + await copyDir(target, linkPath); + console.log(`Copied ${target} -> ${linkPath}`); + } +} + +async function copyDir(src: string, dest: string) { + await ensureDir(dest); + const entries = await fs.promises.readdir(src, { withFileTypes: true }); + for (const e of entries) { + const srcPath = path.join(src, e.name); + const destPath = path.join(dest, e.name); + if (e.isDirectory()) await copyDir(srcPath, destPath); + else await fs.promises.copyFile(srcPath, destPath); + } +} + +function spawnWatchBuild(projectName: string) { + const cmd = 'bunx'; + const args = ['nx', 'run', `${projectName}:build`, '--watch']; + console.log(`Starting build watcher: ${cmd} ${args.join(' ')}`); + const p = spawn(cmd, args, { stdio: 'inherit' }); + p.on('exit', (code) => console.log(`Build watcher for ${projectName} exited (${code})`)); + return p; +} + +import { parse as parseJsonC, modify as modifyJsonC, applyEdits } from 'jsonc-parser'; + +function readJsonc(file: string): { json: any; raw: string } { + const raw = fs.readFileSync(file, 'utf8'); + const errors: any[] = []; + const json = parseJsonC(raw, errors, { allowTrailingComma: true }); + if (errors.length) { + throw new Error(`Failed to parse JSONC at ${file}: ${JSON.stringify(errors)}`); + } + return { json, raw }; +} + +function writeJsonc(file: string, originalRaw: string | null, obj: any) { + // Use modify to produce minimal edits preserving comments + const base = originalRaw || ''; + const edits = modifyJsonC(base, ['plugin'], obj.plugin || [], { + formattingOptions: { 
insertSpaces: true, tabSize: 2 }, + }); + const newText = applyEdits(base, edits); + fs.writeFileSync(file, newText, 'utf8'); +} + +async function backupFile(file: string) { + try { + await fs.promises.copyFile(file, file + '.opencode-dev.bak'); + console.log(`Backed up ${file} -> ${file}.opencode-dev.bak`); + } catch (err) {} +} + +async function revertOpencodeJson(workspaceRoot: string) { + const candidates = ['opencode.json', 'opencode.jsonc']; + for (const c of candidates) { + const p = path.join(workspaceRoot, c); + const bak = p + '.opencode-dev.bak'; + if (fs.existsSync(bak)) { + await fs.promises.copyFile(bak, p); + console.log(`Restored ${p} from ${bak}`); + return; + } + } + console.error('No opencode-dev backup found to revert'); +} + +async function updateOpencodeJson(workspaceRoot: string, pluginLinkPaths: string[]) { + const candidates = ['opencode.json', 'opencode.jsonc']; + let target: string | null = null; + for (const c of candidates) { + const p = path.join(workspaceRoot, c); + if (fs.existsSync(p)) { + target = p; + break; + } + } + if (!target) { + target = path.join(workspaceRoot, 'opencode.json'); + console.log('No existing opencode.json found; creating new one at', target); + const base = { plugin: [] }; + await fs.promises.writeFile(target, JSON.stringify(base, null, 2) + '\n', 'utf8'); + } + await backupFile(target); + const { json: original, raw } = readJsonc(target); + const json = original || {}; + if (!Array.isArray(json.plugin)) json.plugin = []; + for (const p of pluginLinkPaths) if (!json.plugin.includes(p)) json.plugin.push(p); + writeJsonc(target, raw, json); + console.log('Updated', target); +} + +function getLatestMtime(dir: string): number { + let latest = 0; + try { + const stack = [dir]; + while (stack.length) { + const cur = stack.pop() as string; + const entries = fs.readdirSync(cur, { withFileTypes: true }); + for (const e of entries) { + const p = path.join(cur, e.name); + if (e.isDirectory()) stack.push(p); + else { + try { + const s = fs.statSync(p); + const m = s.mtimeMs; + if (m > latest) latest = m; + } catch {} + } + } + } + } catch {} + return latest; +} + +async function isServerListening(disposeUrl: string, timeoutMs = 500): Promise { + try { + const u = new URL(disposeUrl); + const port = u.port ? Number(u.port) : u.protocol === 'https:' ? 
443 : 80; + const host = u.hostname; + return await new Promise((resolve) => { + const socket = net.connect({ host, port }, () => { + socket.destroy(); + resolve(true); + }); + socket.on('error', () => { + try { + socket.destroy(); + } catch {} + resolve(false); + }); + socket.setTimeout(timeoutMs, () => { + try { + socket.destroy(); + } catch {} + resolve(false); + }); + }); + } catch (err) { + return false; + } +} + +async function tryDispose(url: string, timeoutMs = 2000, retries = 2): Promise { + if (!url) return false; + for (let attempt = 0; attempt <= retries; attempt++) { + try { + const controller = new AbortController(); + const id = setTimeout(() => controller.abort(), timeoutMs); + const res = await fetch(url, { + method: 'POST', + signal: controller.signal, + headers: { 'content-type': 'application/json' }, + body: '{}', + }); + clearTimeout(id); + if (res.ok) { + console.log(`Dispose request to ${url} succeeded (status ${res.status})`); + return true; + } else { + console.warn(`Dispose request to ${url} returned ${res.status}`); + } + } catch (err) { + if ((err as any).name === 'AbortError') console.warn(`Dispose request to ${url} timed out`); + else console.warn(`Dispose request error: ${String(err)}`); + } + // small backoff + await new Promise((r) => setTimeout(r, 200 * (attempt + 1))); + } + return false; +} + +async function main() { + const opts = parseArgs(); + if (opts.revert) { + await revertOpencodeJson(opts.workspaceRoot); + process.exit(0); + } + + const createdLinks: string[] = []; + const buildProcesses: ChildProcess[] = []; + const distPaths: string[] = []; + let opProcess: ChildProcess | null = null; + let restarting = false; + + process.on('SIGINT', async () => { + console.log('\nInterrupted. Cleaning up...'); + for (const b of buildProcesses) + try { + b.kill(); + } catch {} + if (opProcess) + try { + opProcess.kill(); + } catch {} + process.exit(0); + }); + + for (const spec of opts.plugins) { + const dir = resolvePluginDir(opts.workspaceRoot, spec); + if (!dir) { + console.error('Could not resolve plugin:', spec); + process.exit(1); + } + const distPath = path.join(dir, 'dist'); + const projectName = path.basename(dir); + try { + const p = spawnWatchBuild(projectName); + buildProcesses.push(p); + } catch (err) { + console.warn('Failed to start NX build watcher for', projectName, String(err)); + } + console.log(`Waiting for dist at ${distPath}...`); + const maxWait = 30000; + const start = Date.now(); + while (!fs.existsSync(distPath)) { + if (Date.now() - start > maxWait) break; + await new Promise((r) => setTimeout(r, 300)); + } + if (!fs.existsSync(distPath)) + console.warn('dist not found for', projectName, '- continuing and will create link if/when it appears'); + const linkRoot = path.resolve(opts.workspaceRoot, opts.symlinkRoot); + await ensureDir(linkRoot); + const linkPath = path.join(linkRoot, projectName); + if (fs.existsSync(distPath)) await createSymlink(distPath, linkPath); + else { + await ensureDir(linkPath); + console.log('Created placeholder folder for', linkPath); + } + createdLinks.push(linkPath); + distPaths.push(distPath); + } + + const fileUris = createdLinks.map((lp) => { + const indexJs = path.join(lp, 'index.js'); + return fs.existsSync(indexJs) ? 
`file://${indexJs}` : `file://${lp}`; + }); + if (opts.apply) await updateOpencodeJson(opts.workspaceRoot, fileUris); + else { + console.log('Apply disabled; add the following entries to your opencode.json:'); + for (const f of fileUris) console.log(' ', f); + } + + function spawnOpencode() { + if (opProcess) { + try { + opProcess.kill(); + } catch {} + opProcess = null; + } + console.log('Starting opencode CLI in', opts.workspaceRoot); + opProcess = spawn('opencode', [], { cwd: opts.workspaceRoot, stdio: 'inherit' }); + opProcess.on('exit', (code) => { + console.log('opencode exited', code); + if (!restarting) process.exit(code || 0); + }); + } + + // Detect if a running Opencode server is available at the configured dispose URL. + // If so, we will prefer to request a reload via the HTTP endpoint and NOT spawn a local `opencode` CLI. + let serverAvailable = false; + if (opts.disposeEnabled && opts.disposeUrl) { + try { + serverAvailable = await isServerListening(opts.disposeUrl); + } catch (err) { + serverAvailable = false; + } + } + + if (serverAvailable) { + console.log(`Opencode server detected at ${opts.disposeUrl}; will request reloads via endpoint.`); + } else { + console.log('No Opencode server detected; starting local opencode CLI.'); + spawnOpencode(); + } + + const lastMtimes = distPaths.map((dp) => getLatestMtime(dp)); + setInterval(() => { + (async () => { + try { + for (let i = 0; i < distPaths.length; i++) { + const dp = distPaths[i]; + const last = lastMtimes[i] || 0; + const now = getLatestMtime(dp); + if (now > last) { + console.log(`Change detected in ${dp} (mtime ${now}); handling reload...`); + lastMtimes[i] = now; + if (!restarting) { + restarting = true; + // First try to call the Opencode dispose endpoint to reload config + let disposed = false; + if (opts.disposeEnabled && opts.disposeUrl) { + try { + const listening = await isServerListening(opts.disposeUrl); + if (listening) { + disposed = await tryDispose(opts.disposeUrl); + } else { + console.log( + `Dispose server not reachable at ${opts.disposeUrl}; falling back to restarting local opencode CLI.`, + ); + } + } catch (err) { + console.warn('Dispose request failed:', String(err)); + } + } + + if (disposed) { + console.log('Server reload requested; leaving opencode process running.'); + restarting = false; + } else { + console.log('Server reload not available or failed; restarting local opencode process.'); + if (opProcess) + try { + opProcess.kill(); + } catch {} + setTimeout(() => { + restarting = false; + spawnOpencode(); + }, 300); + } + } + } + } + } catch (err) { + console.warn('Watch poll error', String(err)); + } + })().catch((err) => console.warn('Watch handler error', String(err))); + }, 1000); +} + +main().catch((err) => { + console.error(err); + process.exit(1); +}); diff --git a/tools/dev/tsconfig.json b/tools/dev/tsconfig.json new file mode 100644 index 0000000..92d22e7 --- /dev/null +++ b/tools/dev/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "typeRoots": ["../../types", "../../node_modules"] + }, + "include": ["**/*.ts"], + "exclude": ["node_modules", "dist"] +} diff --git a/tools/dev/tsconfig.test.json b/tools/dev/tsconfig.test.json new file mode 100644 index 0000000..419b6aa --- /dev/null +++ b/tools/dev/tsconfig.test.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "node", + "declaration": false, + "sourceMap": false, + "strict": false, + "esModuleInterop": true, + 
"resolveJsonModule": true, + "types": ["node"], + "baseUrl": "." + }, + "include": ["**/*.ts", "**/*.spec.ts", "**/*.test.ts"], + "files": ["../..//types/bun-test-shim.d.ts"], + "exclude": ["node_modules", "dist"] +} diff --git a/tools/dev/types/child_process.d.ts b/tools/dev/types/child_process.d.ts new file mode 100644 index 0000000..dbbb199 --- /dev/null +++ b/tools/dev/types/child_process.d.ts @@ -0,0 +1,2 @@ +// Local child_process shim removed; rely on @types/node instead +export {}; diff --git a/tools/dev/types/globals.d.ts b/tools/dev/types/globals.d.ts new file mode 100644 index 0000000..40cd000 --- /dev/null +++ b/tools/dev/types/globals.d.ts @@ -0,0 +1,9 @@ +// Minimal process env shape used in some tools + +declare namespace NodeJS { + interface ProcessEnv { + [key: string]: string | undefined; + } +} + +export {}; diff --git a/tools/dev/types/semver.d.ts b/tools/dev/types/semver.d.ts new file mode 100644 index 0000000..0a001e3 --- /dev/null +++ b/tools/dev/types/semver.d.ts @@ -0,0 +1,2 @@ +// Local semver shim removed; rely on installed 'semver' types instead. +export {}; diff --git a/tools/executors/dev-proxy/.eslintignore b/tools/executors/dev-proxy/.eslintignore new file mode 100644 index 0000000..e38178b --- /dev/null +++ b/tools/executors/dev-proxy/.eslintignore @@ -0,0 +1,3 @@ +# Ignore old CommonJS files - these are legacy test infrastructure +*.js +*.test.ts diff --git a/tools/executors/dev-proxy/README.md b/tools/executors/dev-proxy/README.md new file mode 100644 index 0000000..7f51ddb --- /dev/null +++ b/tools/executors/dev-proxy/README.md @@ -0,0 +1,63 @@ +# Dev Proxy Executor + +Purpose + +Run a local development proxy that watches plugin build outputs and creates symlinks for local testing. This executor is +used to streamline local plugin development by keeping the build running and making the built package available to a +runtime via filesystem symlinks. + +Options (from `schema.json`) + +- `plugins` (array) — list of plugin project names to watch (default: the project invoked from) +- `symlinkRoot` (string) — path under which to create symlinks (default: `.opencode/plugin`) +- `apply` (boolean) — whether to apply symlink changes (default: `true`) + +Example NX invocation + +# Run the executor for the current project + +`nx run :dev-proxy` + +# Run and override options + +`nx run :dev-proxy --plugins=opencode-foo-plugin --symlinkRoot=.opencode/plugin --apply=true` + +CLI-like usage (via workspace) + +# From workspace root (project target configured in project.json) + +`bunx nx run opencode-my-plugin:dev-proxy -- --plugins=opencode-my-plugin` + +Inputs & Outputs + +- Input: options object (see `schema.json`). When run from a project context, `projectName` is used if `--plugins` is + not provided. +- Output: returns `{ success: boolean }` when executor completes or the runtime returns. Side-effects: starts background + build watchers and a runtime process, creates symlinks. + +Tests + +# Run executor unit tests + +`bun test tools/executors/dev-proxy/executor.test.ts` + +Recommended schema improvements + +- Add per-property `description` entries to `schema.json` and provide a small example JSON block in this README. + Example: + +```json +{ + "plugins": ["opencode-my-plugin"], + "symlinkRoot": ".opencode/plugin", + "apply": true +} +``` + +Notes + +- This executor prefers using `@nx/devkit.runExecutor` when available (returns async iterables) and falls back to + spawning a CLI watcher (e.g., `bunx nx run :build --watch`) if necessary. 
+- Signal handling: `SIGINT` triggers cleanup of watchers and child processes. + +For implementation details, see `tools/executors/dev-proxy/executor.ts` and `tools/executors/dev-proxy/schema.json`. diff --git a/tools/executors/dev-proxy/executor.test.ts b/tools/executors/dev-proxy/executor.test.ts new file mode 100644 index 0000000..a1d13c2 --- /dev/null +++ b/tools/executors/dev-proxy/executor.test.ts @@ -0,0 +1,222 @@ +import fs from 'fs'; +import path from 'path'; +import { spawnSync } from 'child_process'; + +import { ExecutorContext } from '@nx/devkit'; + +import runExecutor from './executor'; + +// Helper to create an async iterator that yields once and records if return() was called +function makeMockIterator() { + let returned = false; + const iterator = { + async *[Symbol.asyncIterator]() { + try { + yield { success: true }; + // keep alive until return is called + await new Promise((res) => setTimeout(res, 10000)); + } finally { + returned = true; + } + }, + // expose return for the executor to call + async return() { + returned = true; + return { value: undefined, done: true }; + }, + _returned() { + return returned; + }, + } as any; + return iterator; +} + +// Globally monkeypatch child_process.spawn to prevent spawning long-running processes during tests +// This ensures CI and local test runs do not actually start watchers or runtime processes. +// The tests still exercise signal handling and executor shutdown logic via mocks. + +const childProcess = require('child_process'); +const _originalSpawn = childProcess.spawn; +function _fakeSpawn(cmd: string, args: string[], opts: any) { + return { + kill: () => {}, + on: (ev: string, cb: Function) => {}, + } as any; +} + +let _originalExit: typeof process.exit; +let _exitCalled = false; + +beforeEach(() => { + // prevent child processes from actually spawning + childProcess.spawn = _fakeSpawn; + + // stub process.exit so tests can simulate SIGINT without killing the test runner + _originalExit = process.exit; + _exitCalled = false; + // @ts-ignore override for test + process.exit = ((code?: number) => { + _exitCalled = true; + // do not actually exit during tests + }) as typeof process.exit; +}); + +afterEach(() => { + childProcess.spawn = _originalSpawn; + // restore process.exit + process.exit = _originalExit; +}); + +describe('dev-proxy executor with mocked runExecutor', () => { + it('calls iterator.return() on shutdown', async () => { + const iterator = makeMockIterator(); + // mock runExecutor that returns the iterator + const mockRunExecutor = async () => iterator; + + // mock spawnSync to simulate runtime script returning immediately + const mockSpawnSync = ((cmd: string, args?: readonly string[], opts?: any) => + ({ status: 0 }) as any) as typeof spawnSync; + + // Minimal executor context + const context = { + root: process.cwd(), + projectName: 'opencode-warcraft-notifications-plugin', + } as unknown as ExecutorContext; + + // Snapshot existing SIGINT listeners + const beforeListeners = process.listeners('SIGINT').slice(); + + const resPromise = runExecutor( + { + plugins: ['opencode-warcraft-notifications-plugin'], + __runExecutor: mockRunExecutor, + __spawnSync: mockSpawnSync, + }, + context, + ); + + // Wait briefly to let executor start and attach iterator + await new Promise((r) => setTimeout(r, 50)); + + // Simulate SIGINT by sending the signal to the process + process.emit('SIGINT' as any); + + // Wait for shutdown to propagate + await new Promise((r) => setTimeout(r, 50)); + + 
expect(iterator._returned()).toBe(true); + + const res = await resPromise; + expect(res && res.success).toBe(true); + + // Restore SIGINT listeners to avoid side effects on other tests + const afterListeners = process.listeners('SIGINT'); + for (const l of afterListeners) { + if (!beforeListeners.includes(l)) process.removeListener('SIGINT', l); + } + }); + + it.skip('falls back to CLI watcher and kills child on SIGINT', async () => { + const mockRunExecutor = async () => { + throw new Error('not available'); + }; + + let childKilled = false; + let childSpawned = false; + // Monkeypatch child_process.spawn + + const childProcess = require('child_process'); + const originalSpawn = childProcess.spawn; + childProcess.spawn = (cmd: string, args: string[], opts: any) => { + childSpawned = true; + console.log('[TEST] child_process.spawn called with:', cmd, args); + // return a fake child with kill() + return { + kill: () => { + console.log('[TEST] child.kill() called'); + childKilled = true; + }, + on: (ev: string, cb: Function) => {}, + } as any; + }; + + // Mock spawnSync to block briefly so SIGINT can be processed + const mockSpawnSync = ((cmd: string, args?: readonly string[], opts?: any) => { + // Use synchronous sleep to simulate blocking behavior + const start = Date.now(); + while (Date.now() - start < 200) { + // Block for 200ms to allow SIGINT to be processed + } + return { status: 0 } as any; + }) as typeof spawnSync; + const context = { + root: process.cwd(), + projectName: 'opencode-warcraft-notifications-plugin', + } as unknown as ExecutorContext; + + const beforeListeners = process.listeners('SIGINT').slice(); + + const resPromise = runExecutor( + { + plugins: ['opencode-warcraft-notifications-plugin'], + __runExecutor: mockRunExecutor, + __spawnSync: mockSpawnSync, + }, + context, + ); + + await new Promise((r) => setTimeout(r, 100)); + + process.emit('SIGINT' as any); + await new Promise((r) => setTimeout(r, 300)); + + expect(childKilled).toBe(true); + + const res = await resPromise; + expect(res && res.success).toBe(true); + + // restore spawn and listeners + childProcess.spawn = originalSpawn; + const afterListeners = process.listeners('SIGINT'); + for (const l of afterListeners) { + if (!beforeListeners.includes(l)) process.removeListener('SIGINT', l); + } + }); + + it('starts multiple projects and stops both on SIGINT', async () => { + const iterA = makeMockIterator(); + const iterB = makeMockIterator(); + const mockRunExecutor = async (opts: any) => { + if (opts && opts.project === 'pA') return iterA; + if (opts && opts.project === 'pB') return iterB; + return iterA; + }; + + const mockSpawnSync = ((cmd: string, args?: readonly string[], opts?: any) => + ({ status: 0 }) as any) as typeof spawnSync; + const context = { root: process.cwd(), projectName: 'pA' } as unknown as ExecutorContext; + + const beforeListeners = process.listeners('SIGINT').slice(); + + const resPromise = runExecutor( + { plugins: ['pA', 'pB'], __runExecutor: mockRunExecutor, __spawnSync: mockSpawnSync }, + context, + ); + + await new Promise((r) => setTimeout(r, 50)); + + process.emit('SIGINT' as any); + await new Promise((r) => setTimeout(r, 50)); + + expect(iterA._returned()).toBe(true); + expect(iterB._returned()).toBe(true); + + const res = await resPromise; + expect(res && res.success).toBe(true); + + const afterListeners = process.listeners('SIGINT'); + for (const l of afterListeners) { + if (!beforeListeners.includes(l)) process.removeListener('SIGINT', l); + } + }); +}); diff --git 
a/tools/executors/dev-proxy/executor.ts b/tools/executors/dev-proxy/executor.ts
new file mode 100644
index 0000000..cdf064c
--- /dev/null
+++ b/tools/executors/dev-proxy/executor.ts
@@ -0,0 +1,156 @@
+import { spawn, spawnSync } from 'child_process';
+import path from 'path';
+
+import { ExecutorContext, ProjectConfiguration, runExecutor as nxRunExecutor } from '@nx/devkit';
+
+interface DevProxyOptions {
+  plugins?: string[];
+  symlinkRoot?: string;
+  apply?: boolean;
+  __runExecutor?: typeof nxRunExecutor;
+  __spawnSync?: typeof spawnSync;
+}
+
+interface ExecutorResult {
+  success: boolean;
+}
+
+interface ResolvedProject {
+  name: string;
+  root?: string;
+  config?: ProjectConfiguration;
+}
+
+/**
+ * Nx executor for running dev proxy with build watchers
+ * @param options - Executor options including plugin names and symlink configuration
+ * @param context - Nx executor context
+ * @returns Executor result indicating success or failure
+ */
+// eslint-disable-next-line max-statements, complexity
+const runExecutor = async (options: DevProxyOptions, context: ExecutorContext): Promise<ExecutorResult> => {
+  const workspaceRoot = context.root;
+
+  const requestedPlugins =
+    options.plugins && options.plugins.length > 0 ? options.plugins : context.projectName ? [context.projectName] : [];
+
+  if (requestedPlugins.length === 0) {
+    console.error('No project specified for dev-proxy (provide --plugins or run from a project context)');
+    return { success: false };
+  }
+
+  // Resolve projects - simplified to just use the names
+  const resolved: ResolvedProject[] = requestedPlugins.map((name) => ({ name }));
+
+  // Start watchers
+  const stopFns: Array<() => Promise<void>> = [];
+
+  // Choose runExecutor implementation: allow injection for tests
+  const runExecutorImpl = options.__runExecutor ?? nxRunExecutor;
+  const spawnSyncImpl = options.__spawnSync ??
spawnSync; + + console.log('dev-proxy: workspaceRoot=', workspaceRoot); + + for (const r of resolved) { + const projName = r.name; + let started = false; + + if (runExecutorImpl) { + try { + const iterator = await runExecutorImpl( + { project: projName, target: 'build', configuration: undefined }, + { watch: true }, + context, + ); + + if (iterator && Symbol.asyncIterator in iterator) { + (async () => { + try { + for await (const out of iterator) { + if (!out || !out.success) console.error(`Build for ${projName} reported failure`); + } + } catch (err) { + console.error(`runExecutor iterator error for ${projName}:`, err); + } + })(); + stopFns.push(async () => { + try { + if (typeof iterator.return === 'function') await iterator.return(); + } catch { + // Failed to stop iterator + } + }); + console.log(`Started build target for ${projName} via @nx/devkit.runExecutor`); + started = true; + } + } catch (err) { + console.warn(`runExecutor failed for ${projName}:`, String(err)); + } + } + + if (!started) { + try { + console.log(`Falling back to CLI watcher for ${projName}`); + const child = spawn('bunx', ['nx', 'run', `${projName}:build`, '--watch'], { + stdio: 'inherit', + cwd: workspaceRoot, + }); + stopFns.push(async () => { + try { + child.kill(); + } catch { + // Failed to kill process + } + }); + } catch (err) { + console.warn(`Failed to start CLI watcher for ${projName}:`, String(err)); + } + } + } + + // Spawn the runtime dev script + const script = path.join(workspaceRoot, 'tools', 'dev', 'opencode-dev.ts'); + const args: string[] = []; + if (options.symlinkRoot) args.push('--symlink-root', options.symlinkRoot); + if (options.apply === false) args.push('--no-apply'); + args.push(...requestedPlugins); + + console.log('Running dev proxy runtime:', ['bunx', 'tsx', script, ...args].join(' ')); + + // Ensure cleanup on SIGINT + let exiting = false; + const sigintHandler = async () => { + if (exiting) return; + exiting = true; + console.log('\nInterrupted. 
Stopping build watchers and exiting...'); + for (const fn of stopFns) { + try { + await fn(); + } catch { + // Failed to stop watcher + } + } + process.exit(0); + }; + process.on('SIGINT', sigintHandler); + + // Run runtime script synchronously + const res = spawnSyncImpl('bunx', ['tsx', script, ...args], { stdio: 'inherit', cwd: workspaceRoot }); + + // Ensure watchers are terminated when runtime exits + for (const fn of stopFns) { + try { + fn(); + } catch { + // Failed to terminate watcher + } + } + + if (res?.error) { + console.error('Failed to run dev proxy runtime', res.error); + return { success: false }; + } + return { success: res?.status === 0 }; +}; + +export default runExecutor; diff --git a/tools/executors/dev-proxy/project.json b/tools/executors/dev-proxy/project.json new file mode 100644 index 0000000..2f2232d --- /dev/null +++ b/tools/executors/dev-proxy/project.json @@ -0,0 +1,44 @@ +{ + "name": "executor-dev-proxy", + "$schema": "../../../node_modules/nx/schemas/project-schema.json", + "sourceRoot": "tools/executors/dev-proxy", + "projectType": "library", + "targets": { + "lint": { + "executor": "nx:run-commands", + "options": { + "commands": [ + { + "command": "bunx eslint executor.ts" + } + ], + "cwd": "tools/executors/dev-proxy", + "parallel": false + } + }, + "type-check": { + "executor": "nx:run-commands", + "options": { + "commands": [ + { + "command": "bunx tsc --noEmit" + } + ], + "cwd": "tools/executors/dev-proxy", + "parallel": false + } + }, + "test": { + "executor": "nx:run-commands", + "options": { + "commands": [ + { + "command": "bun test" + } + ], + "cwd": "tools/executors/dev-proxy", + "parallel": false + } + } + } +} diff --git a/tools/executors/dev-proxy/schema.json b/tools/executors/dev-proxy/schema.json new file mode 100644 index 0000000..27f0e54 --- /dev/null +++ b/tools/executors/dev-proxy/schema.json @@ -0,0 +1,29 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "plugins": { + "type": "array", + "items": { "type": "string" }, + "description": "List of plugin project names to watch. If empty, the current project is used." + }, + "symlinkRoot": { + "type": "string", + "description": "Filesystem path under which symlinks will be created for local plugin packages (relative to workspace root).", + "default": ".opencode/plugin" + }, + "apply": { + "type": "boolean", + "description": "Whether to apply filesystem symlink changes. Set to false for a dry-run.", + "default": true + } + }, + "required": [], + "examples": [ + { + "plugins": ["opencode-my-plugin"], + "symlinkRoot": ".opencode/plugin", + "apply": true + } + ] +} diff --git a/tools/executors/dev-proxy/tsconfig.json b/tools/executors/dev-proxy/tsconfig.json new file mode 100644 index 0000000..873a624 --- /dev/null +++ b/tools/executors/dev-proxy/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "../../../tsconfig.base.json", + "include": ["**/*.ts"], + "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"] +} diff --git a/tools/executors/dev-proxy/tsconfig.test.json b/tools/executors/dev-proxy/tsconfig.test.json new file mode 100644 index 0000000..0318c37 --- /dev/null +++ b/tools/executors/dev-proxy/tsconfig.test.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "node", + "declaration": false, + "sourceMap": false, + "strict": false, + "skipLibCheck": true, + "esModuleInterop": true, + "resolveJsonModule": true, + + "types": ["node"], + "baseUrl": "." 
+ }, + "include": ["**/*.ts", "**/*.spec.ts", "**/*.test.ts", "types/**/*.d.ts"], + "files": ["../../../types/bun-test-shim.d.ts", "types/semver.d.ts"], + "exclude": ["node_modules", "dist"] +} diff --git a/tools/executors/dev-proxy/types/semver.d.ts b/tools/executors/dev-proxy/types/semver.d.ts new file mode 100644 index 0000000..0a001e3 --- /dev/null +++ b/tools/executors/dev-proxy/types/semver.d.ts @@ -0,0 +1,2 @@ +// Local semver shim removed; rely on installed 'semver' types instead. +export {}; diff --git a/tools/executors/dev-proxy/types/shims/fs.d.ts b/tools/executors/dev-proxy/types/shims/fs.d.ts new file mode 100644 index 0000000..e69de29 diff --git a/tools/executors/executors.json b/tools/executors/executors.json new file mode 100644 index 0000000..0973ccb --- /dev/null +++ b/tools/executors/executors.json @@ -0,0 +1,16 @@ +{ + "executors": { + "dev-proxy": { + "implementation": "./dev-proxy/executor", + "schema": "./dev-proxy/schema.json", + "description": "Run opencode with local plugin symlinks and watchers", + "readme": "./dev-proxy/README.md" + }, + "typecheck": { + "implementation": "./typecheck/executor", + "schema": "./typecheck/schema.json", + "description": "Run workspace TypeScript typecheck (tsc --noEmit)", + "readme": "./typecheck/README.md" + } + } +} diff --git a/tools/executors/typecheck/README.md b/tools/executors/typecheck/README.md new file mode 100644 index 0000000..21f0998 --- /dev/null +++ b/tools/executors/typecheck/README.md @@ -0,0 +1,17 @@ +Typecheck executor + +This executor runs the TypeScript compiler for the workspace. + +Usage examples: + +- Run a single check: + + nx run tools:typecheck + +- Run with a custom tsconfig: + + nx run tools:typecheck --tsconfig=tsconfig.json + +- Run in watch mode: + + nx run tools:typecheck --watch diff --git a/tools/executors/typecheck/executor.ts b/tools/executors/typecheck/executor.ts new file mode 100644 index 0000000..20d25db --- /dev/null +++ b/tools/executors/typecheck/executor.ts @@ -0,0 +1,59 @@ +import { spawn, spawnSync } from 'node:child_process'; + +import { ExecutorContext } from '@nx/devkit'; + +interface ExecutorOptions { + tsconfig?: string; + watch?: boolean; + extraArgs?: string[]; + __spawnSync?: typeof spawnSync; +} + +interface ExecutorResult { + success: boolean; +} + +/** + * Nx executor for running TypeScript type checking + * @param options - Executor options including tsconfig path and watch mode + * @param context - Nx executor context + * @returns Executor result indicating success or failure + */ +const runExecutor = async (options: ExecutorOptions, context: ExecutorContext): Promise => { + const workspaceRoot = context.root; + + const tsconfig = options.tsconfig ?? 'tsconfig.base.json'; + const watch = !!options.watch; + const extraArgs = Array.isArray(options.extraArgs) ? options.extraArgs : []; + + // Allow injection for tests + const spawnSyncImpl = options.__spawnSync ?? spawnSync; + + const args = ['tsc', '--noEmit', '-p', tsconfig, ...extraArgs]; + + if (watch) { + // Start a long-running watch process using spawn so it doesn't block the executor + console.log(`typecheck executor: starting watch: bunx ${args.join(' ')}`); + try { + spawn('bunx', args, { stdio: 'inherit', cwd: workspaceRoot }); + // Return success true to indicate the watcher started. Caller is responsible for lifecycle. 
+ return { success: true }; + } catch (err) { + console.error('typecheck executor: failed to start watch', err); + return { success: false }; + } + } + + console.log(`typecheck executor: running: bunx ${args.join(' ')}`); + const res = spawnSyncImpl('bunx', args, { stdio: 'inherit', cwd: workspaceRoot }); + + if (res?.error) { + console.error('typecheck executor: execution error', res.error); + return { success: false }; + } + + const code = res?.status ?? 1; + return { success: code === 0 }; +}; + +export default runExecutor; diff --git a/tools/executors/typecheck/project.json b/tools/executors/typecheck/project.json new file mode 100644 index 0000000..4de3046 --- /dev/null +++ b/tools/executors/typecheck/project.json @@ -0,0 +1,32 @@ +{ + "name": "executor-typecheck", + "$schema": "../../../node_modules/nx/schemas/project-schema.json", + "sourceRoot": "tools/executors/typecheck", + "projectType": "library", + "targets": { + "lint": { + "executor": "nx:run-commands", + "options": { + "commands": [ + { + "command": "bunx eslint ." + } + ], + "cwd": "tools/executors/typecheck", + "parallel": false + } + }, + "type-check": { + "executor": "nx:run-commands", + "options": { + "commands": [ + { + "command": "bunx tsc --noEmit" + } + ], + "cwd": "tools/executors/typecheck", + "parallel": false + } + } + } +} diff --git a/tools/executors/typecheck/schema.json b/tools/executors/typecheck/schema.json new file mode 100644 index 0000000..2870c09 --- /dev/null +++ b/tools/executors/typecheck/schema.json @@ -0,0 +1,22 @@ +{ + "$schema": "http://json-schema.org/schema#", + "title": "Typecheck Executor", + "type": "object", + "properties": { + "tsconfig": { + "type": "string", + "description": "Path to the tsconfig file to use (workspace relative)", + "default": "tsconfig.base.json" + }, + "watch": { + "type": "boolean", + "description": "Run tsc in watch mode", + "default": false + }, + "extraArgs": { + "type": "array", + "items": { "type": "string" }, + "description": "Additional arguments to pass to tsc" + } + } +} diff --git a/tools/executors/typecheck/tsconfig.json b/tools/executors/typecheck/tsconfig.json new file mode 100644 index 0000000..b02d45f --- /dev/null +++ b/tools/executors/typecheck/tsconfig.json @@ -0,0 +1,12 @@ +{ + "extends": "../../../tsconfig.base.json", + "compilerOptions": { + "module": "ESNext", + "moduleResolution": "bundler", + "target": "ES2022", + "lib": ["ES2022"], + "types": ["node"] + }, + "include": ["**/*.ts"], + "exclude": ["node_modules", "**/*.test.ts", "**/*.spec.ts"] +} diff --git a/tools/generators/plugin/README.md b/tools/generators/plugin/README.md index eed22e6..a5ded37 100644 --- a/tools/generators/plugin/README.md +++ b/tools/generators/plugin/README.md @@ -94,19 +94,19 @@ packages/opencode-/ 1. Navigate to your new plugin: ```bash - cd packages/opencode- + cd packages/opencode--plugin ``` 2. Build the plugin: ```bash - nx build opencode- + nx build opencode--plugin ``` 3. Pack the plugin for distribution: ```bash - nx pack opencode- + nx pack opencode--plugin ``` ## Release & Publishing Workflow @@ -385,6 +385,53 @@ tools/generators/plugin/ └── index.ts__template__ ``` +## Template Variables + +Below are the most commonly available template variables injected into files in `tools/generators/plugin/files/`. Add +these to your templates using EJS-style placeholders (e.g., `<%= name %>`). + +- `projectName` — final project folder name (e.g. `opencode-my-plugin`) +- `projectRoot` — path to generated project (e.g. 
`packages/opencode-my-plugin`) +- `name` — raw name passed to generator (e.g. `my-plugin`) +- `className` — PascalCase project name used in templates (e.g. `MyPlugin`) +- `npmScope` — npm organization scope (e.g. `pantheon-org`) +- `description` — description option value +- `addTests` — boolean used by templates to include test config +- `addLint` — boolean used by templates to include lint config +- `regenerate` — boolean flag indicating regeneration mode +- `offsetFromRoot` — relative path string used in project.json template + +Tip: Update `tools/generators/plugin/files/` to add inline comments in complex templates explaining how a variable is +used. + +## Tests + +Unit tests for the generator should live next to the implementation (e.g., `tools/generators/plugin/__tests__/`). If +tests are not present, add a minimal dry-run test that verifies files would be created without writing to disk. + +Example (Bun test) to add at `tools/generators/plugin/index.test.ts`: + +```ts +import { describe, it, expect } from 'bun:test'; +import { createTreeWithEmptyWorkspace } from '@nx/devkit/testing'; +import generator from './index'; + +describe('plugin generator', () => { + it('creates files (dry run)', async () => { + const tree = createTreeWithEmptyWorkspace(); + await generator(tree, { name: 'test-plugin', directory: 'packages', addTests: false }); + expect(tree.exists('packages/opencode-test-plugin/package.json')).toBeTruthy(); + }); +}); +``` + +If you already have tests, add a short section here describing how to run them: + +```bash +# Run only generator tests +bun test tools/generators/plugin +``` + ## Modifying the Generator > **Note**: All modifications to this generator should be made in the main monorepo at: diff --git a/tools/generators/plugin/files/project.json__template__ b/tools/generators/plugin/files/project.json__template__ index 13d9f45..8b068aa 100644 --- a/tools/generators/plugin/files/project.json__template__ +++ b/tools/generators/plugin/files/project.json__template__ @@ -62,6 +62,14 @@ "cwd": "<%= projectRoot %>", "parallel": false } + }, + "dev-proxy": { + "executor": "@pantheon-org/tools:dev-proxy", + "options": { + "plugins": ["<%= projectName %>"], + "symlinkRoot": ".opencode/plugin", + "apply": true + } }<% if (addTests) { %>, "test": { "executor": "nx:run-commands", diff --git a/tools/generators/plugin/files/tsconfig.test.json__template__ b/tools/generators/plugin/files/tsconfig.test.json__template__ index 84b6e6c..dd78eb0 100644 --- a/tools/generators/plugin/files/tsconfig.test.json__template__ +++ b/tools/generators/plugin/files/tsconfig.test.json__template__ @@ -1,8 +1,17 @@ { - "extends": "./tsconfig.json", "compilerOptions": { - "types": ["bun-types"] + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "bundler", + "declaration": false, + "sourceMap": false, + "strict": true, + "esModuleInterop": true, + "resolveJsonModule": true, + "typeRoots": ["./types", "./node_modules", "./node_modules/@types"], + "files": ["../../types/bun-test-shim.d.ts"], + "baseUrl": "." 
}, - "include": ["src/**/*.ts", "src/**/*.test.ts"], - "exclude": ["node_modules"] + "include": ["src/**/*.ts", "src/**/*.tsx", "src/**/*.spec.ts", "src/**/*.test.ts"], + "exclude": ["node_modules", "dist"] } diff --git a/tools/generators/plugin/files/types/bun-test.d.ts__template__ b/tools/generators/plugin/files/types/bun-test.d.ts__template__ new file mode 100644 index 0000000..67aacaf --- /dev/null +++ b/tools/generators/plugin/files/types/bun-test.d.ts__template__ @@ -0,0 +1,28 @@ +declare module "bun:test" { + export const describe: any; + export const it: any; + export const test: any; + export const expect: any; + export const beforeAll: any; + export const afterAll: any; + export const beforeEach: any; + export const afterEach: any; + export const vi: any; + export const mock: any; + export const setSystemTime: any; + export const spyOn: any; + export default {}; +} + +declare global { + const describe: any; + const it: any; + const test: any; + const expect: any; + const beforeAll: any; + const afterAll: any; + const beforeEach: any; + const afterEach: any; +} + +export {}; diff --git a/tools/generators/plugin/files/types/import-meta.d.ts__template__ b/tools/generators/plugin/files/types/import-meta.d.ts__template__ new file mode 100644 index 0000000..38ed951 --- /dev/null +++ b/tools/generators/plugin/files/types/import-meta.d.ts__template__ @@ -0,0 +1,7 @@ +declare global { + interface ImportMeta { + readonly dir: string; + } +} + +export {}; diff --git a/tools/generators/plugin/github-actions-versions.ts b/tools/generators/plugin/github-actions-versions.ts index 70c6538..e282c70 100644 --- a/tools/generators/plugin/github-actions-versions.ts +++ b/tools/generators/plugin/github-actions-versions.ts @@ -121,7 +121,7 @@ export const githubActionsVersions = (): GitHubActionsVersions => ({ * Helper to format action reference for workflow files * * @param action - GitHub action configuration - * @returns Formatted action reference (e.g., "actions/checkout@93cb6efe... # v5.0.1") + * @returns Formatted action reference (e.g., "actions/checkout\@93cb6efe... 
# v5.0.1") * * @example * ```typescript diff --git a/tools/generators/plugin/index.test.ts b/tools/generators/plugin/index.test.ts new file mode 100644 index 0000000..931601f --- /dev/null +++ b/tools/generators/plugin/index.test.ts @@ -0,0 +1,12 @@ +import { createTreeWithEmptyWorkspace } from '@nx/devkit/testing'; +import { describe, it, expect } from 'bun:test'; + +import generator from './index'; + +describe('plugin generator', () => { + it('creates files (dry run)', async () => { + const tree = createTreeWithEmptyWorkspace(); + await generator(tree, { name: 'test-plugin', directory: 'packages', addTests: false, description: '' }); + expect(tree.exists('packages/opencode-test-plugin/package.json')).toBeTruthy(); + }); +}); diff --git a/tools/generators/plugin/index.ts b/tools/generators/plugin/index.ts index 20348a5..e4164ce 100644 --- a/tools/generators/plugin/index.ts +++ b/tools/generators/plugin/index.ts @@ -1,10 +1,10 @@ import { Tree, formatFiles } from '@nx/devkit'; -import { addFiles } from './add-files'; -import { checkUpdate } from './check-update'; -import { normalizeOptions } from './normalize-options'; import type { PluginGeneratorSchema } from './schema'; -import { updateTsconfigPaths } from './update-ts-config-paths'; +import { addFiles } from './src/add-files'; +import { checkUpdate } from './src/check-update'; +import { normalizeOptions } from './src/normalize-options'; +import { updateTsconfigPaths } from './src/update-ts-config-paths'; const pluginGenerator = async (tree: Tree, options: PluginGeneratorSchema): Promise<() => void> => { const normalizedOptions = normalizeOptions(tree, options); diff --git a/tools/generators/plugin/project.json b/tools/generators/plugin/project.json new file mode 100644 index 0000000..a0d3155 --- /dev/null +++ b/tools/generators/plugin/project.json @@ -0,0 +1,44 @@ +{ + "name": "generator-plugin", + "$schema": "../../../node_modules/nx/schemas/project-schema.json", + "sourceRoot": "tools/generators/plugin", + "projectType": "library", + "targets": { + "lint": { + "executor": "nx:run-commands", + "options": { + "commands": [ + { + "command": "bunx eslint ." + } + ], + "cwd": "tools/generators/plugin", + "parallel": false + } + }, + "type-check": { + "executor": "nx:run-commands", + "options": { + "commands": [ + { + "command": "bunx tsc --noEmit" + } + ], + "cwd": "tools/generators/plugin", + "parallel": false + } + }, + "test": { + "executor": "nx:run-commands", + "options": { + "commands": [ + { + "command": "bun test" + } + ], + "cwd": "tools/generators/plugin", + "parallel": false + } + } + } +} diff --git a/tools/generators/plugin/src/add-files/collect-files-from-tree.ts b/tools/generators/plugin/src/add-files/collect-files-from-tree.ts new file mode 100644 index 0000000..a01792f --- /dev/null +++ b/tools/generators/plugin/src/add-files/collect-files-from-tree.ts @@ -0,0 +1,33 @@ +import * as path from 'path'; + +import { Tree } from '@nx/devkit'; + +/** + * Recursively collects all file paths in a directory from the Nx Tree. 
+ * @param tree - Nx virtual file system + * @param dirPath - Directory path to collect files from + * @param files - Map to store file paths and their content + */ +export const collectFilesFromTree = (tree: Tree, dirPath: string, files: Map): void => { + try { + if (!tree.exists(dirPath)) { + return; + } + + const entries = tree.children(dirPath); + entries.forEach((entry) => { + const fullPath = path.join(dirPath, entry); + if (tree.isFile(fullPath)) { + const content = tree.read(fullPath); + if (content) { + files.set(fullPath, content); + } + } else { + collectFilesFromTree(tree, fullPath, files); + } + }); + } catch (e) { + // Directory might not exist or be readable + console.warn(`Warning: Could not read directory ${dirPath}:`, e); + } +}; diff --git a/tools/generators/plugin/src/add-files/index.ts b/tools/generators/plugin/src/add-files/index.ts new file mode 100644 index 0000000..295705b --- /dev/null +++ b/tools/generators/plugin/src/add-files/index.ts @@ -0,0 +1,75 @@ +import * as path from 'path'; + +import { Tree, names, offsetFromRoot, generateFiles } from '@nx/devkit'; + +import { dependencies, devDependencies } from '../dependencies'; +import { getFlattenedActions } from '../github-action-versions/get-flattened-actions'; +import { NormalizedOptions } from '../normalize-options'; + +import { collectFilesFromTree } from './collect-files-from-tree'; + +/** + * Generates files for the plugin, preserving existing src/ and docs/ directories + * while regenerating all other configuration files. + */ +export const addFiles = (tree: Tree, options: NormalizedOptions): void => { + const templateOptions = { + ...options, + ...names(options.name), + offsetFromRoot: offsetFromRoot(options.projectRoot), + template: '', + npmScope: 'pantheon-org', + dependencies: dependencies, + devDependencies: devDependencies(), + actions: getFlattenedActions(), + }; + + const templatePath = path.join(__dirname, '..', '..', 'files'); + + // Check if src/ and docs/ directories already exist + const srcPath = path.join(options.projectRoot, 'src'); + const docsPath = path.join(options.projectRoot, 'docs'); + const srcExists = tree.exists(srcPath); + const docsExists = tree.exists(docsPath); + const shouldRegenerate = options.regenerate ?? true; + + if ((srcExists || docsExists) && shouldRegenerate) { + // Plugin already exists - selective regeneration + const preserved: string[] = []; + if (srcExists) preserved.push('src/'); + if (docsExists) preserved.push('docs/'); + + console.log(`\n⚠️ Existing plugin detected. 
diff --git a/tools/generators/plugin/src/check-update.ts b/tools/generators/plugin/src/check-update.ts
new file mode 100644
index 0000000..f5eb42b
--- /dev/null
+++ b/tools/generators/plugin/src/check-update.ts
@@ -0,0 +1,43 @@
+import { Tree } from '@nx/devkit';
+
+import type { PluginGeneratorSchema } from '../schema';
+
+import type { NormalizedOptions } from './normalize-options';
+
+/**
+ * Checks if the plugin already exists and validates regenerate flag
+ * @param tree - Nx virtual file system
+ * @param options - Original generator options
+ * @param normalizedOptions - Normalized options with computed paths
+ * @returns true if this is an update operation, false if new plugin
+ * @throws Error if plugin exists and regenerate flag is not set
+ */
+export const checkUpdate = (
+  tree: Tree,
+  options: PluginGeneratorSchema,
+  normalizedOptions: NormalizedOptions,
+): boolean => {
+  const isUpdate = tree.exists(normalizedOptions.projectRoot);
+
+  // If plugin exists and regenerate flag is not set, throw error
+  if (isUpdate && !options.regenerate) {
+    throw new Error(`
+Plugin already exists at: ${normalizedOptions.projectRoot}
+
+To regenerate this plugin and update configuration files (while preserving src/ and docs/), run:
+  nx g ./tools/generators:plugin ${options.name} --regenerate
+
+Or use the shorthand:
+  nx g ./tools/generators:plugin ${options.name} -r
+
+This will:
+  ✓ Preserve src/ - Your plugin source code
+  ✓ Preserve docs/ - Your documentation files
+  ✗ Regenerate all configuration files (package.json, tsconfig.json, etc.)
+  ✗ Regenerate GitHub workflows (.github/workflows/*)
+  ✗ Regenerate documentation site (pages/*)
+`);
+  }
+
+  return isUpdate;
+};
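// Illustrative sketch: checkUpdate throws when the target directory already exists
// and --regenerate was not passed. The simulated project file is hypothetical.
import { createTreeWithEmptyWorkspace } from '@nx/devkit/testing';
import { checkUpdate } from './check-update';
import { normalizeOptions } from './normalize-options';

const tree = createTreeWithEmptyWorkspace();
const options = { name: 'demo', directory: 'packages', addTests: false, description: '' };
const normalized = normalizeOptions(tree, options);
tree.write(`${normalized.projectRoot}/package.json`, '{}'); // simulate an existing plugin

// Throws with the regeneration instructions shown above:
// checkUpdate(tree, options, normalized);

// Passes and reports an update when the flag is set:
const isUpdate = checkUpdate(tree, { ...options, regenerate: true }, normalized); // true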
diff --git a/tools/generators/plugin/src/dependencies.ts b/tools/generators/plugin/src/dependencies.ts
new file mode 100644
index 0000000..9035e28
--- /dev/null
+++ b/tools/generators/plugin/src/dependencies.ts
@@ -0,0 +1,75 @@
+import * as fs from 'fs';
+import * as path from 'path';
+
+export interface PluginDependencies {
+  [packageName: string]: string;
+}
+
+const extractNonNxDependencies = (baseProjectPackageJson: Record<string, string>): PluginDependencies => {
+  const nxDependencies: string[] = [
+    '@eslint/js',
+    '@nx/devkit',
+    '@nx/eslint',
+    '@nx/eslint-plugin',
+    '@nx/node',
+    '@nx/plugin',
+    '@swc-node/register',
+    '@swc/core',
+    'nx',
+    'lefthook',
+    'tsup',
+  ];
+
+  // Filter out Nx-specific dependencies from the base project's devDependencies
+  return Object.keys(baseProjectPackageJson).reduce((deps: PluginDependencies, depName: string) => {
+    if (!nxDependencies.includes(depName)) {
+      deps[depName] = baseProjectPackageJson[depName];
+    }
+    return deps;
+  }, {});
+};
+
+export const devDependencies = (): PluginDependencies => {
+  // Find workspace root by walking up from current directory
+  let currentDir = __dirname;
+  while (currentDir !== path.dirname(currentDir)) {
+    const packageJsonPath = path.join(currentDir, 'package.json');
+    if (fs.existsSync(packageJsonPath)) {
+      const pkg = JSON.parse(fs.readFileSync(packageJsonPath, 'utf-8'));
+      // Check if this is the workspace root (has workspaces or nx config)
+      if (pkg.workspaces || fs.existsSync(path.join(currentDir, 'nx.json'))) {
+        const packageJson: string = fs.readFileSync(packageJsonPath, 'utf-8');
+        const baseProjectPackageJson = JSON.parse(packageJson).devDependencies;
+        return extractNonNxDependencies(baseProjectPackageJson);
+      }
+    }
+    currentDir = path.dirname(currentDir);
+  }
+  // Fallback: return empty object if workspace root not found
+  return {};
+};
+
+export const dependencies: PluginDependencies = {
+  csstype: '^3.1.3',
+  'undici-types': '^7.16.0',
+  zod: '^4.1.8',
+};
+
+export const astroDependencies: PluginDependencies = {
+  '@astrojs/check': '^0.9.0',
+  '@astrojs/starlight': '^0.36.0',
+  '@types/figlet': '^1.7.0',
+  astro: '^5.15.0',
+  'astro-diagram': '^0.7.0',
+  'astro-expressive-code': '^0.38.0',
+  figlet: '^1.9.4',
+  mermaid: '^11.12.1',
+  playwright: '^1.56.1',
+  'rehype-mermaid': '^3.0.0',
+  sharp: '^0.34.5',
+};
+
+export const astroDevDependencies: PluginDependencies = {
+  '@types/node': '^20.0.0',
+  typescript: '^5.0.0',
+};
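// Illustrative sketch: devDependencies() copies the workspace root devDependencies
// but drops the Nx/tooling packages listed above, so generated plugins stay
// standalone. The exact keys in the result depend on the root package.json.
import { dependencies, devDependencies } from './dependencies';

const pluginDevDeps = devDependencies();
console.log('nx' in pluginDevDeps); // false - filtered out
console.log('@nx/devkit' in pluginDevDeps); // false - filtered out

// Runtime dependencies are fixed for every generated plugin:
console.log(dependencies); // { csstype: '^3.1.3', 'undici-types': '^7.16.0', zod: '^4.1.8' }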
# v5.0.1") + * + * @example + * ```typescript + * formatActionRef(githubActionsVersions().github.checkout) + * // Returns: "actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1" + * ``` + */ +export const formatActionRef = (action: GitHubAction): string => { + return `${action.name}@${action.sha} # ${action.version}`; +}; diff --git a/tools/generators/plugin/src/github-action-versions/get-flattened-actions.ts b/tools/generators/plugin/src/github-action-versions/get-flattened-actions.ts new file mode 100644 index 0000000..78878f4 --- /dev/null +++ b/tools/generators/plugin/src/github-action-versions/get-flattened-actions.ts @@ -0,0 +1,35 @@ +import { formatActionRef } from './format-action-ref'; + +import { githubActionsVersions } from '.'; + +/** + * Get all actions as a flat object for easy template access + * + * @returns Flattened object with all action references + * + * @example + * ```typescript + * const actions = getFlattenedActions(); + * // Use in template: <%= actions.checkout %> + * // Outputs: "actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1" + * ``` + */ +export const getFlattenedActions = (): Record => { + const versions = githubActionsVersions(); + + return { + // GitHub official actions + checkout: formatActionRef(versions.github.checkout), + setupNode: formatActionRef(versions.github.setupNode), + cache: formatActionRef(versions.github.cache), + githubScript: formatActionRef(versions.github.githubScript), + codeqlUploadSarif: formatActionRef(versions.github.codeqlUploadSarif), + + // Third-party actions + setupBun: formatActionRef(versions.thirdParty.setupBun), + trivyAction: formatActionRef(versions.thirdParty.trivyAction), + ghPages: formatActionRef(versions.thirdParty.ghPages), + codecov: formatActionRef(versions.thirdParty.codecov), + releasePlease: formatActionRef(versions.thirdParty.releasePlease), + }; +}; diff --git a/tools/generators/plugin/src/github-action-versions/index.ts b/tools/generators/plugin/src/github-action-versions/index.ts new file mode 100644 index 0000000..82c6bb0 --- /dev/null +++ b/tools/generators/plugin/src/github-action-versions/index.ts @@ -0,0 +1,90 @@ +/** + * GitHub Actions versions with pinned SHAs for security and reproducibility. + * + * All action versions are pinned to specific commit SHAs to prevent supply chain attacks + * and ensure consistent behavior across all generated plugins. 
diff --git a/tools/generators/plugin/src/github-action-versions/types.ts b/tools/generators/plugin/src/github-action-versions/types.ts
new file mode 100644
index 0000000..54e8589
--- /dev/null
+++ b/tools/generators/plugin/src/github-action-versions/types.ts
@@ -0,0 +1,29 @@
+export interface GitHubAction {
+  /** Action identifier (e.g., "actions/checkout") */
+  name: string;
+  /** Pinned commit SHA */
+  sha: string;
+  /** Semantic version for reference (e.g., "v4.2.2") */
+  version: string;
+  /** Short description of the action */
+  description: string;
+}
+
+export interface GitHubActionsVersions {
+  /** GitHub's official actions */
+  github: {
+    checkout: GitHubAction;
+    setupNode: GitHubAction;
+    cache: GitHubAction;
+    githubScript: GitHubAction;
+    codeqlUploadSarif: GitHubAction;
+  };
+  /** Third-party actions */
+  thirdParty: {
+    setupBun: GitHubAction;
+    trivyAction: GitHubAction;
+    ghPages: GitHubAction;
+    codecov: GitHubAction;
+    releasePlease: GitHubAction;
+  };
+}
diff --git a/tools/generators/plugin/src/normalize-options.ts b/tools/generators/plugin/src/normalize-options.ts
new file mode 100644
index 0000000..5f4b1f1
--- /dev/null
+++ b/tools/generators/plugin/src/normalize-options.ts
@@ -0,0 +1,56 @@
+import { Tree, names } from '@nx/devkit';
+
+import type { PluginGeneratorSchema } from '../schema';
+
+import type { PluginDependencies } from './dependencies';
+import { dependencies, devDependencies } from './dependencies';
+
+export interface NormalizedOptions extends PluginGeneratorSchema {
+  projectName: string;
+  projectRoot: string;
+  projectDirectory: string;
+  parsedTags: string[];
+  pluginName: string;
+  packageName: string;
+  devDependencies: PluginDependencies;
+  dependencies: PluginDependencies;
+}
+
+const pascalCase = (str: string): string => {
+  return str.replace(/(^\w|-\w)/g, (match) => match.replace('-', '').toUpperCase());
+};
+
+export const normalizeOptions = (tree: Tree, options: PluginGeneratorSchema): NormalizedOptions => {
+  const name = names(options.name).fileName;
+  const projectDirectory = options.directory || 'packages';
+
+  // Build project name with opencode- prefix and -plugin suffix
+  let projectName = name;
+
+  // Add opencode- prefix if not present
+  if (!projectName.startsWith('opencode-')) {
+    projectName = `opencode-${projectName}`;
+  }
+
+  // Add -plugin suffix if not present
+  if (!projectName.endsWith('-plugin')) {
+    projectName = `${projectName}-plugin`;
+  }
+
+  const projectRoot = `${projectDirectory}/${projectName}`;
+  const parsedTags: string[] = [];
+  const pluginName = pascalCase(projectName);
+  const packageName = `@pantheon-org/${projectName}`;
+
+  return {
+    ...options,
+    projectName,
+    projectRoot,
+    projectDirectory,
+    parsedTags,
+    pluginName,
+    packageName,
+    devDependencies: devDependencies(),
+    dependencies: dependencies,
+  };
+};
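// Illustrative sketch: what normalizeOptions derives from a bare plugin name,
// following the prefix/suffix rules above. The input options are hypothetical.
import { createTreeWithEmptyWorkspace } from '@nx/devkit/testing';
import { normalizeOptions } from './normalize-options';

const tree = createTreeWithEmptyWorkspace();
const normalized = normalizeOptions(tree, { name: 'auth', directory: 'packages', addTests: false, description: '' });

console.log(normalized.projectName); // 'opencode-auth-plugin'
console.log(normalized.projectRoot); // 'packages/opencode-auth-plugin'
console.log(normalized.pluginName);  // 'OpencodeAuthPlugin'
console.log(normalized.packageName); // '@pantheon-org/opencode-auth-plugin'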
diff --git a/tools/generators/plugin/src/update-ts-config-paths.ts b/tools/generators/plugin/src/update-ts-config-paths.ts
new file mode 100644
index 0000000..8aaddfc
--- /dev/null
+++ b/tools/generators/plugin/src/update-ts-config-paths.ts
@@ -0,0 +1,16 @@
+import { Tree, updateJson } from '@nx/devkit';
+
+export const updateTsconfigPaths = (tree: Tree, packageName: string, projectRoot: string): void => {
+  updateJson(tree, 'tsconfig.base.json', (json) => {
+    if (!json.compilerOptions) {
+      json.compilerOptions = {};
+    }
+    if (!json.compilerOptions.paths) {
+      json.compilerOptions.paths = {};
+    }
+
+    json.compilerOptions.paths[packageName] = [`${projectRoot}/src/index.ts`];
+
+    return json;
+  });
+};
diff --git a/tools/generators/plugin/tsconfig.json b/tools/generators/plugin/tsconfig.json
new file mode 100644
index 0000000..f0401a8
--- /dev/null
+++ b/tools/generators/plugin/tsconfig.json
@@ -0,0 +1,9 @@
+{
+  "extends": "../../../tsconfig.base.json",
+  "compilerOptions": {
+    "typeRoots": ["../../../types", "../../../node_modules/@types"],
+    "types": ["node"]
+  },
+  "include": ["**/*.ts", "**/*.tsx"],
+  "exclude": ["node_modules", "dist", "**/*.test.ts"]
+}
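// Illustrative sketch: the path mapping updateTsconfigPaths writes into
// tsconfig.base.json. The package and root names here are hypothetical.
import { createTreeWithEmptyWorkspace } from '@nx/devkit/testing';
import { updateTsconfigPaths } from './update-ts-config-paths';

const tree = createTreeWithEmptyWorkspace();
tree.write('tsconfig.base.json', JSON.stringify({ compilerOptions: {} }));
updateTsconfigPaths(tree, '@pantheon-org/opencode-demo-plugin', 'packages/opencode-demo-plugin');

// tsconfig.base.json now contains:
// {
//   "compilerOptions": {
//     "paths": {
//       "@pantheon-org/opencode-demo-plugin": ["packages/opencode-demo-plugin/src/index.ts"]
//     }
//   }
// }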
["../../types/bun-test-shim.d.ts"], + "baseUrl": "." + }, + "include": ["src/**/*.ts", "src/**/*.tsx", "src/**/*.spec.ts", "src/**/*.test.ts"], + "exclude": ["node_modules", "dist"] +}