diff --git a/README.md b/README.md index 6a23bd7..19bb91a 100644 --- a/README.md +++ b/README.md @@ -54,6 +54,7 @@ Create `~/.config/opencode/opencode-auth-sync.json`: | `secretName` | string | `OPENCODE_AUTH_JSON` | GitHub secret name | | `repositories` | string[] | `[]` | Repositories to sync (`owner/repo` format) | | `debounceMs` | number | `1000` | Debounce delay for file changes | +| `authFileHashes` | object | (auto-managed) | Per-repository SHA-256 hashes of last synced auth.json (managed by plugin) | ## Prerequisites @@ -64,8 +65,11 @@ Create `~/.config/opencode/opencode-auth-sync.json`: 1. Plugin watches `~/.local/share/opencode/auth.json` for changes 2. When tokens refresh, the file updates -3. Plugin syncs the entire auth file to configured repositories via `gh secret set` -4. Toast notifications show sync status +3. Plugin computes a SHA-256 hash of the file content and compares it against the stored hash +4. If the hash differs (content actually changed), syncs to configured repositories via `gh secret set` +5. Toast notifications show sync status + +The hash-based change detection reduces unnecessary GitHub API calls when file metadata changes but content remains the same. 
## Using the Secret in GitHub Actions diff --git a/index.ts b/index.ts index 1e50102..043f96c 100644 --- a/index.ts +++ b/index.ts @@ -1,8 +1,8 @@ import type { Plugin, PluginInput } from "@opencode-ai/plugin" -import { loadConfig, expandPath } from "./lib/config" +import { loadConfig, expandPath, getConfigPath, saveConfig } from "./lib/config" import { watchCredentials } from "./lib/watcher" import { syncToRepositories, verifyGhAuth } from "./lib/sync" -import type { OpenCodeAuth } from "./lib/types" +import type { AuthSyncConfig, OpenCodeAuth } from "./lib/types" const PLUGIN_NAME = "opencode-auth-sync" @@ -50,26 +50,50 @@ export const OpenCodeAuthSyncPlugin: Plugin = async ({ $, client, directory }: P } const credentialsPath = expandPath(config.credentialsPath) - let isFirstSync = true + const configPath = getConfigPath(directory) + let currentHashes: Record<string, string> = { ...config.authFileHashes } let stopWatching: (() => void) | null = null - const handleCredentialsChange = async (_credentials: OpenCodeAuth, raw: string) => { - const action = isFirstSync ? 
"Initial sync" : "Syncing" - showToast(`${action} to ${config.repositories.length} repo(s)...`, "info", 2000) + const persistHashes = async (hashes: Record<string, string>) => { + if (!configPath) return - const summary = await syncToRepositories($, config.repositories, config.secretName, raw) + try { + currentHashes = { ...hashes } + const updatedConfig: AuthSyncConfig = { ...config, authFileHashes: currentHashes } + await saveConfig(configPath, updatedConfig) + } catch { + showToast("Could not save config, sync may repeat on restart", "warning", 3000) + } + } - if (summary.failed === 0) { - showToast(`Synced to ${summary.successful} repo(s)`, "success", 3000) - } else { - const failedRepos = summary.results - .filter((r) => !r.success) - .map((r) => r.repository) - .join(", ") - showToast(`${summary.successful} synced, ${summary.failed} failed: ${failedRepos}`, "warning", 5000) + const handleCredentialsChange = async (_credentials: OpenCodeAuth, raw: string, hash: string) => { + const reposNeedingSync = config.repositories.filter( + (repo) => currentHashes[repo] !== hash + ) + + if (reposNeedingSync.length === 0) { + return + } + + const isInitialSync = Object.keys(currentHashes).length === 0 + const action = isInitialSync ? 
"Initial sync" : "Syncing" + showToast(`${action} to ${reposNeedingSync.length} repo(s)...`, "info", 2000) + + const summary = await syncToRepositories($, reposNeedingSync, config.secretName, raw) + + const updatedHashes = { ...currentHashes } + for (const result of summary.results) { + if (result.success) { + updatedHashes[result.repository] = hash + } else { + showToast(`Failed to sync to ${result.repository}: ${result.error}`, "error", 5000) + } } - isFirstSync = false + if (summary.successful > 0) { + await persistHashes(updatedHashes) + showToast(`Synced to ${summary.successful} repo(s)`, "success", 3000) + } } const handleError = async (error: Error) => { @@ -82,7 +106,9 @@ export const OpenCodeAuthSyncPlugin: Plugin = async ({ $, client, directory }: P onCredentialsChange: handleCredentialsChange, onError: handleError, }, - config.debounceMs + { + debounceMs: config.debounceMs, + } ) return {} diff --git a/lib/config.test.ts b/lib/config.test.ts index 090b9e1..9df7e9a 100644 --- a/lib/config.test.ts +++ b/lib/config.test.ts @@ -1,8 +1,8 @@ import { describe, test, expect, beforeEach, afterEach } from "bun:test" -import { mkdirSync, writeFileSync, rmSync, existsSync } from "fs" +import { mkdirSync, writeFileSync, rmSync, existsSync, readFileSync } from "fs" import { join } from "path" import { tmpdir } from "os" -import { loadPluginConfigSync, mergeConfig, DEFAULT_CONFIG } from "./config" +import { loadPluginConfigSync, mergeConfig, DEFAULT_CONFIG, saveConfig, getConfigPath } from "./config" import type { AuthSyncConfig } from "./types" describe("loadPluginConfigSync", () => { @@ -239,3 +239,192 @@ describe("DEFAULT_CONFIG", () => { expect(DEFAULT_CONFIG.debounceMs).toBe(1000) }) }) + +describe("saveConfig", () => { + const testDir = join(tmpdir(), `opencode-auth-sync-save-${Date.now()}`) + const testConfigPath = join(testDir, "config.json") + + beforeEach(() => { + mkdirSync(testDir, { recursive: true }) + }) + + afterEach(() => { + if (existsSync(testDir)) 
{ + rmSync(testDir, { recursive: true }) + } + }) + + test("writes config to file with proper JSON formatting", async () => { + const config: Partial<AuthSyncConfig> = { + enabled: true, + repositories: ["org/repo"], + secretName: "TEST_SECRET", + } + + await saveConfig(testConfigPath, config) + + const content = readFileSync(testConfigPath, "utf-8") + const parsed = JSON.parse(content) + + expect(parsed).toEqual(config) + expect(content).toContain("\n") + }) + + test("saves config with authFileHashes field", async () => { + const config: Partial<AuthSyncConfig> = { + enabled: true, + repositories: ["org/repo"], + authFileHashes: { "org/repo": "abc123def456" }, + } + + await saveConfig(testConfigPath, config) + + const content = readFileSync(testConfigPath, "utf-8") + const parsed = JSON.parse(content) + + expect(parsed.authFileHashes).toEqual({ "org/repo": "abc123def456" }) + }) + + test("overwrites existing config file", async () => { + const oldConfig = { enabled: false, repositories: ["old/repo"] } + writeFileSync(testConfigPath, JSON.stringify(oldConfig)) + + const newConfig: Partial<AuthSyncConfig> = { + enabled: true, + repositories: ["new/repo"], + authFileHashes: { "new/repo": "newhash123" }, + } + + await saveConfig(testConfigPath, newConfig) + + const content = readFileSync(testConfigPath, "utf-8") + const parsed = JSON.parse(content) + + expect(parsed.enabled).toBe(true) + expect(parsed.repositories).toEqual(["new/repo"]) + expect(parsed.authFileHashes).toEqual({ "new/repo": "newhash123" }) + }) +}) + +describe("getConfigPath", () => { + const testDir = join(tmpdir(), `opencode-auth-sync-path-${Date.now()}`) + const projectConfigPath = join(testDir, "opencode-auth-sync.json") + + beforeEach(() => { + mkdirSync(testDir, { recursive: true }) + }) + + afterEach(() => { + if (existsSync(testDir)) { + rmSync(testDir, { recursive: true }) + } + }) + + test("returns project config path when it exists", () => { + writeFileSync(projectConfigPath, JSON.stringify({ enabled: true })) + + const result = 
getConfigPath(testDir) + expect(result).toBe(projectConfigPath) + }) + + test("returns string when some config file exists", () => { + const result = getConfigPath(testDir) + expect(typeof result === "string" || result === null).toBe(true) + }) +}) + +describe("authFileHashes in config", () => { + const testDir = join(tmpdir(), `opencode-auth-sync-hash-${Date.now()}`) + const testConfigPath = join(testDir, "config.json") + + beforeEach(() => { + mkdirSync(testDir, { recursive: true }) + }) + + afterEach(() => { + if (existsSync(testDir)) { + rmSync(testDir, { recursive: true }) + } + }) + + test("loads config with authFileHashes field", () => { + const config = { + enabled: true, + repositories: ["org/repo"], + authFileHashes: { "org/repo": "sha256hashvalue123" }, + } + writeFileSync(testConfigPath, JSON.stringify(config)) + + const result = loadPluginConfigSync(testConfigPath) + + expect(result.authFileHashes).toEqual({ "org/repo": "sha256hashvalue123" }) + }) + + test("backward compatibility: loads config without authFileHashes field", () => { + const config = { + enabled: true, + repositories: ["org/repo"], + secretName: "SECRET", + } + writeFileSync(testConfigPath, JSON.stringify(config)) + + const result = loadPluginConfigSync(testConfigPath) + + expect(result.authFileHashes).toBeUndefined() + expect(result.enabled).toBe(true) + expect(result.repositories).toEqual(["org/repo"]) + }) + + test("mergeConfig preserves authFileHashes from existing config", () => { + const existing: Partial<AuthSyncConfig> = { + enabled: true, + repositories: ["old/repo"], + authFileHashes: { "old/repo": "existinghash" }, + } + const updates: Partial<AuthSyncConfig> = { + repositories: ["new/repo"], + } + + const result = mergeConfig(existing, updates) + + expect(result.authFileHashes).toEqual({ "old/repo": "existinghash" }) + expect(result.repositories).toEqual(["new/repo"]) + }) + + test("mergeConfig allows updating authFileHashes", () => { + const existing: Partial<AuthSyncConfig> = { + enabled: true, + authFileHashes: { 
"org/repo": "oldhash" }, + } + const updates: Partial<AuthSyncConfig> = { + authFileHashes: { "org/repo": "newhash", "org/repo2": "hash2" }, + } + + const result = mergeConfig(existing, updates) + + expect(result.authFileHashes).toEqual({ "org/repo": "newhash", "org/repo2": "hash2" }) + }) + + test("full workflow: load, update hashes, save, reload", async () => { + const initialConfig = { + enabled: true, + repositories: ["org/repo"], + secretName: "SECRET", + } + writeFileSync(testConfigPath, JSON.stringify(initialConfig)) + + const loaded = loadPluginConfigSync(testConfigPath) + expect(loaded.authFileHashes).toBeUndefined() + + const updated: Partial<AuthSyncConfig> = { + ...loaded, + authFileHashes: { "org/repo": "newlycomputedhash" }, + } + await saveConfig(testConfigPath, updated) + + const reloaded = loadPluginConfigSync(testConfigPath) + expect(reloaded.authFileHashes).toEqual({ "org/repo": "newlycomputedhash" }) + expect(reloaded.enabled).toBe(true) + expect(reloaded.repositories).toEqual(["org/repo"]) + }) +}) diff --git a/lib/config.ts b/lib/config.ts index 41a4994..1427ddd 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -1,4 +1,4 @@ -import { readFile } from "fs/promises" +import { readFile, writeFile } from "fs/promises" import { existsSync, readFileSync } from "fs" import { homedir } from "os" import { join } from "path" @@ -62,3 +62,26 @@ export function expandPath(path: string): string { } return path } + +export function getConfigPath(projectDir?: string): string | null { + const locations = [ + projectDir && join(projectDir, "opencode-auth-sync.json"), + join(homedir(), ".config", "opencode", "opencode-auth-sync.json"), + ].filter(Boolean) as string[] + + for (const configPath of locations) { + if (existsSync(configPath)) { + return configPath + } + } + + return null +} + +export async function saveConfig( + configPath: string, + config: Partial<AuthSyncConfig> +): Promise<void> { + const content = JSON.stringify(config, null, 2) + await writeFile(configPath, content, "utf-8") +} diff --git 
a/lib/types.ts b/lib/types.ts index 66b882c..e021ae7 100644 --- a/lib/types.ts +++ b/lib/types.ts @@ -4,6 +4,7 @@ export interface AuthSyncConfig { secretName: string repositories: string[] debounceMs?: number + authFileHashes?: Record<string, string> } export interface OAuthEntry { diff --git a/lib/watcher.test.ts b/lib/watcher.test.ts new file mode 100644 index 0000000..902251a --- /dev/null +++ b/lib/watcher.test.ts @@ -0,0 +1,284 @@ +import { describe, test, expect, beforeEach, afterEach } from "bun:test" +import { mkdirSync, writeFileSync, rmSync, existsSync } from "fs" +import { join } from "path" +import { tmpdir } from "os" +import { computeHash, watchCredentials } from "./watcher" +import type { OpenCodeAuth } from "./types" + +describe("computeHash", () => { + test("returns consistent SHA-256 hash for same content", () => { + const content = '{"anthropic":{"type":"oauth","access":"token123"}}' + const hash1 = computeHash(content) + const hash2 = computeHash(content) + + expect(hash1).toBe(hash2) + expect(hash1).toHaveLength(64) + }) + + test("returns different hash for different content", () => { + const content1 = '{"anthropic":{"access":"token1"}}' + const content2 = '{"anthropic":{"access":"token2"}}' + + const hash1 = computeHash(content1) + const hash2 = computeHash(content2) + + expect(hash1).not.toBe(hash2) + }) + + test("returns valid hex string", () => { + const hash = computeHash("test content") + expect(hash).toMatch(/^[a-f0-9]{64}$/) + }) + + test("handles empty string", () => { + const hash = computeHash("") + expect(hash).toHaveLength(64) + expect(hash).toMatch(/^[a-f0-9]{64}$/) + }) + + test("handles unicode content", () => { + const hash = computeHash('{"name":"日本語","emoji":"🎉"}') + expect(hash).toHaveLength(64) + expect(hash).toMatch(/^[a-f0-9]{64}$/) + }) + + test("whitespace-only differences produce different hashes", () => { + const compact = '{"key":"value"}' + const pretty = '{ "key": "value" }' + + 
expect(computeHash(compact)).not.toBe(computeHash(pretty)) + }) +}) + +describe("watchCredentials hash comparison", () => { + let testDir: string + let authFilePath: string + + beforeEach(() => { + testDir = join(tmpdir(), `watcher-test-${Date.now()}-${Math.random()}`) + authFilePath = join(testDir, "auth.json") + mkdirSync(testDir, { recursive: true }) + }) + + afterEach(() => { + if (existsSync(testDir)) { + rmSync(testDir, { recursive: true }) + } + }) + + test("triggers callback on initial file when no stored hash", async () => { + const authContent = '{"anthropic":{"type":"oauth","access":"initial"}}' + writeFileSync(authFilePath, authContent) + + let callCount = 0 + let receivedRaw = "" + let receivedHash = "" + + const stop = watchCredentials( + authFilePath, + { + onCredentialsChange: (_credentials: OpenCodeAuth, raw: string, hash: string) => { + callCount++ + receivedRaw = raw + receivedHash = hash + }, + onError: () => {}, + }, + { debounceMs: 50 } + ) + + await new Promise((r) => setTimeout(r, 200)) + stop() + + expect(callCount).toBe(1) + expect(receivedRaw).toBe(authContent) + expect(receivedHash).toBe(computeHash(authContent)) + }) + + test("skips callback when content hash matches stored hash", async () => { + const authContent = '{"anthropic":{"type":"oauth","access":"unchanged"}}' + const storedHash = computeHash(authContent) + writeFileSync(authFilePath, authContent) + + let callCount = 0 + + const stop = watchCredentials( + authFilePath, + { + onCredentialsChange: () => { + callCount++ + }, + onError: () => {}, + }, + { debounceMs: 50, storedHash } + ) + + await new Promise((r) => setTimeout(r, 200)) + stop() + + expect(callCount).toBe(0) + }) + + test("triggers callback when content hash differs from stored hash", async () => { + const oldContent = '{"anthropic":{"access":"old"}}' + const newContent = '{"anthropic":{"access":"new"}}' + const storedHash = computeHash(oldContent) + + writeFileSync(authFilePath, newContent) + + let callCount = 0 + 
let receivedRaw = "" + let receivedHash = "" + + const stop = watchCredentials( + authFilePath, + { + onCredentialsChange: (_credentials: OpenCodeAuth, raw: string, hash: string) => { + callCount++ + receivedRaw = raw + receivedHash = hash + }, + onError: () => {}, + }, + { debounceMs: 50, storedHash } + ) + + await new Promise((r) => setTimeout(r, 200)) + stop() + + expect(callCount).toBe(1) + expect(receivedRaw).toBe(newContent) + expect(receivedHash).toBe(computeHash(newContent)) + }) + + test("skips duplicate changes with same content", async () => { + const authContent = '{"test":"data"}' + writeFileSync(authFilePath, authContent) + + let callCount = 0 + + const stop = watchCredentials( + authFilePath, + { + onCredentialsChange: () => { + callCount++ + }, + onError: () => {}, + }, + { debounceMs: 50 } + ) + + await new Promise((r) => setTimeout(r, 200)) + expect(callCount).toBe(1) + + writeFileSync(authFilePath, authContent) + await new Promise((r) => setTimeout(r, 200)) + expect(callCount).toBe(1) + + writeFileSync(authFilePath, authContent) + await new Promise((r) => setTimeout(r, 200)) + + stop() + + expect(callCount).toBe(1) + }) + + test("provides correct hash to callback on initial read", async () => { + const content = '{"version":1}' + const expectedHash = computeHash(content) + + writeFileSync(authFilePath, content) + + let receivedHash = "" + + const stop = watchCredentials( + authFilePath, + { + onCredentialsChange: (_credentials: OpenCodeAuth, _raw: string, hash: string) => { + receivedHash = hash + }, + onError: () => {}, + }, + { debounceMs: 50 } + ) + + await new Promise((r) => setTimeout(r, 800)) + stop() + + expect(receivedHash).toBe(expectedHash) + }) + + test("calls onError for invalid JSON", async () => { + writeFileSync(authFilePath, "not valid json {{{") + + let changeCount = 0 + let errorCount = 0 + + const stop = watchCredentials( + authFilePath, + { + onCredentialsChange: () => { + changeCount++ + }, + onError: () => { + errorCount++ + 
}, + }, + { debounceMs: 50 } + ) + + await new Promise((r) => setTimeout(r, 200)) + stop() + + expect(changeCount).toBe(0) + expect(errorCount).toBe(1) + }) + + test("passes parsed credentials object to callback", async () => { + const authData: OpenCodeAuth = { + anthropic: { type: "oauth", access: "token123", refresh: "refresh123", expires: 1234567890 }, + } + writeFileSync(authFilePath, JSON.stringify(authData)) + + let receivedCredentials: OpenCodeAuth = {} + + const stop = watchCredentials( + authFilePath, + { + onCredentialsChange: (credentials: OpenCodeAuth) => { + receivedCredentials = credentials + }, + onError: () => {}, + }, + { debounceMs: 50 } + ) + + await new Promise((r) => setTimeout(r, 200)) + stop() + + expect(receivedCredentials).toEqual(authData) + }) + + test("backward compatibility: works with storedHash undefined", async () => { + const authContent = '{"test":"backward-compat"}' + writeFileSync(authFilePath, authContent) + + let callCount = 0 + + const stop = watchCredentials( + authFilePath, + { + onCredentialsChange: () => { + callCount++ + }, + onError: () => {}, + }, + { debounceMs: 50, storedHash: undefined } + ) + + await new Promise((r) => setTimeout(r, 200)) + stop() + + expect(callCount).toBe(1) + }) +}) diff --git a/lib/watcher.ts b/lib/watcher.ts index 71d5f72..f90d701 100644 --- a/lib/watcher.ts +++ b/lib/watcher.ts @@ -1,19 +1,30 @@ import chokidar from "chokidar" +import { createHash } from "crypto" import { readFile } from "fs/promises" import type { OpenCodeAuth } from "./types" +export function computeHash(content: string): string { + return createHash("sha256").update(content).digest("hex") +} + export interface WatcherCallbacks { - onCredentialsChange: (credentials: OpenCodeAuth, raw: string) => void + onCredentialsChange: (credentials: OpenCodeAuth, raw: string, hash: string) => void onError: (error: Error) => void } +export interface WatcherOptions { + debounceMs?: number + storedHash?: string +} + export function 
watchCredentials( credentialsPath: string, callbacks: WatcherCallbacks, - debounceMs: number = 1000 + options: WatcherOptions = {} ): () => void { + const { debounceMs = 1000, storedHash } = options let debounceTimer: ReturnType<typeof setTimeout> | null = null - let lastContent: string | null = null + let lastHash: string | null = storedHash ?? null const watcher = chokidar.watch(credentialsPath, { persistent: true, @@ -27,14 +38,15 @@ export function watchCredentials( const handleChange = async () => { try { const content = await readFile(credentialsPath, "utf-8") + const currentHash = computeHash(content) - if (content === lastContent) { + if (currentHash === lastHash) { return } - lastContent = content + lastHash = currentHash const credentials = JSON.parse(content) as OpenCodeAuth - callbacks.onCredentialsChange(credentials, content) + callbacks.onCredentialsChange(credentials, content, currentHash) } catch (error) { callbacks.onError(error as Error) } diff --git a/schema.json b/schema.json index 2e54be0..47f9fa3 100644 --- a/schema.json +++ b/schema.json @@ -36,6 +36,13 @@ "default": 1000, "minimum": 100, "description": "Debounce delay in milliseconds for file changes" + }, + "authFileHashes": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "description": "Per-repository SHA-256 hashes of last synced auth.json (managed by plugin)" } }, "required": ["repositories"],