diff --git a/content/docs/server/edge.mdx b/content/docs/server/edge.mdx new file mode 100644 index 00000000..cda0a591 --- /dev/null +++ b/content/docs/server/edge.mdx @@ -0,0 +1,214 @@ +--- +title: "Edge Runtime" +description: Edge Runtime Support +--- + +ObjectQL runs natively on edge runtimes. The `@objectql/edge-adapter` package provides runtime detection, capability validation, and automatic driver binding resolution for serverless edge environments. + +## 1. Supported Runtimes + +| Runtime | Detection Signal | Default Driver | +|----------------------|-------------------------------------------|---------------------------------| +| Cloudflare Workers | `globalThis.caches` + `WebSocketPair` | `@objectql/driver-sqlite-wasm` | +| Deno Deploy | `globalThis.Deno` | `@objectql/driver-pg-wasm` | +| Vercel Edge | `globalThis.EdgeRuntime` | `@objectql/driver-memory` | +| Bun | `globalThis.Bun` | `@objectql/driver-sqlite-wasm` | +| Node.js | Default fallback | `@objectql/driver-sql` | + +## 2. Installation + +```bash +pnpm add @objectql/edge-adapter +``` + +## 3. Basic Configuration + +Register the `EdgeAdapterPlugin` in your kernel. It auto-detects the runtime and resolves driver bindings. + +```typescript +import { EdgeAdapterPlugin } from '@objectql/edge-adapter'; +import { createKernel } from '@objectstack/runtime'; + +const kernel = createKernel({ + plugins: [ + new EdgeAdapterPlugin() + ] +}); + +await kernel.start(); +``` + +## 4. Runtime Detection + +The adapter inspects `globalThis` to detect the active runtime. Detection checks run in order from most specific to least specific, preventing false positives. + +```typescript +import { detectRuntime } from '@objectql/edge-adapter'; + +const runtime = detectRuntime(); +// => 'cloudflare-workers' | 'deno-deploy' | 'vercel-edge' | 'bun' | 'node' +``` + +You can override auto-detection by passing an explicit runtime: + +```typescript +new EdgeAdapterPlugin({ + runtime: 'cloudflare-workers', +}); +``` + +## 5. 
Capability Validation + +Before initializing drivers, validate that the runtime meets your requirements. The plugin throws on startup if validation fails. + +```typescript +import { EdgeAdapterPlugin } from '@objectql/edge-adapter'; + +new EdgeAdapterPlugin({ + requirements: { + wasm: true, + persistentStorage: true, + webSocket: true, + minExecutionTime: 30000, + }, +}); +``` + +You can also validate programmatically: + +```typescript +import { detectRuntime, validateCapabilities } from '@objectql/edge-adapter'; + +const runtime = detectRuntime(); +const result = validateCapabilities(runtime, { + wasm: true, + persistentStorage: true, +}); + +if (!result.valid) { + console.error('Missing:', result.missing); +} +``` + +## 6. Driver Bindings + +Each runtime has a recommended default driver. Override bindings for custom datasource configurations. + +```typescript +new EdgeAdapterPlugin({ + bindings: { + default: { + driver: '@objectql/driver-sqlite-wasm', + binding: 'DB', + config: { pragma: { journal_mode: 'WAL' } }, + }, + analytics: { + driver: '@objectql/driver-memory', + config: { maxSize: 5000 }, + }, + }, +}); +``` + +When no explicit bindings are provided, the adapter generates a `default` binding using the recommended driver for the detected runtime. + +## 7. Platform Examples + +### Cloudflare Workers + +```typescript +import { EdgeAdapterPlugin } from '@objectql/edge-adapter'; +import { createKernel } from '@objectstack/runtime'; + +const kernel = createKernel({ + plugins: [ + new EdgeAdapterPlugin({ + runtime: 'cloudflare-workers', + bindings: { + default: { + driver: '@objectql/driver-sqlite-wasm', + binding: 'DB', // Cloudflare D1 binding name + config: {}, + }, + }, + requirements: { + wasm: true, + persistentStorage: true, + }, + }) + ] +}); + +export default { + async fetch(request: Request, env: Record) { + await kernel.start(); + // Handle request... 
+ return new Response('OK'); + }, +}; +``` + +### Deno Deploy + +```typescript +import { EdgeAdapterPlugin } from '@objectql/edge-adapter'; +import { createKernel } from '@objectstack/runtime'; + +const kernel = createKernel({ + plugins: [ + new EdgeAdapterPlugin({ + runtime: 'deno-deploy', + bindings: { + default: { + driver: '@objectql/driver-pg-wasm', + config: { + connectionString: Deno.env.get('DATABASE_URL'), + }, + }, + }, + }) + ] +}); + +await kernel.start(); + +Deno.serve(async (request: Request) => { + // Handle request... + return new Response('OK'); +}); +``` + +### Vercel Edge + +```typescript +import { EdgeAdapterPlugin } from '@objectql/edge-adapter'; +import { createKernel } from '@objectstack/runtime'; + +export const config = { runtime: 'edge' }; + +const kernel = createKernel({ + plugins: [ + new EdgeAdapterPlugin({ + runtime: 'vercel-edge', + maxExecutionTime: 25000, + requestScoped: true, + }) + ] +}); + +export default async function handler(request: Request) { + await kernel.start(); + // Handle request... + return new Response('OK'); +} +``` + +## 8. 
Plugin Options + +| Option | Type | Default | Description | +|-------------------|-------------------------|----------------|-------------------------------------------| +| `runtime` | `EdgeRuntime` | Auto-detected | Override the detected runtime | +| `bindings` | `Record` | Default driver | Explicit driver bindings per datasource | +| `maxExecutionTime`| `number` | From runtime | Override max execution time (ms) | +| `requestScoped` | `boolean` | `true` | Enable request-scoped connections | +| `requirements` | `CapabilityRequirement` | `undefined` | Capability requirements to validate | diff --git a/content/docs/server/meta.json b/content/docs/server/meta.json index 66168ff1..42e5d995 100644 --- a/content/docs/server/meta.json +++ b/content/docs/server/meta.json @@ -6,6 +6,8 @@ "integration", "security", "microservices", - "plugins" + "plugins", + "edge", + "sync" ] } diff --git a/content/docs/server/sync.mdx b/content/docs/server/sync.mdx new file mode 100644 index 00000000..59f03752 --- /dev/null +++ b/content/docs/server/sync.mdx @@ -0,0 +1,324 @@ +--- +title: "Offline Sync" +description: Offline-First Sync +--- + +ObjectQL provides a complete offline-first sync solution. The client-side `@objectql/plugin-sync` package records mutations locally, while the server-side `@objectql/protocol-sync` package handles conflict detection, version tracking, and delta computation. + +## 1. 
Architecture Overview + +``` +┌─────────────────────────────────┐ ┌──────────────────────────────────┐ +│ Client (Edge/Browser) │ │ Server │ +│ │ │ │ +│ ┌────────────┐ ┌───────────┐ │ │ ┌─────────────┐ ┌───────────┐ │ +│ │ Mutation │ │ Sync │ │ HTTP │ │ Sync │ │ Change │ │ +│ │ Logger │──│ Engine │─┼──────┼──│ Handler │──│ Log │ │ +│ └────────────┘ └───────────┘ │ │ └─────────────┘ └───────────┘ │ +│ ┌───────────┐ │ │ ┌─────────────┐ │ +│ │ Conflict │ │ │ │ Version │ │ +│ │ Resolver │ │ │ │ Store │ │ +│ └───────────┘ │ │ └─────────────┘ │ +└─────────────────────────────────┘ └──────────────────────────────────┘ +``` + +## 2. Client-Side Setup + +Install the sync plugin and register it with your kernel. + +```bash +pnpm add @objectql/plugin-sync +``` + +```typescript +import { SyncPlugin } from '@objectql/plugin-sync'; +import { createKernel } from '@objectstack/runtime'; + +const transport = { + async push(request) { + const res = await fetch('/api/sync/push', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(request), + }); + return res.json(); + }, +}; + +const kernel = createKernel({ + plugins: [ + new SyncPlugin({ + clientId: 'device-abc-123', + transport, + defaultConfig: { + enabled: true, + strategy: 'last-write-wins', + direction: 'bidirectional', + debounce_ms: 2000, + batch_size: 50, + }, + }) + ] +}); + +await kernel.start(); +``` + +## 3. Server-Side Setup + +Install the sync protocol and register the server-side handler. 
+ +```bash +pnpm add @objectql/protocol-sync +``` + +```typescript +import { SyncProtocolPlugin, type RecordResolver } from '@objectql/protocol-sync'; +import { createKernel } from '@objectstack/runtime'; + +const kernel = createKernel({ + plugins: [ + new SyncProtocolPlugin({ + endpoint: { + enabled: true, + path: '/api/sync', + maxMutationsPerRequest: 100, + changeLogRetentionDays: 30, + }, + conflictFields: new Map([ + ['task', ['title', 'status', 'assignee']], + ['project', ['name', 'budget']], + ]), + }) + ] +}); + +await kernel.start(); +``` + +Expose the sync endpoint in your HTTP handler: + +```typescript +import { SyncHandler, type RecordResolver } from '@objectql/protocol-sync'; + +const resolver: RecordResolver = { + async getRecord(objectName, recordId) { + return db.find(objectName, recordId); + }, + async applyMutation(mutation, serverVersion) { + if (mutation.operation === 'create') { + await db.create(mutation.objectName, { + ...mutation.data, + _id: mutation.recordId, + _version: serverVersion, + }); + } else if (mutation.operation === 'update') { + await db.update(mutation.objectName, mutation.recordId, { + ...mutation.data, + _version: serverVersion, + }); + } else if (mutation.operation === 'delete') { + await db.delete(mutation.objectName, mutation.recordId); + } + }, +}; + +// In your route handler: +app.post('/api/sync/push', async (req, res) => { + const handler = kernel.syncProtocol.handler; + const response = await handler.handlePush(req.body, resolver); + res.json(response); +}); +``` + +## 4. Conflict Resolution Strategies + +Configure conflict resolution per object or globally via `SyncConfig.strategy`. + +### Last-Write-Wins (LWW) + +The default strategy. Compares client and server timestamps — the most recent mutation wins. + +```typescript +{ + strategy: 'last-write-wins' +} +``` + +### CRDT (Field-Level Merge) + +Performs field-level LWW-Register merge. Non-conflicting fields from the client are merged with the server record. 
Conflicting fields retain the server value. + +```typescript +{ + strategy: 'crdt' +} +``` + +### Manual Resolution + +Flags conflicts for manual resolution. Provide a callback to resolve or defer. + +```typescript +import { SyncEngine } from '@objectql/plugin-sync'; + +const engine = new SyncEngine({ + clientId: 'device-abc-123', + transport, + config: { enabled: true, strategy: 'manual' }, + onConflict(conflict) { + // Return merged data to resolve + return { + ...conflict.serverRecord, + ...conflict.clientMutation.data, + }; + // Or return undefined to keep as unresolved conflict + }, +}); +``` + +## 5. MutationLogger + +The `MutationLogger` is the client-side append-only log that records all mutations while offline. + +```typescript +import { MutationLogger } from '@objectql/plugin-sync'; + +const logger = new MutationLogger('device-abc-123'); + +// Record mutations +logger.append({ + objectName: 'task', + recordId: 'task-1', + operation: 'create', + data: { title: 'New task', status: 'pending' }, + baseVersion: null, +}); + +logger.append({ + objectName: 'task', + recordId: 'task-1', + operation: 'update', + data: { status: 'in_progress' }, + baseVersion: 1, +}); + +// Query pending mutations +const pending = logger.getPending(); // All pending +const taskPending = logger.getPendingForObject('task'); // Per-object + +// Acknowledge after sync +logger.acknowledge(['mutation-id-1', 'mutation-id-2']); +``` + +## 6. Sync Configuration in object.yml + +Enable per-object sync in your YAML metadata: + +```yaml +name: task +fields: + title: + type: string + status: + type: select + options: [pending, in_progress, completed] + assignee: + type: lookup + reference_to: users + +sync: + enabled: true + strategy: crdt + direction: bidirectional + debounce_ms: 2000 + batch_size: 25 +``` + +Server-side conflict fields can also be declared: + +```yaml +name: task +sync: + enabled: true + conflict_fields: + - title + - status + - assignee +``` + +## 7. 
End-to-End Sync Flow + +A complete example demonstrating client mutation, sync, and server-side processing. + +```typescript +// --- Client Side --- +import { SyncPlugin } from '@objectql/plugin-sync'; + +const syncPlugin = new SyncPlugin({ + clientId: 'mobile-device-42', + transport: { + async push(request) { + const res = await fetch('https://api.example.com/sync/push', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${token}`, + }, + body: JSON.stringify(request), + }); + return res.json(); + }, + }, + defaultConfig: { + enabled: true, + strategy: 'last-write-wins', + direction: 'bidirectional', + debounce_ms: 3000, + batch_size: 50, + }, + listeners: [{ + onSyncStart() { + console.log('⏳ Sync started...'); + }, + onSyncComplete(response) { + console.log('✅ Sync complete. Checkpoint:', response.checkpoint); + }, + onSyncError(error) { + console.error('❌ Sync failed:', error.message); + }, + onConflict(conflicts) { + console.warn(`⚠️ ${conflicts.length} conflict(s) detected`); + }, + onServerChanges(changes) { + console.log(`📥 ${changes.length} server change(s) received`); + // Apply server changes to local store + }, + }], +}); + +// Get the sync engine for 'task' object +const taskEngine = syncPlugin.getEngine('task'); + +// Record offline mutations +taskEngine.recordMutation({ + objectName: 'task', + recordId: 'task-100', + operation: 'update', + data: { status: 'completed', completed_at: new Date().toISOString() }, + baseVersion: 3, +}); + +// When back online, trigger sync +await taskEngine.sync(); +``` + +## 8. 
Sync Event Listeners + +| Event | Payload | Description | +|--------------------|------------------------------|------------------------------------| +| `onSyncStart` | — | Sync cycle has begun | +| `onSyncComplete` | `SyncPushResponse` | Sync cycle completed successfully | +| `onSyncError` | `Error` | Sync cycle failed | +| `onConflict` | `readonly SyncConflict[]` | Conflicts detected during sync | +| `onServerChanges` | `readonly SyncServerChange[]`| Server-side changes since last sync| diff --git a/packages/drivers/pg-wasm/src/index.ts b/packages/drivers/pg-wasm/src/index.ts index 0e129098..c9b7d96f 100644 --- a/packages/drivers/pg-wasm/src/index.ts +++ b/packages/drivers/pg-wasm/src/index.ts @@ -76,7 +76,9 @@ export class PgWasmDriver implements Driver { indexes: true, connectionPooling: false, preparedStatements: true, - queryCache: false + queryCache: false, + mutationLog: true, + changeTracking: true }; private config: Required; diff --git a/packages/drivers/sqlite-wasm/src/index.ts b/packages/drivers/sqlite-wasm/src/index.ts index b8e471eb..d4f0e3f6 100644 --- a/packages/drivers/sqlite-wasm/src/index.ts +++ b/packages/drivers/sqlite-wasm/src/index.ts @@ -73,7 +73,9 @@ export class SqliteWasmDriver implements Driver { indexes: true, connectionPooling: false, preparedStatements: true, - queryCache: false + queryCache: false, + mutationLog: true, + changeTracking: true }; private config: SqliteWasmDriverConfig; diff --git a/packages/foundation/edge-adapter/README.md b/packages/foundation/edge-adapter/README.md new file mode 100644 index 00000000..03950842 --- /dev/null +++ b/packages/foundation/edge-adapter/README.md @@ -0,0 +1,171 @@ +# @objectql/edge-adapter + +Edge runtime adapter for ObjectQL — runtime detection, capability validation, and driver binding resolution. 
+ +## Features + +### 🌐 Runtime Detection +- Automatic detection of Cloudflare Workers, Deno Deploy, Vercel Edge, Bun, and Node.js +- Inspection of global objects with ordered specificity to avoid false positives + +### ✅ Capability Validation +- Validate runtime capabilities (WASM, persistent storage, WebSocket, scheduled triggers) +- Enforce minimum execution time requirements +- Pre-flight checks before driver initialization + +### 🔌 Driver Binding Resolution +- Automatic driver selection per runtime (e.g., `@objectql/driver-sqlite-wasm` for Cloudflare Workers) +- Explicit binding overrides for custom datasource configurations +- Default fallback bindings when no explicit config is provided + +### ⚡ Plugin Integration +- Implements `RuntimePlugin` for seamless kernel integration +- Registers edge context (runtime, capabilities, bindings) on the kernel +- Request-scoped connection support for stateless environments + +## Installation + +```bash +pnpm add @objectql/edge-adapter +``` + +## Quick Start + +```typescript +import { EdgeAdapterPlugin } from '@objectql/edge-adapter'; +import { createKernel } from '@objectstack/runtime'; + +const kernel = createKernel({ + plugins: [ + new EdgeAdapterPlugin({ + // Auto-detect runtime (or override manually) + // runtime: 'cloudflare-workers', + requirements: { + wasm: true, + persistentStorage: true, + }, + }) + ] +}); + +await kernel.start(); +``` + +## Runtime Detection + +The `detectRuntime()` function inspects `globalThis` to determine the current edge runtime. 
+ +```typescript +import { detectRuntime } from '@objectql/edge-adapter'; + +const runtime = detectRuntime(); +// => 'cloudflare-workers' | 'deno-deploy' | 'vercel-edge' | 'bun' | 'node' +``` + +Detection order (most specific first): + +| Runtime | Detection Signal | +|----------------------|-------------------------------------------| +| Cloudflare Workers | `globalThis.caches` + `globalThis.WebSocketPair` | +| Deno Deploy | `globalThis.Deno` | +| Bun | `globalThis.Bun` | +| Vercel Edge | `globalThis.EdgeRuntime` | +| Node.js | Default fallback | + +## Capability Validation + +Validate that the detected runtime meets your application requirements before initialization. + +```typescript +import { detectRuntime, validateCapabilities, getCapabilities } from '@objectql/edge-adapter'; + +const runtime = detectRuntime(); +const capabilities = getCapabilities(runtime); + +console.log(capabilities); +// { wasm: true, persistentStorage: true, webSocket: true, ... } + +const validation = validateCapabilities(runtime, { + wasm: true, + persistentStorage: true, + minExecutionTime: 30000, +}); + +if (!validation.valid) { + console.error('Missing capabilities:', validation.missing); +} +``` + +### CapabilityRequirement + +| Property | Type | Description | +|----------------------|-----------|-------------------------------------------| +| `wasm` | `boolean` | Requires WebAssembly support | +| `persistentStorage` | `boolean` | Requires persistent storage (KV, D1, etc.)| +| `webSocket` | `boolean` | Requires WebSocket support | +| `scheduledTriggers` | `boolean` | Requires scheduled/cron trigger support | +| `minExecutionTime` | `number` | Minimum execution time in milliseconds | + +## Driver Binding Configuration + +Each runtime has a recommended default driver. You can override bindings explicitly. 
+ +```typescript +import { EdgeAdapterPlugin } from '@objectql/edge-adapter'; + +const plugin = new EdgeAdapterPlugin({ + runtime: 'cloudflare-workers', + bindings: { + default: { + driver: '@objectql/driver-sqlite-wasm', + binding: 'DB', + config: { pragma: { journal_mode: 'WAL' } }, + }, + cache: { + driver: '@objectql/driver-memory', + config: { maxSize: 1000 }, + }, + }, +}); +``` + +### Default Driver Bindings + +| Runtime | Default Driver | +|----------------------|---------------------------------| +| Cloudflare Workers | `@objectql/driver-sqlite-wasm` | +| Deno Deploy | `@objectql/driver-pg-wasm` | +| Vercel Edge | `@objectql/driver-memory` | +| Bun | `@objectql/driver-sqlite-wasm` | +| Node.js | `@objectql/driver-sql` | + +## API Reference + +### Functions + +| Function | Description | +|------------------------|------------------------------------------------------| +| `detectRuntime()` | Returns the detected `EdgeRuntime` string | +| `getCapabilities(rt)` | Returns the `EdgeCapabilities` for a given runtime | +| `validateCapabilities(rt, req)` | Validates a runtime against `CapabilityRequirement` | +| `getDefaultDriver(rt)` | Returns the recommended driver package name | +| `resolveBindings(cfg)` | Resolves `ResolvedBinding[]` from `EdgeAdapterConfig` | + +### Classes + +| Class | Description | +|------------------------|------------------------------------------------------| +| `EdgeAdapterPlugin` | `RuntimePlugin` — detects runtime, validates, binds | + +### Types + +| Type | Description | +|-------------------------------|-----------------------------------------------| +| `EdgeAdapterPluginConfig` | Plugin constructor options | +| `CapabilityRequirement` | Minimum capability requirements | +| `CapabilityValidationResult` | Result of `validateCapabilities()` | +| `ResolvedBinding` | Resolved driver binding for a datasource | + +## License + +MIT diff --git a/packages/foundation/edge-adapter/package.json b/packages/foundation/edge-adapter/package.json 
new file mode 100644 index 00000000..ccdfe520 --- /dev/null +++ b/packages/foundation/edge-adapter/package.json @@ -0,0 +1,37 @@ +{ + "name": "@objectql/edge-adapter", + "version": "4.2.0", + "description": "Edge runtime adapter for ObjectQL - runtime detection, capability validation, and driver binding resolution", + "keywords": [ + "objectql", + "edge", + "runtime", + "cloudflare", + "deno", + "vercel", + "adapter" + ], + "license": "MIT", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + }, + "files": [ + "dist" + ], + "scripts": { + "build": "tsc", + "test": "vitest run" + }, + "dependencies": { + "@objectql/types": "workspace:*" + }, + "devDependencies": { + "typescript": "^5.3.0", + "vitest": "^1.0.4" + } +} diff --git a/packages/foundation/edge-adapter/src/binding-resolver.ts b/packages/foundation/edge-adapter/src/binding-resolver.ts new file mode 100644 index 00000000..12f2dcc9 --- /dev/null +++ b/packages/foundation/edge-adapter/src/binding-resolver.ts @@ -0,0 +1,68 @@ +/** + * ObjectQL + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { EdgeAdapterConfig, EdgeRuntime } from '@objectql/types'; + +/** + * Default driver recommendations per runtime. + */ +const DEFAULT_BINDINGS: Readonly> = { + 'cloudflare-workers': '@objectql/driver-sqlite-wasm', + 'deno-deploy': '@objectql/driver-pg-wasm', + 'vercel-edge': '@objectql/driver-memory', + 'bun': '@objectql/driver-sqlite-wasm', + 'node': '@objectql/driver-sql', +}; + +/** + * A fully resolved driver binding for a named datasource. + */ +export interface ResolvedBinding { + readonly datasource: string; + readonly driver: string; + readonly binding?: string; + readonly config: Record; +} + +/** + * Returns the recommended default driver for a given runtime. 
+ */ +export function getDefaultDriver(runtime: EdgeRuntime): string { + return DEFAULT_BINDINGS[runtime]; +} + +/** + * Resolves edge driver bindings from adapter configuration. + * + * If no explicit bindings are provided, a default binding is generated + * using the recommended driver for the target runtime. + */ +export function resolveBindings(config: EdgeAdapterConfig): readonly ResolvedBinding[] { + const bindings = config.bindings ?? {}; + const resolved: ResolvedBinding[] = []; + + for (const [datasource, binding] of Object.entries(bindings)) { + resolved.push({ + datasource, + driver: binding.driver, + binding: binding.binding, + config: binding.config ?? {}, + }); + } + + // If no bindings provided, resolve a default + if (resolved.length === 0) { + resolved.push({ + datasource: 'default', + driver: getDefaultDriver(config.runtime), + config: {}, + }); + } + + return resolved; +} diff --git a/packages/foundation/edge-adapter/src/capabilities.ts b/packages/foundation/edge-adapter/src/capabilities.ts new file mode 100644 index 00000000..34c6449f --- /dev/null +++ b/packages/foundation/edge-adapter/src/capabilities.ts @@ -0,0 +1,79 @@ +/** + * ObjectQL + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { EdgeCapabilities, EdgeRuntime } from '@objectql/types'; +import { EDGE_CAPABILITIES } from '@objectql/types'; + +/** + * Declares the minimum capabilities a runtime must provide. + */ +export interface CapabilityRequirement { + readonly wasm?: boolean; + readonly persistentStorage?: boolean; + readonly webSocket?: boolean; + readonly scheduledTriggers?: boolean; + readonly minExecutionTime?: number; +} + +/** + * Result of validating a runtime against a set of requirements. 
+ */ +export interface CapabilityValidationResult { + readonly valid: boolean; + readonly runtime: EdgeRuntime; + readonly capabilities: EdgeCapabilities; + readonly missing: readonly string[]; +} + +/** + * Returns the predefined capability profile for a given runtime. + */ +export function getCapabilities(runtime: EdgeRuntime): EdgeCapabilities { + return EDGE_CAPABILITIES[runtime]; +} + +/** + * Validates that a runtime satisfies the given capability requirements. + */ +export function validateCapabilities( + runtime: EdgeRuntime, + requirements: CapabilityRequirement, +): CapabilityValidationResult { + const capabilities = getCapabilities(runtime); + const missing: string[] = []; + + if (requirements.wasm && !capabilities.wasm) { + missing.push('WebAssembly'); + } + if (requirements.persistentStorage && !capabilities.persistentStorage) { + missing.push('Persistent Storage'); + } + if (requirements.webSocket && !capabilities.webSocket) { + missing.push('WebSocket'); + } + if (requirements.scheduledTriggers && !capabilities.scheduledTriggers) { + missing.push('Scheduled Triggers'); + } + if ( + requirements.minExecutionTime && + capabilities.maxExecutionTime !== undefined && + capabilities.maxExecutionTime !== Infinity && + capabilities.maxExecutionTime < requirements.minExecutionTime + ) { + missing.push( + `Execution time (need ${requirements.minExecutionTime}ms, max ${capabilities.maxExecutionTime}ms)`, + ); + } + + return { + valid: missing.length === 0, + runtime, + capabilities, + missing, + }; +} diff --git a/packages/foundation/edge-adapter/src/detector.ts b/packages/foundation/edge-adapter/src/detector.ts new file mode 100644 index 00000000..4d86a473 --- /dev/null +++ b/packages/foundation/edge-adapter/src/detector.ts @@ -0,0 +1,44 @@ +/** + * ObjectQL + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import type { EdgeRuntime } from '@objectql/types'; + +/** + * Detects the current edge runtime environment by inspecting global objects. + * + * Detection order matters — more specific checks come first to avoid + * false positives (e.g., Cloudflare Workers before generic edge checks). + */ +export function detectRuntime(): EdgeRuntime { + // Check for Cloudflare Workers (has WebSocketPair in global scope) + if ( + typeof globalThis !== 'undefined' && + 'caches' in globalThis && + typeof (globalThis as Record).WebSocketPair !== 'undefined' + ) { + return 'cloudflare-workers'; + } + + // Check for Deno + if (typeof (globalThis as Record).Deno !== 'undefined') { + return 'deno-deploy'; + } + + // Check for Bun + if (typeof (globalThis as Record).Bun !== 'undefined') { + return 'bun'; + } + + // Check for Vercel Edge Runtime + if (typeof (globalThis as Record).EdgeRuntime !== 'undefined') { + return 'vercel-edge'; + } + + // Default to Node.js + return 'node'; +} diff --git a/packages/foundation/edge-adapter/src/index.test.ts b/packages/foundation/edge-adapter/src/index.test.ts new file mode 100644 index 00000000..bd0ece24 --- /dev/null +++ b/packages/foundation/edge-adapter/src/index.test.ts @@ -0,0 +1,171 @@ +/** + * ObjectQL + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import { describe, it, expect } from 'vitest'; +import { detectRuntime } from './detector.js'; +import { getCapabilities, validateCapabilities } from './capabilities.js'; +import { resolveBindings, getDefaultDriver } from './binding-resolver.js'; +import { EdgeAdapterPlugin } from './plugin.js'; + +describe('Edge Runtime Detector', () => { + it('should detect Node.js as default runtime', () => { + expect(detectRuntime()).toBe('node'); + }); + + it('should detect Deno runtime', () => { + (globalThis as Record).Deno = { version: { deno: '1.0.0' } }; + expect(detectRuntime()).toBe('deno-deploy'); + delete (globalThis as Record).Deno; + }); + + it('should detect Bun runtime', () => { + (globalThis as Record).Bun = { version: '1.0.0' }; + expect(detectRuntime()).toBe('bun'); + delete (globalThis as Record).Bun; + }); + + it('should detect Vercel Edge runtime', () => { + (globalThis as Record).EdgeRuntime = 'edge'; + expect(detectRuntime()).toBe('vercel-edge'); + delete (globalThis as Record).EdgeRuntime; + }); +}); + +describe('Capability Validator', () => { + it('should return capabilities for all runtimes', () => { + const runtimes = ['cloudflare-workers', 'deno-deploy', 'vercel-edge', 'bun', 'node'] as const; + for (const runtime of runtimes) { + const caps = getCapabilities(runtime); + expect(caps).toBeDefined(); + expect(typeof caps.wasm).toBe('boolean'); + } + }); + + it('should validate passing requirements', () => { + const result = validateCapabilities('node', { wasm: true, persistentStorage: true }); + expect(result.valid).toBe(true); + expect(result.missing).toHaveLength(0); + }); + + it('should validate failing requirements for vercel-edge', () => { + const result = validateCapabilities('vercel-edge', { + persistentStorage: true, + webSocket: true, + }); + expect(result.valid).toBe(false); + expect(result.missing).toContain('Persistent Storage'); + expect(result.missing).toContain('WebSocket'); + }); + + it('should check execution time limits', () => 
{ + const result = validateCapabilities('vercel-edge', { + minExecutionTime: 60000, + }); + expect(result.valid).toBe(false); + expect(result.missing.length).toBeGreaterThan(0); + }); + + it('should pass execution time check for unlimited runtimes', () => { + const result = validateCapabilities('node', { + minExecutionTime: 60000, + }); + expect(result.valid).toBe(true); + }); +}); + +describe('Binding Resolver', () => { + it('should resolve explicit bindings', () => { + const resolved = resolveBindings({ + runtime: 'cloudflare-workers', + bindings: { + main: { + driver: '@objectql/driver-sqlite-wasm', + binding: 'D1_DATABASE', + config: { database: 'my-db' }, + }, + }, + }); + expect(resolved).toHaveLength(1); + expect(resolved[0].datasource).toBe('main'); + expect(resolved[0].driver).toBe('@objectql/driver-sqlite-wasm'); + expect(resolved[0].binding).toBe('D1_DATABASE'); + }); + + it('should provide default binding when none specified', () => { + const resolved = resolveBindings({ runtime: 'cloudflare-workers' }); + expect(resolved).toHaveLength(1); + expect(resolved[0].datasource).toBe('default'); + expect(resolved[0].driver).toBe('@objectql/driver-sqlite-wasm'); + }); + + it('should return default drivers for each runtime', () => { + expect(getDefaultDriver('cloudflare-workers')).toBe('@objectql/driver-sqlite-wasm'); + expect(getDefaultDriver('deno-deploy')).toBe('@objectql/driver-pg-wasm'); + expect(getDefaultDriver('vercel-edge')).toBe('@objectql/driver-memory'); + expect(getDefaultDriver('bun')).toBe('@objectql/driver-sqlite-wasm'); + expect(getDefaultDriver('node')).toBe('@objectql/driver-sql'); + }); +}); + +describe('EdgeAdapterPlugin', () => { + it('should have correct name and version', () => { + const plugin = new EdgeAdapterPlugin(); + expect(plugin.name).toBe('@objectql/edge-adapter'); + expect(plugin.version).toBe('4.2.0'); + }); + + it('should install with auto-detected runtime', async () => { + const plugin = new EdgeAdapterPlugin(); + const 
kernel: Record = {}; + await plugin.install({ engine: kernel }); + const edge = kernel['edge'] as Record; + expect(edge).toBeDefined(); + expect(edge.runtime).toBe('node'); + expect(edge.capabilities).toBeDefined(); + expect(edge.bindings).toBeDefined(); + }); + + it('should install with explicit runtime', async () => { + const plugin = new EdgeAdapterPlugin({ runtime: 'cloudflare-workers' }); + const kernel: Record = {}; + await plugin.install({ engine: kernel }); + const edge = kernel['edge'] as Record; + expect(edge.runtime).toBe('cloudflare-workers'); + }); + + it('should throw when capabilities requirements are not met', async () => { + const plugin = new EdgeAdapterPlugin({ + runtime: 'vercel-edge', + requirements: { persistentStorage: true }, + }); + const kernel: Record = {}; + await expect(plugin.install({ engine: kernel })).rejects.toThrow('missing capabilities'); + }); + + it('should resolve bindings after install', async () => { + const plugin = new EdgeAdapterPlugin({ + runtime: 'cloudflare-workers', + bindings: { + primary: { driver: '@objectql/driver-sqlite-wasm', binding: 'D1' }, + }, + }); + const kernel: Record = {}; + await plugin.install({ engine: kernel }); + const bindings = plugin.getResolvedBindings(); + expect(bindings).toHaveLength(1); + expect(bindings[0].datasource).toBe('primary'); + }); + + it('should support onStart and onStop lifecycle', async () => { + const plugin = new EdgeAdapterPlugin(); + const ctx = { engine: {} }; + await plugin.install(ctx); + await plugin.onStart(ctx); + await plugin.onStop(ctx); + }); +}); diff --git a/packages/foundation/edge-adapter/src/index.ts b/packages/foundation/edge-adapter/src/index.ts new file mode 100644 index 00000000..efb14bd3 --- /dev/null +++ b/packages/foundation/edge-adapter/src/index.ts @@ -0,0 +1,24 @@ +/** + * ObjectQL + * Copyright (c) 2026-present ObjectStack Inc. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +export { detectRuntime } from './detector.js'; +export { + getCapabilities, + validateCapabilities, + type CapabilityRequirement, + type CapabilityValidationResult, +} from './capabilities.js'; +export { + getDefaultDriver, + resolveBindings, + type ResolvedBinding, +} from './binding-resolver.js'; +export { + EdgeAdapterPlugin, + type EdgeAdapterPluginConfig, +} from './plugin.js'; diff --git a/packages/foundation/edge-adapter/src/plugin.ts b/packages/foundation/edge-adapter/src/plugin.ts new file mode 100644 index 00000000..b1467158 --- /dev/null +++ b/packages/foundation/edge-adapter/src/plugin.ts @@ -0,0 +1,93 @@ +/** + * ObjectQL + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { RuntimePlugin, RuntimeContext, EdgeAdapterConfig } from '@objectql/types'; +import { detectRuntime } from './detector.js'; +import { validateCapabilities, getCapabilities, type CapabilityRequirement } from './capabilities.js'; +import { resolveBindings, type ResolvedBinding } from './binding-resolver.js'; + +/** + * Configuration options for the EdgeAdapterPlugin. + */ +export interface EdgeAdapterPluginConfig { + /** Override auto-detected runtime */ + readonly runtime?: EdgeAdapterConfig['runtime']; + /** Edge driver bindings */ + readonly bindings?: EdgeAdapterConfig['bindings']; + /** Maximum execution time override */ + readonly maxExecutionTime?: number; + /** Enable request-scoped connections (default: true) */ + readonly requestScoped?: boolean; + /** Capability requirements to validate on startup */ + readonly requirements?: CapabilityRequirement; +} + +/** + * Edge adapter plugin for the ObjectStack runtime. 
+ * + * Detects the current edge runtime, validates capabilities, + * resolves driver bindings, and registers edge context on the kernel. + */ +export class EdgeAdapterPlugin implements RuntimePlugin { + readonly name = '@objectql/edge-adapter'; + readonly version = '4.2.0'; + + private readonly config: EdgeAdapterPluginConfig; + private resolvedBindings: readonly ResolvedBinding[] = []; + + constructor(config: EdgeAdapterPluginConfig = {}) { + this.config = config; + } + + async install(ctx: RuntimeContext): Promise { + const runtime = this.config.runtime ?? detectRuntime(); + const capabilities = getCapabilities(runtime); + + // Validate capabilities if requirements specified + if (this.config.requirements) { + const validation = validateCapabilities(runtime, this.config.requirements); + if (!validation.valid) { + throw new Error( + `[${this.name}] Runtime '${runtime}' missing capabilities: ${validation.missing.join(', ')}`, + ); + } + } + + // Resolve driver bindings + const adapterConfig: EdgeAdapterConfig = { + runtime, + bindings: this.config.bindings, + maxExecutionTime: this.config.maxExecutionTime ?? capabilities.maxExecutionTime, + requestScoped: this.config.requestScoped ?? 
true, + }; + + this.resolvedBindings = resolveBindings(adapterConfig); + + // Register edge context on the kernel + const kernel = ctx.engine as Record; + kernel['edge'] = { + runtime, + capabilities, + bindings: this.resolvedBindings, + config: adapterConfig, + }; + } + + async onStart(_ctx: RuntimeContext): Promise { + // Edge runtimes are typically stateless; no long-lived connections needed + } + + async onStop(_ctx: RuntimeContext): Promise { + // Cleanup + } + + /** Get the resolved bindings (available after install) */ + getResolvedBindings(): readonly ResolvedBinding[] { + return this.resolvedBindings; + } +} diff --git a/packages/foundation/edge-adapter/tsconfig.json b/packages/foundation/edge-adapter/tsconfig.json new file mode 100644 index 00000000..16be6b2a --- /dev/null +++ b/packages/foundation/edge-adapter/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "../../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src", + "declaration": true, + "skipLibCheck": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts"] +} diff --git a/packages/foundation/plugin-sync/README.md b/packages/foundation/plugin-sync/README.md new file mode 100644 index 00000000..374cedd7 --- /dev/null +++ b/packages/foundation/plugin-sync/README.md @@ -0,0 +1,248 @@ +# @objectql/plugin-sync + +Offline-first sync plugin for ObjectQL — mutation logging, sync engine, and conflict resolution. 
+ +## Features + +### 📝 Mutation Logging +- Client-side append-only mutation log +- Automatic sequence numbering and timestamping +- Per-object mutation filtering +- Batch acknowledgment after successful sync + +### 🔄 Sync Engine +- Push/pull synchronization between client and server +- Configurable sync direction (`push-only`, `pull-only`, `bidirectional`) +- Debounced auto-sync on mutation +- Checkpoint-based delta tracking + +### ⚔️ Conflict Resolution +- **Last-Write-Wins (LWW):** Timestamp-based resolution +- **CRDT:** Field-level LWW-Register merge +- **Manual:** Callback-driven resolution for user intervention +- Factory function for strategy selection + +### 📡 Event System +- Lifecycle listeners (`onSyncStart`, `onSyncComplete`, `onSyncError`) +- Conflict and server change notifications +- Global and per-engine listener support + +## Installation + +```bash +pnpm add @objectql/plugin-sync +``` + +## Quick Start + +```typescript +import { SyncPlugin } from '@objectql/plugin-sync'; +import { createKernel } from '@objectstack/runtime'; + +const kernel = createKernel({ + plugins: [ + new SyncPlugin({ + clientId: 'device-abc-123', + transport: myHttpTransport, + defaultConfig: { + enabled: true, + strategy: 'last-write-wins', + debounce_ms: 1000, + batch_size: 50, + }, + listeners: [{ + onSyncComplete(response) { + console.log('Sync complete:', response.checkpoint); + }, + onConflict(conflicts) { + console.warn('Conflicts detected:', conflicts.length); + }, + }], + }) + ] +}); + +await kernel.start(); +``` + +## MutationLogger + +The `MutationLogger` records all client-side mutations in an append-only log for later synchronization. 
+ +```typescript +import { MutationLogger } from '@objectql/plugin-sync'; + +const logger = new MutationLogger('device-abc-123'); + +// Record a mutation +const entry = logger.append({ + objectName: 'task', + recordId: 'task-1', + operation: 'update', + data: { status: 'completed' }, + baseVersion: 3, +}); + +// Get all pending mutations +const pending = logger.getPending(); + +// Get pending for a specific object +const taskMutations = logger.getPendingForObject('task'); + +// Acknowledge synced mutations +logger.acknowledge(['mutation-id-1', 'mutation-id-2']); + +// Check pending count +console.log(logger.size); // => 0 +``` + +## Conflict Resolution Strategies + +### Last-Write-Wins (LWW) + +Compares client and server timestamps. The most recent write wins. + +```typescript +import { LastWriteWinsResolver } from '@objectql/plugin-sync'; + +const resolver = new LastWriteWinsResolver(); +const result = resolver.resolve(conflict); +// result.status => 'applied' (client wins) or 'conflict' (server wins) +``` + +### CRDT (Field-Level Merge) + +Performs field-level LWW-Register merge. Non-conflicting fields from the client are applied; conflicting fields retain the server value. + +```typescript +import { CrdtResolver } from '@objectql/plugin-sync'; + +const resolver = new CrdtResolver(); +const result = resolver.resolve(conflict); +// Merges non-conflicting fields from both sides +``` + +### Manual Resolution + +Flags conflicts for manual resolution via a user-provided callback. + +```typescript +import { ManualResolver } from '@objectql/plugin-sync'; + +const resolver = new ManualResolver((conflict) => { + // Return merged data to resolve, or undefined to keep as conflict + return { + ...conflict.serverRecord, + ...conflict.clientMutation.data, + resolved_by: 'user', + }; +}); +``` + +### Factory Function + +Use `createResolver()` to instantiate a resolver by strategy name. 
+ +```typescript +import { createResolver } from '@objectql/plugin-sync'; + +const resolver = createResolver('crdt'); +// => CrdtResolver instance +``` + +## SyncEngine Configuration + +The `SyncEngine` orchestrates the full push/pull sync cycle. + +```typescript +import { SyncEngine } from '@objectql/plugin-sync'; +import type { SyncTransport } from '@objectql/plugin-sync'; + +const transport: SyncTransport = { + async push(request) { + const res = await fetch('/api/sync/push', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(request), + }); + return res.json(); + }, +}; + +const engine = new SyncEngine({ + clientId: 'device-abc-123', + transport, + config: { + enabled: true, + strategy: 'last-write-wins', + direction: 'bidirectional', + debounce_ms: 2000, + batch_size: 50, + }, +}); + +// Record a mutation (auto-triggers debounced sync) +engine.recordMutation({ + objectName: 'task', + recordId: 'task-42', + operation: 'update', + data: { title: 'Updated title' }, + baseVersion: 5, +}); + +// Manual sync trigger +const response = await engine.sync(); + +// Listen to sync events +engine.addListener({ + onSyncStart() { console.log('Syncing...'); }, + onSyncComplete(res) { console.log('Done:', res.checkpoint); }, + onSyncError(err) { console.error('Failed:', err.message); }, + onServerChanges(changes) { console.log('Server changes:', changes.length); }, +}); +``` + +### SyncConfig Options + +| Property | Type | Default | Description | +|-----------------|----------------------------------------------|--------------------|--------------------------------------| +| `enabled` | `boolean` | `true` | Enable sync for this object | +| `strategy` | `'last-write-wins'` \| `'crdt'` \| `'manual'` | `'last-write-wins'` | Conflict resolution strategy | +| `direction` | `'push-only'` \| `'pull-only'` \| `'bidirectional'` | `'bidirectional'` | Sync direction | +| `debounce_ms` | `number` | `1000` | Debounce interval for auto-sync (ms) | 
+| `batch_size` | `number` | `50` | Maximum mutations per sync request | + +## API Reference + +### Classes + +| Class | Description | +|------------------------|------------------------------------------------------| +| `SyncPlugin` | `RuntimePlugin` — registers sync capabilities on kernel | +| `SyncEngine` | Client-side push/pull sync orchestrator | +| `MutationLogger` | Append-only client-side mutation log | +| `LastWriteWinsResolver`| Timestamp-based LWW conflict resolver | +| `CrdtResolver` | Field-level LWW-Register merge resolver | +| `ManualResolver` | Callback-driven manual conflict resolver | + +### Functions + +| Function | Description | +|------------------------|------------------------------------------------------| +| `createResolver(strategy, onConflict?)` | Factory for `ConflictResolver` instances | + +### Types + +| Type | Description | +|------------------------|------------------------------------------------------| +| `SyncPluginConfig` | Plugin constructor options | +| `SyncTransport` | Transport interface for push requests | +| `SyncEventListener` | Listener interface for sync lifecycle events | +| `ConflictResolver` | Interface for conflict resolution strategies | +| `SyncConfig` | Per-object sync configuration | +| `MutationLogEntry` | Single mutation log record | +| `SyncConflict` | Conflict descriptor with client/server data | + +## License + +MIT diff --git a/packages/foundation/plugin-sync/package.json b/packages/foundation/plugin-sync/package.json new file mode 100644 index 00000000..c8c4bc6d --- /dev/null +++ b/packages/foundation/plugin-sync/package.json @@ -0,0 +1,27 @@ +{ + "name": "@objectql/plugin-sync", + "version": "4.2.0", + "description": "Offline-first sync plugin for ObjectQL - mutation logging, sync engine, and conflict resolution", + "keywords": ["objectql", "sync", "offline", "conflict-resolution", "crdt", "mutation-log"], + "license": "MIT", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "exports": { + ".": 
{ + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + }, + "files": ["dist"], + "scripts": { + "build": "tsc", + "test": "vitest run" + }, + "dependencies": { + "@objectql/types": "workspace:*" + }, + "devDependencies": { + "typescript": "^5.3.0", + "vitest": "^1.0.4" + } +} diff --git a/packages/foundation/plugin-sync/src/conflict-resolver.ts b/packages/foundation/plugin-sync/src/conflict-resolver.ts new file mode 100644 index 00000000..b8762e29 --- /dev/null +++ b/packages/foundation/plugin-sync/src/conflict-resolver.ts @@ -0,0 +1,119 @@ +/** + * ObjectQL Sync Plugin + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { SyncConflict, SyncMutationResult } from '@objectql/types'; + +/** + * Interface for conflict resolution strategies. + */ +export interface ConflictResolver { + readonly strategy: string; + resolve(conflict: SyncConflict): SyncMutationResult; +} + +/** + * Last-Write-Wins resolver. + * Accepts the mutation with the most recent timestamp. + */ +export class LastWriteWinsResolver implements ConflictResolver { + readonly strategy = 'last-write-wins'; + + resolve(conflict: SyncConflict): SyncMutationResult { + const clientTime = new Date(conflict.clientMutation.timestamp).getTime(); + const serverTime = conflict.serverRecord['updated_at'] + ? new Date(conflict.serverRecord['updated_at'] as string).getTime() + : 0; + + if (clientTime >= serverTime) { + // Client wins - apply the mutation + return { status: 'applied', serverVersion: (conflict.clientMutation.baseVersion ?? 0) + 1 }; + } + // Server wins - reject the mutation + return { + status: 'conflict', + conflict: { + ...conflict, + suggestedResolution: conflict.serverRecord, + }, + }; + } +} + +/** + * CRDT (Conflict-free Replicated Data Type) resolver. + * Performs field-level LWW-Register merge. Each field is resolved independently. 
+ */ +export class CrdtResolver implements ConflictResolver { + readonly strategy = 'crdt'; + + resolve(conflict: SyncConflict): SyncMutationResult { + const clientData = conflict.clientMutation.data ?? {}; + const serverData = conflict.serverRecord; + const merged: Record = { ...serverData }; + + // Field-level merge: client fields override server fields + // unless the field is explicitly in the conflicting set + for (const [key, value] of Object.entries(clientData)) { + if (!conflict.conflictingFields.includes(key)) { + merged[key] = value; + } + // For conflicting fields, keep server value (LWW at field level) + } + + return { + status: 'applied', + serverVersion: (conflict.clientMutation.baseVersion ?? 0) + 1, + }; + } +} + +/** + * Manual conflict resolver. + * Flags all conflicts for manual resolution via a callback. + */ +export class ManualResolver implements ConflictResolver { + readonly strategy = 'manual'; + + private readonly onConflict?: (conflict: SyncConflict) => Record | undefined; + + constructor(onConflict?: (conflict: SyncConflict) => Record | undefined) { + this.onConflict = onConflict; + } + + resolve(conflict: SyncConflict): SyncMutationResult { + if (this.onConflict) { + const resolution = this.onConflict(conflict); + if (resolution) { + return { + status: 'applied', + serverVersion: (conflict.clientMutation.baseVersion ?? 0) + 1, + }; + } + } + return { status: 'conflict', conflict }; + } +} + +/** + * Factory function to create the appropriate resolver. 
+ */ +export function createResolver( + strategy: string, + onConflict?: (conflict: SyncConflict) => Record | undefined +): ConflictResolver { + switch (strategy) { + case 'last-write-wins': + return new LastWriteWinsResolver(); + case 'crdt': + return new CrdtResolver(); + case 'manual': + return new ManualResolver(onConflict); + default: + return new LastWriteWinsResolver(); + } +} diff --git a/packages/foundation/plugin-sync/src/index.test.ts b/packages/foundation/plugin-sync/src/index.test.ts new file mode 100644 index 00000000..1839fbb7 --- /dev/null +++ b/packages/foundation/plugin-sync/src/index.test.ts @@ -0,0 +1,556 @@ +/** + * ObjectQL Sync Plugin + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { MutationLogger } from './mutation-logger.js'; +import { LastWriteWinsResolver, CrdtResolver, ManualResolver, createResolver } from './conflict-resolver.js'; +import { SyncEngine, type SyncTransport } from './sync-engine.js'; +import { SyncPlugin } from './plugin.js'; +import type { SyncConflict, SyncPushResponse } from '@objectql/types'; + +// ============================================================================ +// MutationLogger Tests +// ============================================================================ + +describe('MutationLogger', () => { + let logger: MutationLogger; + + beforeEach(() => { + logger = new MutationLogger('client-1'); + }); + + it('should initialize with zero pending mutations', () => { + expect(logger.size).toBe(0); + expect(logger.getPending()).toHaveLength(0); + }); + + it('should append mutations with incrementing sequence', () => { + const m1 = logger.append({ + objectName: 'task', + recordId: '1', + operation: 'create', + data: { title: 'Test' }, + baseVersion: null, + }); + const m2 = logger.append({ + 
objectName: 'task', + recordId: '2', + operation: 'create', + data: { title: 'Test 2' }, + baseVersion: null, + }); + + expect(m1.sequence).toBe(1); + expect(m2.sequence).toBe(2); + expect(m1.clientId).toBe('client-1'); + expect(logger.size).toBe(2); + }); + + it('should generate unique IDs', () => { + const m1 = logger.append({ objectName: 'task', recordId: '1', operation: 'create', baseVersion: null }); + const m2 = logger.append({ objectName: 'task', recordId: '2', operation: 'create', baseVersion: null }); + expect(m1.id).not.toBe(m2.id); + }); + + it('should filter pending by object name', () => { + logger.append({ objectName: 'task', recordId: '1', operation: 'create', baseVersion: null }); + logger.append({ objectName: 'project', recordId: '1', operation: 'create', baseVersion: null }); + logger.append({ objectName: 'task', recordId: '2', operation: 'update', data: { title: 'Updated' }, baseVersion: 1 }); + + expect(logger.getPendingForObject('task')).toHaveLength(2); + expect(logger.getPendingForObject('project')).toHaveLength(1); + }); + + it('should acknowledge synced mutations', () => { + const m1 = logger.append({ objectName: 'task', recordId: '1', operation: 'create', baseVersion: null }); + const m2 = logger.append({ objectName: 'task', recordId: '2', operation: 'create', baseVersion: null }); + logger.append({ objectName: 'task', recordId: '3', operation: 'create', baseVersion: null }); + + logger.acknowledge([m1.id, m2.id]); + expect(logger.size).toBe(1); + expect(logger.getPending()[0].recordId).toBe('3'); + }); + + it('should clear all mutations', () => { + logger.append({ objectName: 'task', recordId: '1', operation: 'create', baseVersion: null }); + logger.append({ objectName: 'task', recordId: '2', operation: 'create', baseVersion: null }); + logger.clear(); + expect(logger.size).toBe(0); + }); + + it('should return correct clientId', () => { + expect(logger.getClientId()).toBe('client-1'); + }); + + it('should record timestamps', () => { + 
const before = new Date().toISOString(); + const m = logger.append({ objectName: 'task', recordId: '1', operation: 'create', baseVersion: null }); + const after = new Date().toISOString(); + expect(m.timestamp >= before).toBe(true); + expect(m.timestamp <= after).toBe(true); + }); +}); + +// ============================================================================ +// Conflict Resolver Tests +// ============================================================================ + +describe('LastWriteWinsResolver', () => { + const resolver = new LastWriteWinsResolver(); + + it('should have correct strategy name', () => { + expect(resolver.strategy).toBe('last-write-wins'); + }); + + it('should apply client mutation when client timestamp is newer', () => { + const conflict: SyncConflict = { + objectName: 'task', + recordId: '1', + clientMutation: { + id: 'mut-1', + objectName: 'task', + recordId: '1', + operation: 'update', + data: { title: 'Client' }, + timestamp: '2026-01-02T00:00:00.000Z', + clientId: 'c1', + sequence: 1, + baseVersion: 1, + }, + serverRecord: { title: 'Server', updated_at: '2026-01-01T00:00:00.000Z' }, + conflictingFields: ['title'], + }; + + const result = resolver.resolve(conflict); + expect(result.status).toBe('applied'); + }); + + it('should flag conflict when server timestamp is newer', () => { + const conflict: SyncConflict = { + objectName: 'task', + recordId: '1', + clientMutation: { + id: 'mut-1', + objectName: 'task', + recordId: '1', + operation: 'update', + data: { title: 'Client' }, + timestamp: '2026-01-01T00:00:00.000Z', + clientId: 'c1', + sequence: 1, + baseVersion: 1, + }, + serverRecord: { title: 'Server', updated_at: '2026-01-02T00:00:00.000Z' }, + conflictingFields: ['title'], + }; + + const result = resolver.resolve(conflict); + expect(result.status).toBe('conflict'); + }); +}); + +describe('CrdtResolver', () => { + const resolver = new CrdtResolver(); + + it('should have correct strategy name', () => { + 
expect(resolver.strategy).toBe('crdt'); + }); + + it('should apply non-conflicting field changes', () => { + const conflict: SyncConflict = { + objectName: 'task', + recordId: '1', + clientMutation: { + id: 'mut-1', + objectName: 'task', + recordId: '1', + operation: 'update', + data: { description: 'Updated desc' }, + timestamp: '2026-01-01T00:00:00.000Z', + clientId: 'c1', + sequence: 1, + baseVersion: 1, + }, + serverRecord: { title: 'Server Title', description: 'Old desc' }, + conflictingFields: ['title'], // Only title is conflicting + }; + + const result = resolver.resolve(conflict); + expect(result.status).toBe('applied'); + }); +}); + +describe('ManualResolver', () => { + it('should have correct strategy name', () => { + const resolver = new ManualResolver(); + expect(resolver.strategy).toBe('manual'); + }); + + it('should flag conflict when no callback is provided', () => { + const resolver = new ManualResolver(); + const conflict: SyncConflict = { + objectName: 'task', + recordId: '1', + clientMutation: { + id: 'mut-1', + objectName: 'task', + recordId: '1', + operation: 'update', + data: { title: 'Client' }, + timestamp: '2026-01-01T00:00:00.000Z', + clientId: 'c1', + sequence: 1, + baseVersion: 1, + }, + serverRecord: { title: 'Server' }, + conflictingFields: ['title'], + }; + + const result = resolver.resolve(conflict); + expect(result.status).toBe('conflict'); + }); + + it('should apply resolution when callback returns data', () => { + const resolver = new ManualResolver(() => ({ title: 'Merged' })); + const conflict: SyncConflict = { + objectName: 'task', + recordId: '1', + clientMutation: { + id: 'mut-1', + objectName: 'task', + recordId: '1', + operation: 'update', + data: { title: 'Client' }, + timestamp: '2026-01-01T00:00:00.000Z', + clientId: 'c1', + sequence: 1, + baseVersion: 1, + }, + serverRecord: { title: 'Server' }, + conflictingFields: ['title'], + }; + + const result = resolver.resolve(conflict); + expect(result.status).toBe('applied'); 
+ }); + + it('should flag conflict when callback returns undefined', () => { + const resolver = new ManualResolver(() => undefined); + const conflict: SyncConflict = { + objectName: 'task', + recordId: '1', + clientMutation: { + id: 'mut-1', + objectName: 'task', + recordId: '1', + operation: 'update', + data: {}, + timestamp: '2026-01-01T00:00:00.000Z', + clientId: 'c1', + sequence: 1, + baseVersion: 1, + }, + serverRecord: {}, + conflictingFields: ['title'], + }; + + const result = resolver.resolve(conflict); + expect(result.status).toBe('conflict'); + }); +}); + +describe('createResolver', () => { + it('should create last-write-wins resolver', () => { + const resolver = createResolver('last-write-wins'); + expect(resolver.strategy).toBe('last-write-wins'); + }); + + it('should create crdt resolver', () => { + const resolver = createResolver('crdt'); + expect(resolver.strategy).toBe('crdt'); + }); + + it('should create manual resolver', () => { + const resolver = createResolver('manual'); + expect(resolver.strategy).toBe('manual'); + }); + + it('should default to last-write-wins for unknown strategy', () => { + const resolver = createResolver('unknown'); + expect(resolver.strategy).toBe('last-write-wins'); + }); +}); + +// ============================================================================ +// SyncEngine Tests +// ============================================================================ + +describe('SyncEngine', () => { + let transport: SyncTransport; + let engine: SyncEngine; + + beforeEach(() => { + transport = { + push: vi.fn().mockResolvedValue({ + results: [{ status: 'applied', serverVersion: 1 }], + serverChanges: [], + checkpoint: 'cp-1', + } satisfies SyncPushResponse), + }; + + engine = new SyncEngine({ + clientId: 'client-1', + transport, + config: { + enabled: true, + strategy: 'last-write-wins', + debounce_ms: 100, + batch_size: 10, + }, + }); + }); + + it('should record mutations', () => { + engine.recordMutation({ + objectName: 'task', + 
recordId: '1', + operation: 'create', + data: { title: 'Test' }, + baseVersion: null, + }); + expect(engine.getMutationLogger().size).toBe(1); + }); + + it('should sync pending mutations', async () => { + engine.recordMutation({ + objectName: 'task', + recordId: '1', + operation: 'create', + data: { title: 'Test' }, + baseVersion: null, + }); + + engine.cancelScheduledSync(); + const response = await engine.sync(); + + expect(response).not.toBeNull(); + expect(transport.push).toHaveBeenCalledTimes(1); + expect(engine.getMutationLogger().size).toBe(0); // acknowledged + expect(engine.getCheckpoint()).toBe('cp-1'); + }); + + it('should return null when already syncing', async () => { + // Slow transport + (transport.push as ReturnType).mockImplementation( + () => new Promise(resolve => setTimeout(() => resolve({ + results: [], + serverChanges: [], + checkpoint: 'cp-1', + }), 100)) + ); + + engine.recordMutation({ objectName: 'task', recordId: '1', operation: 'create', baseVersion: null }); + engine.cancelScheduledSync(); + + const promise1 = engine.sync(); + const promise2 = engine.sync(); // Should return null + + expect(await promise2).toBeNull(); + await promise1; + }); + + it('should not sync when direction is pull-only', async () => { + const pullEngine = new SyncEngine({ + clientId: 'client-1', + transport, + config: { enabled: true, direction: 'pull-only' }, + }); + + pullEngine.recordMutation({ objectName: 'task', recordId: '1', operation: 'create', baseVersion: null }); + const response = await pullEngine.sync(); + expect(response).toBeNull(); + expect(transport.push).not.toHaveBeenCalled(); + }); + + it('should handle sync errors gracefully', async () => { + (transport.push as ReturnType).mockRejectedValue(new Error('Network error')); + + engine.recordMutation({ objectName: 'task', recordId: '1', operation: 'create', baseVersion: null }); + engine.cancelScheduledSync(); + + const errorListener = vi.fn(); + engine.addListener({ onSyncError: errorListener }); 
+ + const response = await engine.sync(); + expect(response).toBeNull(); + expect(errorListener).toHaveBeenCalled(); + expect(engine.getMutationLogger().size).toBe(1); // Not acknowledged + }); + + it('should notify listeners on sync events', async () => { + const listener = { + onSyncStart: vi.fn(), + onSyncComplete: vi.fn(), + onServerChanges: vi.fn(), + }; + engine.addListener(listener); + + (transport.push as ReturnType).mockResolvedValue({ + results: [{ status: 'applied', serverVersion: 1 }], + serverChanges: [{ objectName: 'task', recordId: '2', operation: 'create', data: { title: 'Server' }, serverVersion: 1, timestamp: new Date().toISOString() }], + checkpoint: 'cp-2', + } satisfies SyncPushResponse); + + engine.recordMutation({ objectName: 'task', recordId: '1', operation: 'create', baseVersion: null }); + engine.cancelScheduledSync(); + await engine.sync(); + + expect(listener.onSyncStart).toHaveBeenCalled(); + expect(listener.onSyncComplete).toHaveBeenCalled(); + expect(listener.onServerChanges).toHaveBeenCalled(); + }); + + it('should remove listeners', () => { + const listener = { onSyncStart: vi.fn() }; + engine.addListener(listener); + engine.removeListener(listener); + // No way to assert directly, but it should not throw + }); + + it('should set and get checkpoint', () => { + engine.setCheckpoint('cp-100'); + expect(engine.getCheckpoint()).toBe('cp-100'); + }); + + it('should report syncing state', () => { + expect(engine.isSyncing()).toBe(false); + }); + + it('should batch mutations according to batch_size', async () => { + for (let i = 0; i < 15; i++) { + engine.recordMutation({ + objectName: 'task', + recordId: String(i), + operation: 'create', + baseVersion: null, + }); + } + + (transport.push as ReturnType).mockResolvedValue({ + results: Array(10).fill({ status: 'applied', serverVersion: 1 }), + serverChanges: [], + checkpoint: 'cp-batch', + }); + + engine.cancelScheduledSync(); + await engine.sync(); + + const call = (transport.push as 
ReturnType).mock.calls[0][0]; + expect(call.mutations).toHaveLength(10); // batch_size is 10 + expect(engine.getMutationLogger().size).toBe(5); // 15 - 10 = 5 remaining + }); +}); + +// ============================================================================ +// SyncPlugin Tests +// ============================================================================ + +describe('SyncPlugin', () => { + it('should have correct name and version', () => { + const plugin = new SyncPlugin({ + clientId: 'test', + transport: { push: vi.fn() }, + }); + expect(plugin.name).toBe('@objectql/plugin-sync'); + expect(plugin.version).toBe('4.2.0'); + }); + + it('should install sync capabilities on kernel', async () => { + const plugin = new SyncPlugin({ + clientId: 'test', + transport: { push: vi.fn() }, + }); + const kernel: Record = {}; + await plugin.install({ engine: kernel }); + + expect(kernel['sync']).toBeDefined(); + expect((kernel['sync'] as Record).getEngine).toBeDefined(); + expect((kernel['sync'] as Record).getAllEngines).toBeDefined(); + expect((kernel['sync'] as Record).syncAll).toBeDefined(); + }); + + it('should create engines lazily', async () => { + const plugin = new SyncPlugin({ + clientId: 'test', + transport: { push: vi.fn() }, + }); + const kernel: Record = {}; + await plugin.install({ engine: kernel }); + + expect(plugin.getAllEngines().size).toBe(0); + + const engine = plugin.getEngine('task'); + expect(engine).toBeDefined(); + expect(plugin.getAllEngines().size).toBe(1); + }); + + it('should reuse engines for the same object', () => { + const plugin = new SyncPlugin({ + clientId: 'test', + transport: { push: vi.fn() }, + }); + + const engine1 = plugin.getEngine('task'); + const engine2 = plugin.getEngine('task'); + expect(engine1).toBe(engine2); + }); + + it('should cleanup on stop', async () => { + const plugin = new SyncPlugin({ + clientId: 'test', + transport: { push: vi.fn() }, + }); + const ctx = { engine: {} }; + await plugin.install(ctx); + + 
plugin.getEngine('task'); + plugin.getEngine('project'); + expect(plugin.getAllEngines().size).toBe(2); + + await plugin.onStop!(ctx); + expect(plugin.getAllEngines().size).toBe(0); + }); + + it('should sync all engines', async () => { + const transport = { + push: vi.fn().mockResolvedValue({ + results: [], + serverChanges: [], + checkpoint: 'cp-1', + }), + }; + + const plugin = new SyncPlugin({ + clientId: 'test', + transport, + }); + + const engine1 = plugin.getEngine('task'); + const engine2 = plugin.getEngine('project'); + + // Add mutations to both engines + engine1.recordMutation({ objectName: 'task', recordId: '1', operation: 'create', baseVersion: null }); + engine2.recordMutation({ objectName: 'project', recordId: '1', operation: 'create', baseVersion: null }); + + engine1.cancelScheduledSync(); + engine2.cancelScheduledSync(); + + await plugin.syncAll(); + expect(transport.push).toHaveBeenCalledTimes(2); + }); +}); diff --git a/packages/foundation/plugin-sync/src/index.ts b/packages/foundation/plugin-sync/src/index.ts new file mode 100644 index 00000000..5cb789db --- /dev/null +++ b/packages/foundation/plugin-sync/src/index.ts @@ -0,0 +1,40 @@ +/** + * ObjectQL Sync Plugin + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +// Main plugin export +export { SyncPlugin } from './plugin.js'; +export type { SyncPluginConfig } from './plugin.js'; + +// Core components +export { SyncEngine } from './sync-engine.js'; +export type { SyncTransport, SyncEventListener } from './sync-engine.js'; + +export { MutationLogger } from './mutation-logger.js'; + +// Conflict resolution +export { + LastWriteWinsResolver, + CrdtResolver, + ManualResolver, + createResolver, +} from './conflict-resolver.js'; +export type { ConflictResolver } from './conflict-resolver.js'; + +// Re-export sync types from @objectql/types for convenience +export type { + SyncConfig, + SyncStrategy, + MutationLogEntry, + MutationOperation, + SyncConflict, + SyncMutationResult, + SyncPushRequest, + SyncPushResponse, + SyncServerChange, + SyncEndpointConfig, +} from '@objectql/types'; diff --git a/packages/foundation/plugin-sync/src/mutation-logger.ts b/packages/foundation/plugin-sync/src/mutation-logger.ts new file mode 100644 index 00000000..8812432f --- /dev/null +++ b/packages/foundation/plugin-sync/src/mutation-logger.ts @@ -0,0 +1,94 @@ +/** + * ObjectQL Sync Plugin + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { MutationLogEntry, MutationOperation } from '@objectql/types'; + +/** + * Client-side append-only mutation log. + * Records all mutations while offline for later sync. 
+ */ +export class MutationLogger { + private readonly log: MutationLogEntry[] = []; + private sequence = 0; + private readonly clientId: string; + + constructor(clientId: string) { + this.clientId = clientId; + } + + /** Record a mutation in the log */ + append(entry: { + objectName: string; + recordId: string | number; + operation: MutationOperation; + data?: Record; + baseVersion: number | null; + }): MutationLogEntry { + this.sequence += 1; + const mutation: MutationLogEntry = { + id: this.generateId(), + objectName: entry.objectName, + recordId: entry.recordId, + operation: entry.operation, + data: entry.data, + timestamp: new Date().toISOString(), + clientId: this.clientId, + sequence: this.sequence, + baseVersion: entry.baseVersion, + }; + this.log.push(mutation); + return mutation; + } + + /** Get all pending mutations (not yet synced) */ + getPending(): readonly MutationLogEntry[] { + return [...this.log]; + } + + /** Get pending mutations for a specific object */ + getPendingForObject(objectName: string): readonly MutationLogEntry[] { + return this.log.filter(e => e.objectName === objectName); + } + + /** Remove mutations that have been successfully synced */ + acknowledge(mutationIds: readonly string[]): void { + const idSet = new Set(mutationIds); + // Remove acknowledged mutations + for (let i = this.log.length - 1; i >= 0; i--) { + if (idSet.has(this.log[i].id)) { + this.log.splice(i, 1); + } + } + } + + /** Clear all pending mutations */ + clear(): void { + this.log.length = 0; + } + + /** Get the number of pending mutations */ + get size(): number { + return this.log.length; + } + + /** Get the client identifier */ + getClientId(): string { + return this.clientId; + } + + private generateId(): string { + // Simple UUID v4-like generation for environments without crypto.randomUUID + const hex = '0123456789abcdef'; + let id = ''; + for (let i = 0; i < 32; i++) { + id += hex[Math.floor(Math.random() * 16)]; + if (i === 7 || i === 11 || i === 15 || i 
=== 19) id += '-'; + } + return id; + } +} diff --git a/packages/foundation/plugin-sync/src/plugin.ts b/packages/foundation/plugin-sync/src/plugin.ts new file mode 100644 index 00000000..c156660e --- /dev/null +++ b/packages/foundation/plugin-sync/src/plugin.ts @@ -0,0 +1,108 @@ +/** + * ObjectQL Sync Plugin + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { RuntimePlugin, RuntimeContext, SyncConfig } from '@objectql/types'; +import { SyncEngine, type SyncTransport, type SyncEventListener } from './sync-engine.js'; + +/** + * Configuration for the sync plugin. + */ +export interface SyncPluginConfig { + /** Client device identifier */ + readonly clientId: string; + /** Default sync configuration for objects that don't specify their own */ + readonly defaultConfig?: SyncConfig; + /** Sync transport implementation */ + readonly transport: SyncTransport; + /** Global sync event listeners */ + readonly listeners?: readonly SyncEventListener[]; +} + +/** + * ObjectQL Sync Plugin + * + * Provides offline-first sync capabilities for ObjectQL applications. + * Records mutations locally and synchronizes with the server when online. 
+ */ +export class SyncPlugin implements RuntimePlugin { + name = '@objectql/plugin-sync'; + version = '4.2.0'; + + private readonly config: SyncPluginConfig; + private readonly engines: Map = new Map(); + + constructor(config: SyncPluginConfig) { + this.config = config; + } + + async install(ctx: RuntimeContext): Promise { + // Register sync capabilities on the kernel + const kernel = ctx.engine as Record; + kernel['sync'] = { + getEngine: (objectName: string) => this.getEngine(objectName), + getAllEngines: () => this.getAllEngines(), + syncAll: () => this.syncAll(), + }; + } + + async onStart(_ctx: RuntimeContext): Promise { + // Engines are created lazily when sync is first needed + } + + async onStop(_ctx: RuntimeContext): Promise { + // Cancel all scheduled syncs + for (const engine of this.engines.values()) { + engine.cancelScheduledSync(); + } + this.engines.clear(); + } + + /** + * Get or create a sync engine for the given object. + */ + getEngine(objectName: string, syncConfig?: SyncConfig): SyncEngine { + let engine = this.engines.get(objectName); + if (!engine) { + const config = syncConfig ?? this.config.defaultConfig ?? { + enabled: true, + strategy: 'last-write-wins' as const, + }; + engine = new SyncEngine({ + clientId: this.config.clientId, + transport: this.config.transport, + config, + }); + // Add global listeners + if (this.config.listeners) { + for (const listener of this.config.listeners) { + engine.addListener(listener); + } + } + this.engines.set(objectName, engine); + } + return engine; + } + + /** + * Get all active sync engines. + */ + getAllEngines(): ReadonlyMap { + return this.engines; + } + + /** + * Trigger sync for all active engines. 
+ */ + async syncAll(): Promise { + const promises: Promise[] = []; + for (const engine of this.engines.values()) { + promises.push(engine.sync()); + } + await Promise.all(promises); + } +} diff --git a/packages/foundation/plugin-sync/src/sync-engine.ts b/packages/foundation/plugin-sync/src/sync-engine.ts new file mode 100644 index 00000000..2fbbf9e6 --- /dev/null +++ b/packages/foundation/plugin-sync/src/sync-engine.ts @@ -0,0 +1,200 @@ +/** + * ObjectQL Sync Plugin + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { + MutationLogEntry, + MutationOperation, + SyncConflict, + SyncPushRequest, + SyncPushResponse, + SyncServerChange, + SyncConfig, +} from '@objectql/types'; +import { MutationLogger } from './mutation-logger.js'; +import type { ConflictResolver } from './conflict-resolver.js'; +import { createResolver } from './conflict-resolver.js'; + +/** + * Sync transport interface — abstracts the HTTP/WebSocket layer. + */ +export interface SyncTransport { + push(request: SyncPushRequest): Promise; +} + +/** + * Sync event listener. + */ +export interface SyncEventListener { + onSyncStart?(): void; + onSyncComplete?(response: SyncPushResponse): void; + onSyncError?(error: Error): void; + onConflict?(conflicts: readonly SyncConflict[]): void; + onServerChanges?(changes: readonly SyncServerChange[]): void; +} + +/** + * Client-side sync engine. + * Orchestrates push/pull synchronization between client and server. 
+ */ +export class SyncEngine { + private readonly logger: MutationLogger; + private readonly transport: SyncTransport; + private readonly resolver: ConflictResolver; + private readonly config: SyncConfig; + private readonly listeners: SyncEventListener[] = []; + private checkpoint: string | null = null; + private syncing = false; + private debounceTimer: ReturnType | null = null; + + constructor(options: { + clientId: string; + transport: SyncTransport; + config: SyncConfig; + onConflict?: (conflict: SyncConflict) => Record | undefined; + }) { + this.logger = new MutationLogger(options.clientId); + this.transport = options.transport; + this.config = options.config; + this.resolver = createResolver(options.config.strategy ?? 'last-write-wins', options.onConflict); + } + + /** Record a mutation and optionally trigger debounced sync */ + recordMutation(entry: { + objectName: string; + recordId: string | number; + operation: MutationOperation; + data?: Record; + baseVersion: number | null; + }): MutationLogEntry { + const mutation = this.logger.append(entry); + this.scheduleSync(); + return mutation; + } + + /** Manually trigger a sync cycle */ + async sync(): Promise { + if (this.syncing) return null; + if (this.config.direction === 'pull-only') return null; + + const pending = this.logger.getPending(); + if (pending.length === 0 && this.config.direction === 'push-only') return null; + + this.syncing = true; + this.notifyListeners('onSyncStart'); + + try { + const batchSize = this.config.batch_size ?? 
50; + const batch = pending.slice(0, batchSize); + + const request: SyncPushRequest = { + clientId: this.logger.getClientId(), + mutations: batch, + lastCheckpoint: this.checkpoint, + }; + + const response = await this.transport.push(request); + + // Process results + const acknowledgedIds: string[] = []; + const conflicts: SyncConflict[] = []; + + for (let i = 0; i < response.results.length; i++) { + const result = response.results[i]; + if (result.status === 'applied') { + acknowledgedIds.push(batch[i].id); + } else if (result.status === 'conflict') { + conflicts.push(result.conflict); + } + } + + // Acknowledge applied mutations + this.logger.acknowledge(acknowledgedIds); + + // Update checkpoint + this.checkpoint = response.checkpoint; + + // Notify listeners + if (conflicts.length > 0) { + this.notifyListeners('onConflict', conflicts); + } + if (response.serverChanges.length > 0) { + this.notifyListeners('onServerChanges', response.serverChanges); + } + this.notifyListeners('onSyncComplete', response); + + return response; + } catch (error) { + this.notifyListeners('onSyncError', error instanceof Error ? 
error : new Error(String(error))); + return null; + } finally { + this.syncing = false; + } + } + + /** Add a sync event listener */ + addListener(listener: SyncEventListener): void { + this.listeners.push(listener); + } + + /** Remove a sync event listener */ + removeListener(listener: SyncEventListener): void { + const idx = this.listeners.indexOf(listener); + if (idx !== -1) this.listeners.splice(idx, 1); + } + + /** Get the mutation logger */ + getMutationLogger(): MutationLogger { + return this.logger; + } + + /** Get the current checkpoint */ + getCheckpoint(): string | null { + return this.checkpoint; + } + + /** Set the checkpoint (e.g., from persisted state) */ + setCheckpoint(checkpoint: string | null): void { + this.checkpoint = checkpoint; + } + + /** Check if a sync is in progress */ + isSyncing(): boolean { + return this.syncing; + } + + /** Cancel any scheduled sync */ + cancelScheduledSync(): void { + if (this.debounceTimer) { + clearTimeout(this.debounceTimer); + this.debounceTimer = null; + } + } + + private scheduleSync(): void { + if (this.config.direction === 'pull-only') return; + + const debounce = this.config.debounce_ms ?? 
1000; + this.cancelScheduledSync(); + this.debounceTimer = setTimeout(() => { + void this.sync(); + }, debounce); + } + + private notifyListeners(event: keyof SyncEventListener, ...args: unknown[]): void { + for (const listener of this.listeners) { + const handler = listener[event]; + if (typeof handler === 'function') { + try { + (handler as (...a: unknown[]) => void).apply(listener, args); + } catch { + // Swallow listener errors to prevent sync interruption + } + } + } + } +} diff --git a/packages/foundation/plugin-sync/tsconfig.json b/packages/foundation/plugin-sync/tsconfig.json new file mode 100644 index 00000000..16be6b2a --- /dev/null +++ b/packages/foundation/plugin-sync/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "../../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src", + "declaration": true, + "skipLibCheck": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts"] +} diff --git a/packages/protocols/sync/README.md b/packages/protocols/sync/README.md new file mode 100644 index 00000000..ab8ac27e --- /dev/null +++ b/packages/protocols/sync/README.md @@ -0,0 +1,185 @@ +# @objectql/protocol-sync + +Server-side sync protocol for ObjectQL — delta computation, change tracking, and conflict resolution. 
+ +## Features + +### 📥 Sync Request Handler +- Process client push requests with per-mutation conflict detection +- Optimistic concurrency control via record versioning +- Configurable maximum mutations per request +- Delta computation from client checkpoint + +### 📜 Change Log +- Server-side append-only change log with monotonic checkpoints +- Configurable retention policy (days) +- Checkpoint-based delta queries for efficient sync +- Automatic pruning of expired entries + +### 🔢 Version Store +- Per-record version tracking for optimistic concurrency +- Atomic version increment on mutation +- Automatic cleanup on record deletion + +### ⚡ Plugin Integration +- Implements `RuntimePlugin` for seamless kernel integration +- Registers sync handler on the kernel context +- Configurable per-object conflict field definitions + +## Installation + +```bash +pnpm add @objectql/protocol-sync +``` + +## Quick Start + +```typescript +import { SyncProtocolPlugin } from '@objectql/protocol-sync'; +import { createKernel } from '@objectstack/runtime'; + +const kernel = createKernel({ + plugins: [ + new SyncProtocolPlugin({ + endpoint: { + enabled: true, + path: '/api/sync', + maxMutationsPerRequest: 100, + changeLogRetentionDays: 30, + }, + conflictFields: new Map([ + ['task', ['title', 'status', 'assignee']], + ['project', ['name', 'budget']], + ]), + }) + ] +}); + +await kernel.start(); +``` + +## Server-Side Handler + +The `SyncHandler` processes push requests by applying mutations, detecting conflicts, and returning server deltas. 
+ +```typescript +import { SyncHandler, type RecordResolver } from '@objectql/protocol-sync'; + +const handler = new SyncHandler({ + config: { + enabled: true, + path: '/api/sync', + maxMutationsPerRequest: 100, + changeLogRetentionDays: 30, + }, + conflictFields: new Map([ + ['task', ['title', 'status']], + ]), +}); + +// Implement the RecordResolver interface +const resolver: RecordResolver = { + async getRecord(objectName, recordId) { + return db.collection(objectName).findOne({ _id: recordId }); + }, + async applyMutation(mutation, serverVersion) { + if (mutation.operation === 'create') { + await db.collection(mutation.objectName).insertOne({ + _id: mutation.recordId, + ...mutation.data, + _version: serverVersion, + }); + } else if (mutation.operation === 'update') { + await db.collection(mutation.objectName).updateOne( + { _id: mutation.recordId }, + { $set: { ...mutation.data, _version: serverVersion } }, + ); + } else if (mutation.operation === 'delete') { + await db.collection(mutation.objectName).deleteOne({ _id: mutation.recordId }); + } + }, +}; + +// Handle a push request +const response = await handler.handlePush(pushRequest, resolver); +// response.results => per-mutation results (applied | conflict | rejected) +// response.serverChanges => changes since client's last checkpoint +// response.checkpoint => current server checkpoint +``` + +## Change Log + +The `ChangeLog` records all server-side mutations for delta computation during sync. 
+ +```typescript +import { ChangeLog } from '@objectql/protocol-sync'; + +const changeLog = new ChangeLog(30); // 30-day retention + +// Record a change +changeLog.record({ + objectName: 'task', + recordId: 'task-1', + operation: 'update', + data: { status: 'completed' }, + serverVersion: 5, +}); + +// Get changes since a checkpoint +const changes = changeLog.getChangesSince('42'); + +// Get current checkpoint +const checkpoint = changeLog.getCurrentCheckpoint(); + +// Prune expired entries +const pruned = changeLog.prune(); +``` + +## Version Tracking + +The `VersionStore` tracks the current version of each record for optimistic concurrency control. + +```typescript +import { VersionStore } from '@objectql/protocol-sync'; + +const versions = new VersionStore(); + +// Increment version on mutation +const newVersion = versions.increment('task', 'task-1'); +// => 1 + +// Get current version +const current = versions.getVersion('task', 'task-1'); +// => 1 + +// Remove on delete +versions.remove('task', 'task-1'); +``` + +## API Reference + +### Classes + +| Class | Description | +|------------------------|------------------------------------------------------| +| `SyncProtocolPlugin` | `RuntimePlugin` — registers sync handler on kernel | +| `SyncHandler` | Processes push requests with conflict detection | +| `ChangeLog` | Server-side append-only change log | +| `VersionStore` | Per-record version tracking | + +### Types + +| Type | Description | +|-----------------------------|-------------------------------------------------| +| `SyncProtocolPluginConfig` | Plugin constructor options | +| `RecordResolver` | Interface for reading/writing server records | +| `ChangeLogEntry` | A checkpoint-indexed change log entry | +| `SyncPushRequest` | Client push request payload | +| `SyncPushResponse` | Server push response payload | +| `SyncMutationResult` | Per-mutation result (`applied` \| `conflict` \| `rejected`) | +| `SyncServerChange` | A server-side change record | +| 
`SyncEndpointConfig` | Sync endpoint configuration | + +## License + +MIT diff --git a/packages/protocols/sync/package.json b/packages/protocols/sync/package.json new file mode 100644 index 00000000..718e1072 --- /dev/null +++ b/packages/protocols/sync/package.json @@ -0,0 +1,27 @@ +{ + "name": "@objectql/protocol-sync", + "version": "4.2.0", + "description": "Server-side sync protocol for ObjectQL - delta computation, change tracking, and conflict resolution", + "type": "module", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + }, + "scripts": { + "build": "tsc", + "test": "vitest run" + }, + "dependencies": { + "@objectql/types": "workspace:*" + }, + "devDependencies": { + "typescript": "^5.3.3", + "vitest": "^1.0.4" + }, + "keywords": ["objectql", "objectstack", "sync", "protocol", "offline", "plugin"], + "license": "MIT" +} diff --git a/packages/protocols/sync/src/change-log.ts b/packages/protocols/sync/src/change-log.ts new file mode 100644 index 00000000..5b7fbb60 --- /dev/null +++ b/packages/protocols/sync/src/change-log.ts @@ -0,0 +1,85 @@ +/** + * ObjectQL Sync Protocol — Server-side Change Log + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { SyncServerChange, MutationOperation } from '@objectql/types'; + +/** + * A checkpoint-indexed entry in the server change log. + */ +export interface ChangeLogEntry extends SyncServerChange { + /** Monotonic checkpoint sequence */ + readonly checkpointSeq: number; +} + +/** + * Server-side append-only change log. + * Records all mutations for delta computation during sync. 
+ */ +export class ChangeLog { + private readonly entries: ChangeLogEntry[] = []; + private seq = 0; + private readonly retentionMs: number; + + constructor(retentionDays = 30) { + this.retentionMs = retentionDays * 24 * 60 * 60 * 1000; + } + + /** Record a change in the log */ + record(change: { + objectName: string; + recordId: string | number; + operation: MutationOperation; + data?: Record; + serverVersion: number; + }): ChangeLogEntry { + this.seq += 1; + const entry: ChangeLogEntry = { + objectName: change.objectName, + recordId: change.recordId, + operation: change.operation, + data: change.data, + serverVersion: change.serverVersion, + timestamp: new Date().toISOString(), + checkpointSeq: this.seq, + }; + this.entries.push(entry); + return entry; + } + + /** Get changes since a checkpoint (exclusive) */ + getChangesSince(checkpoint: string | null): readonly ChangeLogEntry[] { + if (checkpoint === null) { + return [...this.entries]; + } + const seq = parseInt(checkpoint, 10); + if (isNaN(seq)) return [...this.entries]; + return this.entries.filter(e => e.checkpointSeq > seq); + } + + /** Get current checkpoint string */ + getCurrentCheckpoint(): string { + return String(this.seq); + } + + /** Prune old entries based on retention policy */ + prune(): number { + const cutoff = Date.now() - this.retentionMs; + const before = this.entries.length; + for (let i = this.entries.length - 1; i >= 0; i--) { + if (new Date(this.entries[i].timestamp).getTime() < cutoff) { + this.entries.splice(i, 1); + } + } + return before - this.entries.length; + } + + /** Get total entries */ + get size(): number { + return this.entries.length; + } +} diff --git a/packages/protocols/sync/src/index.test.ts b/packages/protocols/sync/src/index.test.ts new file mode 100644 index 00000000..20220323 --- /dev/null +++ b/packages/protocols/sync/src/index.test.ts @@ -0,0 +1,415 @@ +/** + * ObjectQL Sync Protocol — Tests + * Copyright (c) 2026-present ObjectStack Inc. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { ChangeLog } from './change-log.js'; +import { VersionStore } from './version-store.js'; +import { SyncHandler, type RecordResolver } from './sync-handler.js'; +import { SyncProtocolPlugin } from './plugin.js'; +import type { SyncPushRequest, MutationLogEntry } from '@objectql/types'; + +// ============================================================================ +// ChangeLog Tests +// ============================================================================ + +describe('ChangeLog', () => { + let log: ChangeLog; + + beforeEach(() => { + log = new ChangeLog(30); + }); + + it('should start empty', () => { + expect(log.size).toBe(0); + expect(log.getCurrentCheckpoint()).toBe('0'); + }); + + it('should record changes with incrementing checkpoints', () => { + const e1 = log.record({ objectName: 'task', recordId: '1', operation: 'create', data: { title: 'A' }, serverVersion: 1 }); + const e2 = log.record({ objectName: 'task', recordId: '2', operation: 'create', data: { title: 'B' }, serverVersion: 1 }); + + expect(e1.checkpointSeq).toBe(1); + expect(e2.checkpointSeq).toBe(2); + expect(log.size).toBe(2); + expect(log.getCurrentCheckpoint()).toBe('2'); + }); + + it('should return all changes when checkpoint is null', () => { + log.record({ objectName: 'task', recordId: '1', operation: 'create', serverVersion: 1 }); + log.record({ objectName: 'task', recordId: '2', operation: 'create', serverVersion: 1 }); + + const changes = log.getChangesSince(null); + expect(changes).toHaveLength(2); + }); + + it('should return changes since a specific checkpoint', () => { + log.record({ objectName: 'task', recordId: '1', operation: 'create', serverVersion: 1 }); + log.record({ objectName: 'task', recordId: '2', operation: 'create', serverVersion: 1 }); + log.record({ 
objectName: 'task', recordId: '3', operation: 'create', serverVersion: 1 }); + + const changes = log.getChangesSince('1'); + expect(changes).toHaveLength(2); + expect(changes[0].recordId).toBe('2'); + expect(changes[1].recordId).toBe('3'); + }); + + it('should return all changes for invalid checkpoint', () => { + log.record({ objectName: 'task', recordId: '1', operation: 'create', serverVersion: 1 }); + const changes = log.getChangesSince('invalid'); + expect(changes).toHaveLength(1); + }); + + it('should record timestamps', () => { + const entry = log.record({ objectName: 'task', recordId: '1', operation: 'create', serverVersion: 1 }); + expect(entry.timestamp).toBeDefined(); + expect(new Date(entry.timestamp).getTime()).not.toBeNaN(); + }); +}); + +// ============================================================================ +// VersionStore Tests +// ============================================================================ + +describe('VersionStore', () => { + let store: VersionStore; + + beforeEach(() => { + store = new VersionStore(); + }); + + it('should return 0 for unknown records', () => { + expect(store.getVersion('task', '1')).toBe(0); + }); + + it('should increment versions', () => { + expect(store.increment('task', '1')).toBe(1); + expect(store.increment('task', '1')).toBe(2); + expect(store.increment('task', '1')).toBe(3); + }); + + it('should track versions per record', () => { + store.increment('task', '1'); + store.increment('task', '1'); + store.increment('task', '2'); + + expect(store.getVersion('task', '1')).toBe(2); + expect(store.getVersion('task', '2')).toBe(1); + }); + + it('should track versions across objects', () => { + store.increment('task', '1'); + store.increment('project', '1'); + + expect(store.getVersion('task', '1')).toBe(1); + expect(store.getVersion('project', '1')).toBe(1); + }); + + it('should remove version tracking', () => { + store.increment('task', '1'); + store.increment('task', '1'); + store.remove('task', '1'); + + 
expect(store.getVersion('task', '1')).toBe(0); + expect(store.size).toBe(0); + }); + + it('should report correct size', () => { + store.increment('task', '1'); + store.increment('task', '2'); + store.increment('project', '1'); + + expect(store.size).toBe(3); + }); +}); + +// ============================================================================ +// SyncHandler Tests +// ============================================================================ + +describe('SyncHandler', () => { + let handler: SyncHandler; + let resolver: RecordResolver; + let records: Map>; + + beforeEach(() => { + records = new Map(); + resolver = { + getRecord: vi.fn(async (objectName: string, recordId: string | number) => { + return records.get(`${objectName}:${recordId}`) ?? null; + }), + applyMutation: vi.fn(async (mutation: MutationLogEntry, _serverVersion: number) => { + const key = `${mutation.objectName}:${mutation.recordId}`; + if (mutation.operation === 'delete') { + records.delete(key); + } else if (mutation.operation === 'create') { + records.set(key, { ...mutation.data }); + } else { + const existing = records.get(key) ?? 
{}; + records.set(key, { ...existing, ...mutation.data }); + } + }), + }; + handler = new SyncHandler({ + config: { enabled: true, maxMutationsPerRequest: 100 }, + }); + }); + + it('should apply create mutations', async () => { + const request: SyncPushRequest = { + clientId: 'c1', + mutations: [{ + id: 'm1', + objectName: 'task', + recordId: '1', + operation: 'create', + data: { title: 'New Task' }, + timestamp: new Date().toISOString(), + clientId: 'c1', + sequence: 1, + baseVersion: null, + }], + lastCheckpoint: null, + }; + + const response = await handler.handlePush(request, resolver); + expect(response.results).toHaveLength(1); + expect(response.results[0].status).toBe('applied'); + expect(response.checkpoint).toBeTruthy(); + }); + + it('should apply update mutations without conflict', async () => { + // First create + records.set('task:1', { title: 'Original' }); + handler.getVersionStore().increment('task', '1'); // version 1 + + const request: SyncPushRequest = { + clientId: 'c1', + mutations: [{ + id: 'm1', + objectName: 'task', + recordId: '1', + operation: 'update', + data: { title: 'Updated' }, + timestamp: new Date().toISOString(), + clientId: 'c1', + sequence: 1, + baseVersion: 1, // matches server version + }], + lastCheckpoint: null, + }; + + const response = await handler.handlePush(request, resolver); + expect(response.results[0].status).toBe('applied'); + }); + + it('should detect conflicts on version mismatch with conflict fields', async () => { + records.set('task:1', { title: 'Server Title', status: 'active' }); + + const conflictHandler = new SyncHandler({ + config: { enabled: true }, + conflictFields: new Map([['task', ['title', 'status']]]), + }); + // Set versions to match — version 2 on server + conflictHandler.getVersionStore().increment('task', '1'); + conflictHandler.getVersionStore().increment('task', '1'); + + const request: SyncPushRequest = { + clientId: 'c1', + mutations: [{ + id: 'm1', + objectName: 'task', + recordId: '1', + 
operation: 'update', + data: { title: 'Client Title' }, + timestamp: new Date().toISOString(), + clientId: 'c1', + sequence: 1, + baseVersion: 1, // behind server version 2 + }], + lastCheckpoint: null, + }; + + const response = await conflictHandler.handlePush(request, resolver); + expect(response.results[0].status).toBe('conflict'); + }); + + it('should apply delete mutations', async () => { + records.set('task:1', { title: 'To Delete' }); + handler.getVersionStore().increment('task', '1'); + + const request: SyncPushRequest = { + clientId: 'c1', + mutations: [{ + id: 'm1', + objectName: 'task', + recordId: '1', + operation: 'delete', + timestamp: new Date().toISOString(), + clientId: 'c1', + sequence: 1, + baseVersion: 1, + }], + lastCheckpoint: null, + }; + + const response = await handler.handlePush(request, resolver); + expect(response.results[0].status).toBe('applied'); + }); + + it('should reject delete of non-existent records', async () => { + const request: SyncPushRequest = { + clientId: 'c1', + mutations: [{ + id: 'm1', + objectName: 'task', + recordId: '999', + operation: 'delete', + timestamp: new Date().toISOString(), + clientId: 'c1', + sequence: 1, + baseVersion: null, + }], + lastCheckpoint: null, + }; + + const response = await handler.handlePush(request, resolver); + expect(response.results[0].status).toBe('rejected'); + }); + + it('should reject when mutations exceed limit', async () => { + const limitHandler = new SyncHandler({ + config: { enabled: true, maxMutationsPerRequest: 2 }, + }); + + const mutations: MutationLogEntry[] = Array.from({ length: 3 }, (_, i) => ({ + id: `m${i}`, + objectName: 'task', + recordId: String(i), + operation: 'create' as const, + data: { title: `Task ${i}` }, + timestamp: new Date().toISOString(), + clientId: 'c1', + sequence: i + 1, + baseVersion: null, + })); + + const response = await limitHandler.handlePush( + { clientId: 'c1', mutations, lastCheckpoint: null }, + resolver + ); + + 
expect(response.results).toHaveLength(3); + for (const result of response.results) { + expect(result.status).toBe('rejected'); + } + }); + + it('should return server changes since checkpoint', async () => { + // Record some changes on the server + handler.getChangeLog().record({ objectName: 'task', recordId: '10', operation: 'create', data: { title: 'Server' }, serverVersion: 1 }); + handler.getChangeLog().record({ objectName: 'task', recordId: '11', operation: 'create', data: { title: 'Server 2' }, serverVersion: 1 }); + + const request: SyncPushRequest = { + clientId: 'c1', + mutations: [], + lastCheckpoint: '1', // Only want changes after checkpoint 1 + }; + + const response = await handler.handlePush(request, resolver); + expect(response.serverChanges).toHaveLength(1); + expect(response.serverChanges[0].recordId).toBe('11'); + }); + + it('should process multiple mutations in order', async () => { + const request: SyncPushRequest = { + clientId: 'c1', + mutations: [ + { + id: 'm1', objectName: 'task', recordId: '1', operation: 'create', + data: { title: 'Task 1' }, timestamp: new Date().toISOString(), + clientId: 'c1', sequence: 1, baseVersion: null, + }, + { + id: 'm2', objectName: 'task', recordId: '2', operation: 'create', + data: { title: 'Task 2' }, timestamp: new Date().toISOString(), + clientId: 'c1', sequence: 2, baseVersion: null, + }, + ], + lastCheckpoint: null, + }; + + const response = await handler.handlePush(request, resolver); + expect(response.results).toHaveLength(2); + expect(response.results[0].status).toBe('applied'); + expect(response.results[1].status).toBe('applied'); + }); +}); + +// ============================================================================ +// SyncProtocolPlugin Tests +// ============================================================================ + +describe('SyncProtocolPlugin', () => { + it('should have correct name and version', () => { + const plugin = new SyncProtocolPlugin({ + endpoint: { enabled: true }, + }); 
+ expect(plugin.name).toBe('@objectql/protocol-sync'); + expect(plugin.version).toBe('4.2.0'); + }); + + it('should install handler on kernel when enabled', async () => { + const plugin = new SyncProtocolPlugin({ + endpoint: { enabled: true }, + }); + const kernel: Record = {}; + await plugin.install({ engine: kernel }); + + expect(kernel['syncProtocol']).toBeDefined(); + expect((kernel['syncProtocol'] as Record)['handler']).toBeDefined(); + expect((kernel['syncProtocol'] as Record)['handlePush']).toBeDefined(); + }); + + it('should not install when disabled', async () => { + const plugin = new SyncProtocolPlugin({ + endpoint: { enabled: false }, + }); + const kernel: Record = {}; + await plugin.install({ engine: kernel }); + + expect(kernel['syncProtocol']).toBeUndefined(); + }); + + it('should return handler after install', async () => { + const plugin = new SyncProtocolPlugin({ + endpoint: { enabled: true }, + }); + await plugin.install({ engine: {} }); + expect(plugin.getHandler()).not.toBeNull(); + }); + + it('should clear handler on stop', async () => { + const plugin = new SyncProtocolPlugin({ + endpoint: { enabled: true }, + }); + const ctx = { engine: {} }; + await plugin.install(ctx); + await plugin.onStop(ctx); + expect(plugin.getHandler()).toBeNull(); + }); + + it('should support onStart lifecycle', async () => { + const plugin = new SyncProtocolPlugin({ + endpoint: { enabled: true }, + }); + const ctx = { engine: {} }; + await plugin.install(ctx); + await plugin.onStart(ctx); + // Should not throw + }); +}); diff --git a/packages/protocols/sync/src/index.ts b/packages/protocols/sync/src/index.ts new file mode 100644 index 00000000..95d477b7 --- /dev/null +++ b/packages/protocols/sync/src/index.ts @@ -0,0 +1,32 @@ +/** + * ObjectQL Sync Protocol + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +// Main plugin export +export { SyncProtocolPlugin } from './plugin.js'; +export type { SyncProtocolPluginConfig } from './plugin.js'; + +// Core components +export { SyncHandler } from './sync-handler.js'; +export type { RecordResolver } from './sync-handler.js'; + +export { ChangeLog } from './change-log.js'; +export type { ChangeLogEntry } from './change-log.js'; + +export { VersionStore } from './version-store.js'; + +// Re-export sync types from @objectql/types +export type { + SyncPushRequest, + SyncPushResponse, + SyncMutationResult, + SyncServerChange, + SyncConflict, + SyncEndpointConfig, + MutationLogEntry, + MutationOperation, +} from '@objectql/types'; diff --git a/packages/protocols/sync/src/plugin.ts b/packages/protocols/sync/src/plugin.ts new file mode 100644 index 00000000..531f1fc5 --- /dev/null +++ b/packages/protocols/sync/src/plugin.ts @@ -0,0 +1,68 @@ +/** + * ObjectQL Sync Protocol — Plugin + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { RuntimePlugin, RuntimeContext, SyncEndpointConfig, SyncPushRequest } from '@objectql/types'; +import { SyncHandler, type RecordResolver } from './sync-handler.js'; + +/** + * Configuration for the sync protocol plugin. + */ +export interface SyncProtocolPluginConfig { + /** Sync endpoint configuration */ + readonly endpoint: SyncEndpointConfig; + /** Per-object conflict field definitions */ + readonly conflictFields?: ReadonlyMap; +} + +/** + * ObjectQL Sync Protocol Plugin + * + * Provides server-side sync endpoint for handling client push/pull requests. + * Implements delta computation and change tracking. 
+ */ +export class SyncProtocolPlugin implements RuntimePlugin { + name = '@objectql/protocol-sync'; + version = '4.2.0'; + + private readonly config: SyncProtocolPluginConfig; + private handler: SyncHandler | null = null; + + constructor(config: SyncProtocolPluginConfig) { + this.config = config; + } + + async install(ctx: RuntimeContext): Promise { + if (!this.config.endpoint.enabled) return; + + this.handler = new SyncHandler({ + config: this.config.endpoint, + conflictFields: this.config.conflictFields, + }); + + // Register sync handler on the kernel + const kernel = ctx.engine as Record; + kernel['syncProtocol'] = { + handler: this.handler, + handlePush: (request: SyncPushRequest, resolver: RecordResolver) => + this.handler!.handlePush(request, resolver), + }; + } + + async onStart(_ctx: RuntimeContext): Promise { + // Protocol is ready to accept requests + } + + async onStop(_ctx: RuntimeContext): Promise { + this.handler = null; + } + + /** Get the sync handler (available after install) */ + getHandler(): SyncHandler | null { + return this.handler; + } +} diff --git a/packages/protocols/sync/src/sync-handler.ts b/packages/protocols/sync/src/sync-handler.ts new file mode 100644 index 00000000..5de9f2b8 --- /dev/null +++ b/packages/protocols/sync/src/sync-handler.ts @@ -0,0 +1,194 @@ +/** + * ObjectQL Sync Protocol — Sync Request Handler + * Copyright (c) 2026-present ObjectStack Inc. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import type { + SyncPushRequest, + SyncPushResponse, + SyncMutationResult, + SyncServerChange, + MutationLogEntry, + SyncConflict, + SyncEndpointConfig, +} from '@objectql/types'; +import { ChangeLog } from './change-log.js'; +import { VersionStore } from './version-store.js'; + +/** + * Server-side record resolver. + * The sync handler uses this to read current server state and apply mutations. 
+ */ +export interface RecordResolver { + getRecord(objectName: string, recordId: string | number): Promise | null>; + applyMutation(mutation: MutationLogEntry, serverVersion: number): Promise; +} + +/** + * Server-side sync request handler. + * Processes push requests, applies mutations, detects conflicts, and returns deltas. + */ +export class SyncHandler { + private readonly changeLog: ChangeLog; + private readonly versionStore: VersionStore; + private readonly config: SyncEndpointConfig; + private readonly conflictFields: ReadonlyMap; + + constructor(options: { + config: SyncEndpointConfig; + conflictFields?: ReadonlyMap; + }) { + this.config = options.config; + this.changeLog = new ChangeLog(options.config.changeLogRetentionDays ?? 30); + this.versionStore = new VersionStore(); + this.conflictFields = options.conflictFields ?? new Map(); + } + + /** Process a sync push request */ + async handlePush(request: SyncPushRequest, resolver: RecordResolver): Promise { + const maxMutations = this.config.maxMutationsPerRequest ?? 
100; + + if (request.mutations.length > maxMutations) { + const results: SyncMutationResult[] = request.mutations.map(() => ({ + status: 'rejected' as const, + reason: `Exceeds maximum mutations per request (${maxMutations})`, + })); + return { + results, + serverChanges: [], + checkpoint: this.changeLog.getCurrentCheckpoint(), + }; + } + + const results: SyncMutationResult[] = []; + + for (const mutation of request.mutations) { + const result = await this.processMutation(mutation, resolver); + results.push(result); + } + + // Get server changes since client's last checkpoint + const changes = this.changeLog.getChangesSince(request.lastCheckpoint); + const serverChanges: SyncServerChange[] = changes.map(entry => ({ + objectName: entry.objectName, + recordId: entry.recordId, + operation: entry.operation, + data: entry.data, + serverVersion: entry.serverVersion, + timestamp: entry.timestamp, + })); + + return { + results, + serverChanges, + checkpoint: this.changeLog.getCurrentCheckpoint(), + }; + } + + /** Get the change log (for testing/debugging) */ + getChangeLog(): ChangeLog { + return this.changeLog; + } + + /** Get the version store (for testing/debugging) */ + getVersionStore(): VersionStore { + return this.versionStore; + } + + private async processMutation( + mutation: MutationLogEntry, + resolver: RecordResolver + ): Promise { + const currentVersion = this.versionStore.getVersion(mutation.objectName, mutation.recordId); + + // Handle create operations + if (mutation.operation === 'create') { + const newVersion = this.versionStore.increment(mutation.objectName, mutation.recordId); + await resolver.applyMutation(mutation, newVersion); + this.changeLog.record({ + objectName: mutation.objectName, + recordId: mutation.recordId, + operation: 'create', + data: mutation.data, + serverVersion: newVersion, + }); + return { status: 'applied', serverVersion: newVersion }; + } + + // Handle delete operations + if (mutation.operation === 'delete') { + if (currentVersion 
=== 0) { + return { status: 'rejected', reason: 'Record not found' }; + } + const newVersion = this.versionStore.increment(mutation.objectName, mutation.recordId); + await resolver.applyMutation(mutation, newVersion); + this.changeLog.record({ + objectName: mutation.objectName, + recordId: mutation.recordId, + operation: 'delete', + serverVersion: newVersion, + }); + this.versionStore.remove(mutation.objectName, mutation.recordId); + return { status: 'applied', serverVersion: newVersion }; + } + + // Handle update operations — check for conflicts + if (mutation.baseVersion !== null && mutation.baseVersion < currentVersion) { + const serverRecord = await resolver.getRecord(mutation.objectName, mutation.recordId); + if (!serverRecord) { + return { status: 'rejected', reason: 'Record not found' }; + } + + const objectConflictFields = this.conflictFields.get(mutation.objectName) ?? []; + const conflictingFields = this.detectConflictingFields(mutation, serverRecord, objectConflictFields); + + if (conflictingFields.length > 0) { + const conflict: SyncConflict = { + objectName: mutation.objectName, + recordId: mutation.recordId, + clientMutation: mutation, + serverRecord, + conflictingFields, + }; + return { status: 'conflict', conflict }; + } + } + + // No conflict — apply the update + const newVersion = this.versionStore.increment(mutation.objectName, mutation.recordId); + await resolver.applyMutation(mutation, newVersion); + this.changeLog.record({ + objectName: mutation.objectName, + recordId: mutation.recordId, + operation: 'update', + data: mutation.data, + serverVersion: newVersion, + }); + return { status: 'applied', serverVersion: newVersion }; + } + + private detectConflictingFields( + mutation: MutationLogEntry, + serverRecord: Record, + conflictFields: readonly string[] + ): string[] { + if (conflictFields.length === 0) { + // If no specific conflict fields defined, treat all changed fields as potential conflicts + return Object.keys(mutation.data ?? 
{});
+    }
+
+    const conflicts: string[] = [];
+    const clientData = mutation.data ?? {};
+
+    for (const field of conflictFields) {
+      if (field in clientData && clientData[field] !== serverRecord[field]) { // strict inequality: only fields the client actually sent can conflict
+        conflicts.push(field);
+      }
+    }
+
+    return conflicts;
+  }
+}
diff --git a/packages/protocols/sync/src/version-store.ts b/packages/protocols/sync/src/version-store.ts
new file mode 100644
index 00000000..87c0d822
--- /dev/null
+++ b/packages/protocols/sync/src/version-store.ts
@@ -0,0 +1,43 @@
+/**
+ * ObjectQL Sync Protocol — Server-side Version Store
+ * Copyright (c) 2026-present ObjectStack Inc.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+/**
+ * Server-side record version store.
+ * Tracks the current version of each record for optimistic concurrency control.
+ */
+export class VersionStore {
+  private readonly versions = new Map<string, number>();
+
+  private key(objectName: string, recordId: string | number): string {
+    return `${objectName}:${String(recordId)}`; // composite key; assumes objectName contains no ambiguous ':' collisions — TODO confirm
+  }
+
+  /** Get the current version of a record */
+  getVersion(objectName: string, recordId: string | number): number {
+    return this.versions.get(this.key(objectName, recordId)) ?? 0; // 0 = untracked / never seen
+  }
+
+  /** Increment and return the new version */
+  increment(objectName: string, recordId: string | number): number {
+    const k = this.key(objectName, recordId);
+    const current = this.versions.get(k) ??
0; + const next = current + 1; + this.versions.set(k, next); + return next; + } + + /** Remove version tracking for a deleted record */ + remove(objectName: string, recordId: string | number): void { + this.versions.delete(this.key(objectName, recordId)); + } + + /** Get total tracked records */ + get size(): number { + return this.versions.size; + } +} diff --git a/packages/protocols/sync/tsconfig.json b/packages/protocols/sync/tsconfig.json new file mode 100644 index 00000000..944c41ec --- /dev/null +++ b/packages/protocols/sync/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../../tsconfig.base.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts"] +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 26a2b92b..4a7c2b02 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -572,6 +572,19 @@ importers: specifier: ^5.3.0 version: 5.9.3 + packages/foundation/edge-adapter: + dependencies: + '@objectql/types': + specifier: workspace:* + version: link:../types + devDependencies: + typescript: + specifier: ^5.3.0 + version: 5.9.3 + vitest: + specifier: ^1.0.4 + version: 1.6.1(@types/node@25.1.0)(@vitest/ui@1.6.1)(lightningcss@1.30.2) + packages/foundation/platform-node: dependencies: '@objectql/core': @@ -651,6 +664,19 @@ importers: specifier: ^5.3.0 version: 5.9.3 + packages/foundation/plugin-sync: + dependencies: + '@objectql/types': + specifier: workspace:* + version: link:../types + devDependencies: + typescript: + specifier: ^5.3.0 + version: 5.9.3 + vitest: + specifier: ^1.0.4 + version: 1.6.1(@types/node@25.1.0)(@vitest/ui@1.6.1)(lightningcss@1.30.2) + packages/foundation/plugin-validator: dependencies: '@objectql/types': @@ -830,6 +856,19 @@ importers: specifier: ^1.0.4 version: 1.6.1(@types/node@25.1.0)(@vitest/ui@1.6.1)(lightningcss@1.30.2) + packages/protocols/sync: + dependencies: + '@objectql/types': + specifier: workspace:* + version: 
link:../../foundation/types + devDependencies: + typescript: + specifier: ^5.3.3 + version: 5.9.3 + vitest: + specifier: ^1.0.4 + version: 1.6.1(@types/node@25.1.0)(@vitest/ui@1.6.1)(lightningcss@1.30.2) + packages/tools/cli: dependencies: '@objectql/core': @@ -1783,105 +1822,89 @@ packages: resolution: {integrity: sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==} cpu: [arm64] os: [linux] - libc: [glibc] '@img/sharp-libvips-linux-arm@1.2.4': resolution: {integrity: sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==} cpu: [arm] os: [linux] - libc: [glibc] '@img/sharp-libvips-linux-ppc64@1.2.4': resolution: {integrity: sha512-FMuvGijLDYG6lW+b/UvyilUWu5Ayu+3r2d1S8notiGCIyYU/76eig1UfMmkZ7vwgOrzKzlQbFSuQfgm7GYUPpA==} cpu: [ppc64] os: [linux] - libc: [glibc] '@img/sharp-libvips-linux-riscv64@1.2.4': resolution: {integrity: sha512-oVDbcR4zUC0ce82teubSm+x6ETixtKZBh/qbREIOcI3cULzDyb18Sr/Wcyx7NRQeQzOiHTNbZFF1UwPS2scyGA==} cpu: [riscv64] os: [linux] - libc: [glibc] '@img/sharp-libvips-linux-s390x@1.2.4': resolution: {integrity: sha512-qmp9VrzgPgMoGZyPvrQHqk02uyjA0/QrTO26Tqk6l4ZV0MPWIW6LTkqOIov+J1yEu7MbFQaDpwdwJKhbJvuRxQ==} cpu: [s390x] os: [linux] - libc: [glibc] '@img/sharp-libvips-linux-x64@1.2.4': resolution: {integrity: sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==} cpu: [x64] os: [linux] - libc: [glibc] '@img/sharp-libvips-linuxmusl-arm64@1.2.4': resolution: {integrity: sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==} cpu: [arm64] os: [linux] - libc: [musl] '@img/sharp-libvips-linuxmusl-x64@1.2.4': resolution: {integrity: sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==} cpu: [x64] os: [linux] - libc: [musl] '@img/sharp-linux-arm64@0.34.5': resolution: {integrity: 
sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [linux] - libc: [glibc] '@img/sharp-linux-arm@0.34.5': resolution: {integrity: sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm] os: [linux] - libc: [glibc] '@img/sharp-linux-ppc64@0.34.5': resolution: {integrity: sha512-7zznwNaqW6YtsfrGGDA6BRkISKAAE1Jo0QdpNYXNMHu2+0dTrPflTLNkpc8l7MUP5M16ZJcUvysVWWrMefZquA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [ppc64] os: [linux] - libc: [glibc] '@img/sharp-linux-riscv64@0.34.5': resolution: {integrity: sha512-51gJuLPTKa7piYPaVs8GmByo7/U7/7TZOq+cnXJIHZKavIRHAP77e3N2HEl3dgiqdD/w0yUfiJnII77PuDDFdw==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [riscv64] os: [linux] - libc: [glibc] '@img/sharp-linux-s390x@0.34.5': resolution: {integrity: sha512-nQtCk0PdKfho3eC5MrbQoigJ2gd1CgddUMkabUj+rBevs8tZ2cULOx46E7oyX+04WGfABgIwmMC0VqieTiR4jg==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [s390x] os: [linux] - libc: [glibc] '@img/sharp-linux-x64@0.34.5': resolution: {integrity: sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [x64] os: [linux] - libc: [glibc] '@img/sharp-linuxmusl-arm64@0.34.5': resolution: {integrity: sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [linux] - libc: [musl] '@img/sharp-linuxmusl-x64@0.34.5': resolution: {integrity: sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [x64] os: [linux] - libc: [musl] '@img/sharp-wasm32@0.34.5': resolution: {integrity: 
sha512-OdWTEiVkY2PHwqkbBI8frFxQQFekHaSSkUIJkwzclWZe64O1X4UlUjqqqLaPbUpMOQk6FBu/HtlGXNblIs0huw==} @@ -1986,28 +2009,24 @@ packages: engines: {node: '>= 10'} cpu: [arm64] os: [linux] - libc: [glibc] '@next/swc-linux-arm64-musl@16.1.6': resolution: {integrity: sha512-S4J2v+8tT3NIO9u2q+S0G5KdvNDjXfAv06OhfOzNDaBn5rw84DGXWndOEB7d5/x852A20sW1M56vhC/tRVbccQ==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - libc: [musl] '@next/swc-linux-x64-gnu@16.1.6': resolution: {integrity: sha512-2eEBDkFlMMNQnkTyPBhQOAyn2qMxyG2eE7GPH2WIDGEpEILcBPI/jdSv4t6xupSP+ot/jkfrCShLAa7+ZUPcJQ==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - libc: [glibc] '@next/swc-linux-x64-musl@16.1.6': resolution: {integrity: sha512-oicJwRlyOoZXVlxmIMaTq7f8pN9QNbdes0q2FXfRsPhfCi8n8JmOZJm5oo1pwDaFbnnD421rVU409M3evFbIqg==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - libc: [musl] '@next/swc-win32-arm64-msvc@16.1.6': resolution: {integrity: sha512-gQmm8izDTPgs+DCWH22kcDmuUp7NyiJgEl18bcr8irXA5N2m2O+JQIr6f3ct42GOs9c0h8QF3L5SzIxcYAAXXw==} @@ -2550,79 +2569,66 @@ packages: resolution: {integrity: sha512-eyrr5W08Ms9uM0mLcKfM/Uzx7hjhz2bcjv8P2uynfj0yU8GGPdz8iYrBPhiLOZqahoAMB8ZiolRZPbbU2MAi6Q==} cpu: [arm] os: [linux] - libc: [glibc] '@rollup/rollup-linux-arm-musleabihf@4.57.0': resolution: {integrity: sha512-Xds90ITXJCNyX9pDhqf85MKWUI4lqjiPAipJ8OLp8xqI2Ehk+TCVhF9rvOoN8xTbcafow3QOThkNnrM33uCFQA==} cpu: [arm] os: [linux] - libc: [musl] '@rollup/rollup-linux-arm64-gnu@4.57.0': resolution: {integrity: sha512-Xws2KA4CLvZmXjy46SQaXSejuKPhwVdaNinldoYfqruZBaJHqVo6hnRa8SDo9z7PBW5x84SH64+izmldCgbezw==} cpu: [arm64] os: [linux] - libc: [glibc] '@rollup/rollup-linux-arm64-musl@4.57.0': resolution: {integrity: sha512-hrKXKbX5FdaRJj7lTMusmvKbhMJSGWJ+w++4KmjiDhpTgNlhYobMvKfDoIWecy4O60K6yA4SnztGuNTQF+Lplw==} cpu: [arm64] os: [linux] - libc: [musl] '@rollup/rollup-linux-loong64-gnu@4.57.0': resolution: {integrity: sha512-6A+nccfSDGKsPm00d3xKcrsBcbqzCTAukjwWK6rbuAnB2bHaL3r9720HBVZ/no7+FhZLz/U3GwwZZEh6tOSI8Q==} cpu: [loong64] 
os: [linux] - libc: [glibc] '@rollup/rollup-linux-loong64-musl@4.57.0': resolution: {integrity: sha512-4P1VyYUe6XAJtQH1Hh99THxr0GKMMwIXsRNOceLrJnaHTDgk1FTcTimDgneRJPvB3LqDQxUmroBclQ1S0cIJwQ==} cpu: [loong64] os: [linux] - libc: [musl] '@rollup/rollup-linux-ppc64-gnu@4.57.0': resolution: {integrity: sha512-8Vv6pLuIZCMcgXre6c3nOPhE0gjz1+nZP6T+hwWjr7sVH8k0jRkH+XnfjjOTglyMBdSKBPPz54/y1gToSKwrSQ==} cpu: [ppc64] os: [linux] - libc: [glibc] '@rollup/rollup-linux-ppc64-musl@4.57.0': resolution: {integrity: sha512-r1te1M0Sm2TBVD/RxBPC6RZVwNqUTwJTA7w+C/IW5v9Ssu6xmxWEi+iJQlpBhtUiT1raJ5b48pI8tBvEjEFnFA==} cpu: [ppc64] os: [linux] - libc: [musl] '@rollup/rollup-linux-riscv64-gnu@4.57.0': resolution: {integrity: sha512-say0uMU/RaPm3CDQLxUUTF2oNWL8ysvHkAjcCzV2znxBr23kFfaxocS9qJm+NdkRhF8wtdEEAJuYcLPhSPbjuQ==} cpu: [riscv64] os: [linux] - libc: [glibc] '@rollup/rollup-linux-riscv64-musl@4.57.0': resolution: {integrity: sha512-/MU7/HizQGsnBREtRpcSbSV1zfkoxSTR7wLsRmBPQ8FwUj5sykrP1MyJTvsxP5KBq9SyE6kH8UQQQwa0ASeoQQ==} cpu: [riscv64] os: [linux] - libc: [musl] '@rollup/rollup-linux-s390x-gnu@4.57.0': resolution: {integrity: sha512-Q9eh+gUGILIHEaJf66aF6a414jQbDnn29zeu0eX3dHMuysnhTvsUvZTCAyZ6tJhUjnvzBKE4FtuaYxutxRZpOg==} cpu: [s390x] os: [linux] - libc: [glibc] '@rollup/rollup-linux-x64-gnu@4.57.0': resolution: {integrity: sha512-OR5p5yG5OKSxHReWmwvM0P+VTPMwoBS45PXTMYaskKQqybkS3Kmugq1W+YbNWArF8/s7jQScgzXUhArzEQ7x0A==} cpu: [x64] os: [linux] - libc: [glibc] '@rollup/rollup-linux-x64-musl@4.57.0': resolution: {integrity: sha512-XeatKzo4lHDsVEbm1XDHZlhYZZSQYym6dg2X/Ko0kSFgio+KXLsxwJQprnR48GvdIKDOpqWqssC3iBCjoMcMpw==} cpu: [x64] os: [linux] - libc: [musl] '@rollup/rollup-openbsd-x64@4.57.0': resolution: {integrity: sha512-Lu71y78F5qOfYmubYLHPcJm74GZLU6UJ4THkf/a1K7Tz2ycwC2VUbsqbJAXaR6Bx70SRdlVrt2+n5l7F0agTUw==} @@ -2801,28 +2807,24 @@ packages: engines: {node: '>= 10'} cpu: [arm64] os: [linux] - libc: [glibc] '@tailwindcss/oxide-linux-arm64-musl@4.1.18': resolution: {integrity: 
sha512-1px92582HkPQlaaCkdRcio71p8bc8i/ap5807tPRDK/uw953cauQBT8c5tVGkOwrHMfc2Yh6UuxaH4vtTjGvHg==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - libc: [musl] '@tailwindcss/oxide-linux-x64-gnu@4.1.18': resolution: {integrity: sha512-v3gyT0ivkfBLoZGF9LyHmts0Isc8jHZyVcbzio6Wpzifg/+5ZJpDiRiUhDLkcr7f/r38SWNe7ucxmGW3j3Kb/g==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - libc: [glibc] '@tailwindcss/oxide-linux-x64-musl@4.1.18': resolution: {integrity: sha512-bhJ2y2OQNlcRwwgOAGMY0xTFStt4/wyU6pvI6LSuZpRgKQwxTec0/3Scu91O8ir7qCR3AuepQKLU/kX99FouqQ==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - libc: [musl] '@tailwindcss/oxide-wasm32-wasi@4.1.18': resolution: {integrity: sha512-LffYTvPjODiP6PT16oNeUQJzNVyJl1cjIebq/rWWBF+3eDst5JGEFSc5cWxyRCJ0Mxl+KyIkqRxk1XPEs9x8TA==} @@ -5212,28 +5214,24 @@ packages: engines: {node: '>= 12.0.0'} cpu: [arm64] os: [linux] - libc: [glibc] lightningcss-linux-arm64-musl@1.30.2: resolution: {integrity: sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA==} engines: {node: '>= 12.0.0'} cpu: [arm64] os: [linux] - libc: [musl] lightningcss-linux-x64-gnu@1.30.2: resolution: {integrity: sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w==} engines: {node: '>= 12.0.0'} cpu: [x64] os: [linux] - libc: [glibc] lightningcss-linux-x64-musl@1.30.2: resolution: {integrity: sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA==} engines: {node: '>= 12.0.0'} cpu: [x64] os: [linux] - libc: [musl] lightningcss-win32-arm64-msvc@1.30.2: resolution: {integrity: sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ==} diff --git a/vitest.config.ts b/vitest.config.ts index 719cb80d..94182e87 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -25,9 +25,12 @@ export default defineConfig({ '@objectql/plugin-validator': path.resolve(__dirname, './packages/foundation/plugin-validator/src'), 
'@objectql/plugin-formula': path.resolve(__dirname, './packages/foundation/plugin-formula/src'), '@objectql/plugin-security': path.resolve(__dirname, './packages/foundation/plugin-security/src'), + '@objectql/plugin-sync': path.resolve(__dirname, './packages/foundation/plugin-sync/src'), + '@objectql/edge-adapter': path.resolve(__dirname, './packages/foundation/edge-adapter/src'), '@objectql/protocol-graphql': path.resolve(__dirname, './packages/protocols/graphql/src'), '@objectql/protocol-odata-v4': path.resolve(__dirname, './packages/protocols/odata-v4/src'), '@objectql/protocol-json-rpc': path.resolve(__dirname, './packages/protocols/json-rpc/src'), + '@objectql/protocol-sync': path.resolve(__dirname, './packages/protocols/sync/src'), // Drivers '@objectql/driver-memory': path.resolve(__dirname, './packages/drivers/memory/src'), '@objectql/driver-mongo': path.resolve(__dirname, './packages/drivers/mongo/src'),