From 12e0833de855dd9e2430ced91615ae4f4444d049 Mon Sep 17 00:00:00 2001 From: Alessandro Franceschi Date: Tue, 6 Jan 2026 23:33:10 +0100 Subject: [PATCH 01/12] feat(hiera): implement Hiera and local Puppet codebase integration Add HieraParser, HieraScanner, HieraResolver, HieraService for Hiera data handling Add FactService with PuppetDB integration and local fallback Add CodeAnalyzer for unused code detection and linting Add ForgeClient and PuppetfileParser for module management Add CatalogCompiler for catalog compilation mode Add HieraPlugin following existing integration architecture Add API routes for Hiera endpoints Add NodeHieraTab, GlobalHieraTab, CodeAnalysisTab frontend components Add HieraSetupGuide for integration setup Add 16 property-based tests and unit tests Update IntegrationStatus to display Hiera health --- .kiro/debug-inventory-linking.js | 28 +- .../hiera-codebase-integration/design.md | 970 +++++++++++++ .../requirements.md | 213 +++ .../specs/hiera-codebase-integration/tasks.md | 593 ++++++++ .../inventory-multiple-source-tags-bug.md | 2 +- README.md | 1 + backend/.env.example | 27 + backend/package.json | 3 +- backend/src/config/ConfigService.ts | 159 +++ backend/src/config/schema.ts | 83 +- .../src/integrations/hiera/CatalogCompiler.ts | 491 +++++++ .../src/integrations/hiera/CodeAnalyzer.ts | 1230 +++++++++++++++++ backend/src/integrations/hiera/FactService.ts | 472 +++++++ backend/src/integrations/hiera/ForgeClient.ts | 511 +++++++ backend/src/integrations/hiera/HieraParser.ts | 789 +++++++++++ backend/src/integrations/hiera/HieraPlugin.ts | 746 ++++++++++ .../src/integrations/hiera/HieraResolver.ts | 885 ++++++++++++ .../src/integrations/hiera/HieraScanner.ts | 722 ++++++++++ .../src/integrations/hiera/HieraService.ts | 960 +++++++++++++ .../integrations/hiera/PuppetfileParser.ts | 464 +++++++ backend/src/integrations/hiera/index.ts | 60 + backend/src/integrations/hiera/types.ts | 527 +++++++ backend/src/routes/hiera.ts | 939 +++++++++++++ 
backend/src/routes/integrations.ts | 19 +- backend/src/server.ts | 62 + .../bolt-plugin-integration.test.ts | 14 +- .../integration/integration-status.test.ts | 32 +- .../test/integrations/CodeAnalyzer.test.ts | 551 ++++++++ backend/test/integrations/FactService.test.ts | 388 ++++++ backend/test/integrations/ForgeClient.test.ts | 259 ++++ backend/test/integrations/HieraParser.test.ts | 499 +++++++ backend/test/integrations/HieraPlugin.test.ts | 473 +++++++ .../test/integrations/HieraScanner.test.ts | 421 ++++++ .../test/integrations/HieraService.test.ts | 524 +++++++ .../integrations/PuppetfileParser.test.ts | 305 ++++ .../test/properties/hiera/property-10.test.ts | 341 +++++ .../test/properties/hiera/property-11.test.ts | 307 ++++ .../test/properties/hiera/property-12.test.ts | 268 ++++ .../test/properties/hiera/property-13.test.ts | 320 +++++ .../test/properties/hiera/property-14.test.ts | 378 +++++ .../test/properties/hiera/property-15.test.ts | 434 ++++++ .../test/properties/hiera/property-24.test.ts | 483 +++++++ .../test/properties/hiera/property-28.test.ts | 376 +++++ .../test/properties/hiera/property-29.test.ts | 429 ++++++ .../test/properties/hiera/property-3.test.ts | 371 +++++ .../test/properties/hiera/property-4.test.ts | 366 +++++ .../test/properties/hiera/property-5.test.ts | 389 ++++++ .../test/properties/hiera/property-6.test.ts | 445 ++++++ .../test/properties/hiera/property-7.test.ts | 345 +++++ .../test/properties/hiera/property-8.test.ts | 298 ++++ .../test/properties/hiera/property-9.test.ts | 268 ++++ frontend/src/components/CatalogViewer.svelte | 67 +- .../src/components/CodeAnalysisTab.svelte | 832 +++++++++++ frontend/src/components/EventsViewer.svelte | 123 +- frontend/src/components/GlobalHieraTab.svelte | 513 +++++++ .../src/components/HieraSetupGuide.svelte | 606 ++++++++ .../src/components/IntegrationStatus.svelte | 448 +++++- frontend/src/components/NodeHieraTab.svelte | 634 +++++++++ .../src/components/PuppetdbSetupGuide.svelte | 27 
+- .../src/components/README_RE_EXECUTION.md | 89 -- frontend/src/components/index.ts | 4 + .../src/pages/IntegrationSetupPage.svelte | 22 +- frontend/src/pages/NodeDetailPage.svelte | 132 +- frontend/src/pages/PuppetPage.svelte | 77 +- package-lock.json | 32 +- scripts/update-version.js | 24 +- 66 files changed, 23521 insertions(+), 349 deletions(-) create mode 100644 .kiro/specs/hiera-codebase-integration/design.md create mode 100644 .kiro/specs/hiera-codebase-integration/requirements.md create mode 100644 .kiro/specs/hiera-codebase-integration/tasks.md create mode 100644 backend/src/integrations/hiera/CatalogCompiler.ts create mode 100644 backend/src/integrations/hiera/CodeAnalyzer.ts create mode 100644 backend/src/integrations/hiera/FactService.ts create mode 100644 backend/src/integrations/hiera/ForgeClient.ts create mode 100644 backend/src/integrations/hiera/HieraParser.ts create mode 100644 backend/src/integrations/hiera/HieraPlugin.ts create mode 100644 backend/src/integrations/hiera/HieraResolver.ts create mode 100644 backend/src/integrations/hiera/HieraScanner.ts create mode 100644 backend/src/integrations/hiera/HieraService.ts create mode 100644 backend/src/integrations/hiera/PuppetfileParser.ts create mode 100644 backend/src/integrations/hiera/index.ts create mode 100644 backend/src/integrations/hiera/types.ts create mode 100644 backend/src/routes/hiera.ts create mode 100644 backend/test/integrations/CodeAnalyzer.test.ts create mode 100644 backend/test/integrations/FactService.test.ts create mode 100644 backend/test/integrations/ForgeClient.test.ts create mode 100644 backend/test/integrations/HieraParser.test.ts create mode 100644 backend/test/integrations/HieraPlugin.test.ts create mode 100644 backend/test/integrations/HieraScanner.test.ts create mode 100644 backend/test/integrations/HieraService.test.ts create mode 100644 backend/test/integrations/PuppetfileParser.test.ts create mode 100644 backend/test/properties/hiera/property-10.test.ts create mode 
100644 backend/test/properties/hiera/property-11.test.ts create mode 100644 backend/test/properties/hiera/property-12.test.ts create mode 100644 backend/test/properties/hiera/property-13.test.ts create mode 100644 backend/test/properties/hiera/property-14.test.ts create mode 100644 backend/test/properties/hiera/property-15.test.ts create mode 100644 backend/test/properties/hiera/property-24.test.ts create mode 100644 backend/test/properties/hiera/property-28.test.ts create mode 100644 backend/test/properties/hiera/property-29.test.ts create mode 100644 backend/test/properties/hiera/property-3.test.ts create mode 100644 backend/test/properties/hiera/property-4.test.ts create mode 100644 backend/test/properties/hiera/property-5.test.ts create mode 100644 backend/test/properties/hiera/property-6.test.ts create mode 100644 backend/test/properties/hiera/property-7.test.ts create mode 100644 backend/test/properties/hiera/property-8.test.ts create mode 100644 backend/test/properties/hiera/property-9.test.ts create mode 100644 frontend/src/components/CodeAnalysisTab.svelte create mode 100644 frontend/src/components/GlobalHieraTab.svelte create mode 100644 frontend/src/components/HieraSetupGuide.svelte create mode 100644 frontend/src/components/NodeHieraTab.svelte delete mode 100644 frontend/src/components/README_RE_EXECUTION.md diff --git a/.kiro/debug-inventory-linking.js b/.kiro/debug-inventory-linking.js index 5ded672..63ac5c2 100644 --- a/.kiro/debug-inventory-linking.js +++ b/.kiro/debug-inventory-linking.js @@ -2,7 +2,7 @@ /** * Debug script to test inventory API and node linking behavior - * + * * This script will: * 1. Fetch the inventory from the API * 2. 
Check which sources each node appears in @@ -58,11 +58,11 @@ async function debugInventoryLinking() { try { console.log('šŸ” Fetching inventory from API...'); const response = await makeRequest(API_PATH); - + console.log('\nšŸ“Š Inventory Summary:'); console.log(`Total nodes: ${response.nodes?.length || 0}`); console.log(`Sources: ${Object.keys(response.sources || {}).join(', ')}`); - + if (!response.nodes || response.nodes.length === 0) { console.log('āŒ No nodes found in inventory'); return; @@ -70,16 +70,16 @@ async function debugInventoryLinking() { console.log('\nšŸ·ļø Node Source Analysis:'); console.log('='.repeat(80)); - + const nodesBySource = {}; const multiSourceNodes = []; - + for (const node of response.nodes) { const sources = node.sources || [node.source || 'bolt']; const sourcesStr = sources.join(', '); - + console.log(`${node.name.padEnd(25)} | Sources: [${sourcesStr.padEnd(20)}] | Linked: ${node.linked || false}`); - + // Track nodes by source for (const source of sources) { if (!nodesBySource[source]) { @@ -87,7 +87,7 @@ async function debugInventoryLinking() { } nodesBySource[source].push(node.name); } - + // Track multi-source nodes if (sources.length > 1) { multiSourceNodes.push({ @@ -97,14 +97,14 @@ async function debugInventoryLinking() { }); } } - + console.log('\nšŸ“ˆ Source Breakdown:'); console.log('='.repeat(50)); for (const [source, nodes] of Object.entries(nodesBySource)) { console.log(`${source}: ${nodes.length} nodes`); console.log(` - ${nodes.join(', ')}`); } - + console.log('\nšŸ”— Multi-Source Nodes:'); console.log('='.repeat(50)); if (multiSourceNodes.length === 0) { @@ -115,7 +115,7 @@ async function debugInventoryLinking() { console.log(`āœ… ${node.name}: [${node.sources.join(', ')}] (linked: ${node.linked})`); } } - + // Specific check for puppet.office.lab42 console.log('\nšŸŽÆ Specific Node Analysis: puppet.office.lab42'); console.log('='.repeat(50)); @@ -128,7 +128,7 @@ async function debugInventoryLinking() { 
console.log(`Sources Array: [${(puppetNode.sources || []).join(', ')}]`); console.log(`Linked: ${puppetNode.linked || false}`); console.log(`Transport: ${puppetNode.transport}`); - + if (puppetNode.sources && puppetNode.sources.length === 1) { console.log('āš ļø ISSUE: This node only shows one source but should show multiple'); console.log(' Expected: Should appear in both Bolt and PuppetDB inventories'); @@ -136,7 +136,7 @@ async function debugInventoryLinking() { } else { console.log('āŒ puppet.office.lab42 not found in inventory'); } - + } catch (error) { console.error('āŒ Error debugging inventory:', error.message); console.log('\nšŸ’” Troubleshooting:'); @@ -149,4 +149,4 @@ async function debugInventoryLinking() { // Run the debug script console.log('šŸš€ Starting Inventory Linking Debug Script'); console.log(`Connecting to: http://${API_HOST}:${API_PORT}${API_PATH}`); -debugInventoryLinking(); \ No newline at end of file +debugInventoryLinking(); diff --git a/.kiro/specs/hiera-codebase-integration/design.md b/.kiro/specs/hiera-codebase-integration/design.md new file mode 100644 index 0000000..7d6537b --- /dev/null +++ b/.kiro/specs/hiera-codebase-integration/design.md @@ -0,0 +1,970 @@ +# Design Document: Hiera and Local Puppet Codebase Integration + +## Overview + +This design document describes the architecture and implementation approach for integrating Hiera data lookup and Puppet codebase analysis into Pabawi v0.4.0. The integration follows the existing plugin architecture pattern used by PuppetDB and Puppetserver integrations, providing a consistent user experience while adding powerful new capabilities for Puppet administrators. 
+ +The integration enables: + +- Configuration of a local Puppet control repository +- Parsing and resolution of Hiera data with full lookup method support +- Node-specific Hiera key visualization with usage highlighting +- Global Hiera key search across all nodes +- Static code analysis of Puppet manifests +- Module update detection from Puppetfile + +## Architecture + +### High-Level Architecture + +```mermaid +graph TB + subgraph Frontend + UI[Svelte UI Components] + NodeHieraTab[Node Hiera Tab] + GlobalHieraTab[Global Hiera Tab] + CodeAnalysisTab[Code Analysis Tab] + SetupGuide[Hiera Setup Guide] + end + + subgraph Backend + API[REST API Routes] + HieraPlugin[HieraPlugin] + HieraService[HieraService] + HieraParser[HieraParser] + HieraResolver[HieraResolver] + HieraScanner[HieraScanner] + CodeAnalyzer[CodeAnalyzer] + FactService[FactService] + end + + subgraph External + ControlRepo[Control Repository] + PuppetDB[PuppetDB Integration] + LocalFacts[Local Fact Files] + end + + UI --> API + NodeHieraTab --> API + GlobalHieraTab --> API + CodeAnalysisTab --> API + SetupGuide --> API + + API --> HieraPlugin + HieraPlugin --> HieraService + HieraService --> HieraParser + HieraService --> HieraResolver + HieraService --> HieraScanner + HieraService --> CodeAnalyzer + HieraService --> FactService + + HieraParser --> ControlRepo + HieraScanner --> ControlRepo + CodeAnalyzer --> ControlRepo + FactService --> PuppetDB + FactService --> LocalFacts +``` + +### Component Architecture + +```mermaid +graph LR + subgraph Integration Layer + IM[IntegrationManager] + HP[HieraPlugin] + end + + subgraph Service Layer + HS[HieraService] + FS[FactService] + CA[CodeAnalyzer] + end + + subgraph Parser Layer + HPR[HieraParser] + HR[HieraResolver] + HSC[HieraScanner] + PP[PuppetParser] + end + + subgraph Cache Layer + HC[HieraCache] + FC[FactCache] + AC[AnalysisCache] + end + + IM --> HP + HP --> HS + HP --> CA + HS --> HPR + HS --> HR + HS --> HSC + HS --> FS + CA --> PP + + HS --> HC + 
FS --> FC + CA --> AC +``` + +## Components and Interfaces + +### Backend Components + +#### 1. HieraPlugin (backend/src/integrations/hiera/HieraPlugin.ts) + +Extends `BasePlugin` to integrate with the existing plugin architecture. + +```typescript +interface HieraPluginConfig { + enabled: boolean; + controlRepoPath: string; + hieraConfigPath?: string; // defaults to hiera.yaml + environments?: string[]; + factSources: { + puppetdb: boolean; + localPath?: string; + }; + catalogCompilation: { + enabled: boolean; + cacheTTL?: number; + }; + cache: { + ttl: number; + maxSize: number; + }; +} + +class HieraPlugin extends BasePlugin implements InformationSourcePlugin { + type = 'information' as const; + + async initialize(config: IntegrationConfig): Promise; + async healthCheck(): Promise; + async getInventory(): Promise; + async getNodeFacts(nodeId: string): Promise; + async getNodeData(nodeId: string, dataType: string): Promise; + + // Hiera-specific methods + getHieraService(): HieraService; + getCodeAnalyzer(): CodeAnalyzer; +} +``` + +#### 2. HieraService (backend/src/integrations/hiera/HieraService.ts) + +Core service orchestrating Hiera operations. 
+ +```typescript +interface HieraService { + // Key discovery + getAllKeys(): Promise; + searchKeys(query: string): Promise; + + // Key resolution + resolveKey(nodeId: string, key: string): Promise; + resolveAllKeys(nodeId: string): Promise>; + + // Node-specific data + getNodeHieraData(nodeId: string): Promise; + getKeyUsageByNode(nodeId: string): Promise; + + // Global queries + getKeyValuesAcrossNodes(key: string): Promise; + + // Cache management + invalidateCache(): void; + reloadControlRepo(): Promise; +} + +interface HieraKey { + name: string; + locations: HieraKeyLocation[]; + lookupOptions?: LookupOptions; +} + +interface HieraKeyLocation { + file: string; + hierarchyLevel: string; + lineNumber: number; + value: unknown; +} + +interface HieraResolution { + key: string; + resolvedValue: unknown; + lookupMethod: 'first' | 'unique' | 'hash' | 'deep'; + sourceFile: string; + hierarchyLevel: string; + allValues: HieraKeyLocation[]; + interpolatedVariables?: Record; +} + +interface NodeHieraData { + nodeId: string; + facts: Facts; + keys: Map; + usedKeys: Set; + unusedKeys: Set; +} + +interface KeyNodeValues { + nodeId: string; + value: unknown; + sourceFile: string; + hierarchyLevel: string; +} +``` + +#### 3. HieraParser (backend/src/integrations/hiera/HieraParser.ts) + +Parses hiera.yaml configuration files. 
+ +```typescript +interface HieraConfig { + version: 5; + defaults?: HieraDefaults; + hierarchy: HierarchyLevel[]; + lookupOptions?: Record; +} + +interface HierarchyLevel { + name: string; + path?: string; + paths?: string[]; + glob?: string; + globs?: string[]; + datadir?: string; + data_hash?: string; + lookup_key?: string; + mapped_paths?: [string, string, string]; + options?: Record; +} + +interface LookupOptions { + merge?: 'first' | 'unique' | 'hash' | 'deep'; + convert_to?: 'Array' | 'Hash'; + knockout_prefix?: string; +} + +interface HieraParser { + parse(configPath: string): Promise; + validateConfig(config: HieraConfig): ValidationResult; + expandHierarchyPaths(config: HieraConfig, facts: Facts): string[]; +} +``` + +#### 4. HieraResolver (backend/src/integrations/hiera/HieraResolver.ts) + +Resolves Hiera keys using the hierarchy and facts. + +```typescript +interface HieraResolver { + resolve( + key: string, + facts: Facts, + config: HieraConfig, + options?: ResolveOptions + ): Promise; + + resolveWithCatalog( + key: string, + nodeId: string, + environment: string + ): Promise; + + interpolateValue( + value: unknown, + facts: Facts, + variables?: Record + ): unknown; +} + +interface ResolveOptions { + lookupMethod?: 'first' | 'unique' | 'hash' | 'deep'; + defaultValue?: unknown; + mergeOptions?: MergeOptions; +} + +interface MergeOptions { + strategy: 'first' | 'unique' | 'hash' | 'deep'; + knockoutPrefix?: string; + sortMergedArrays?: boolean; + mergeHashArrays?: boolean; +} +``` + +#### 5. HieraScanner (backend/src/integrations/hiera/HieraScanner.ts) + +Scans hieradata files to build key index. 
+ +```typescript +interface HieraScanner { + scan(hieradataPath: string): Promise; + watchForChanges(callback: () => void): void; + stopWatching(): void; +} + +interface HieraKeyIndex { + keys: Map; + files: Map; + lastScan: string; + totalKeys: number; + totalFiles: number; +} + +interface HieraFileInfo { + path: string; + hierarchyLevel: string; + keys: string[]; + lastModified: string; +} +``` + +#### 6. FactService (backend/src/integrations/hiera/FactService.ts) + +Thin wrapper that leverages the existing PuppetDB integration for fact retrieval, with fallback to local files. + +**Design Decision**: Rather than duplicating fact retrieval logic, this service delegates to the existing `PuppetDBService.getNodeFacts()` when PuppetDB integration is available. This ensures: + +- Single source of truth for PuppetDB communication +- Consistent caching behavior +- No code duplication + +```typescript +interface FactService { + /** + * Get facts for a node, using PuppetDB if available, falling back to local files + * @param nodeId - Node identifier (certname) + * @returns Facts and metadata about the source + */ + getFacts(nodeId: string): Promise; + + /** + * Get the fact source that would be used for a node + */ + getFactSource(nodeId: string): Promise<'puppetdb' | 'local' | 'none'>; + + /** + * List all nodes with available facts (from any source) + */ + listAvailableNodes(): Promise; +} + +interface FactResult { + facts: Facts; + source: 'puppetdb' | 'local'; + warnings?: string[]; +} + +interface LocalFactFile { + name: string; + values: Record; +} + +// Implementation approach: +class FactServiceImpl implements FactService { + constructor( + private integrationManager: IntegrationManager, + private localFactsPath?: string + ) {} + + async getFacts(nodeId: string): Promise { + // Try PuppetDB first via existing integration + const puppetdb = this.integrationManager.getInformationSource('puppetdb'); + if (puppetdb?.isInitialized()) { + try { + const facts = await 
puppetdb.getNodeFacts(nodeId); + return { facts, source: 'puppetdb' }; + } catch (error) { + // Fall through to local facts + } + } + + // Fall back to local facts + if (this.localFactsPath) { + const facts = await this.loadLocalFacts(nodeId); + if (facts) { + return { + facts, + source: 'local', + warnings: ['Using local fact files - facts may be outdated'] + }; + } + } + + // No facts available + return { + facts: { nodeId, gatheredAt: new Date().toISOString(), facts: {} }, + source: 'local', + warnings: [`No facts available for node '${nodeId}'`] + }; + } +} +``` + +#### 7. CodeAnalyzer (backend/src/integrations/hiera/CodeAnalyzer.ts) + +Performs static analysis of Puppet code. + +```typescript +interface CodeAnalyzer { + analyze(): Promise; + getUnusedCode(): Promise; + getLintIssues(): Promise; + getModuleUpdates(): Promise; + getUsageStatistics(): Promise; +} + +interface CodeAnalysisResult { + unusedCode: UnusedCodeReport; + lintIssues: LintIssue[]; + moduleUpdates: ModuleUpdate[]; + statistics: UsageStatistics; + analyzedAt: string; +} + +interface UnusedCodeReport { + unusedClasses: UnusedItem[]; + unusedDefinedTypes: UnusedItem[]; + unusedHieraKeys: UnusedItem[]; +} + +interface UnusedItem { + name: string; + file: string; + line: number; + type: 'class' | 'defined_type' | 'hiera_key'; +} + +interface LintIssue { + file: string; + line: number; + column: number; + severity: 'error' | 'warning' | 'info'; + message: string; + rule: string; + fixable: boolean; +} + +interface ModuleUpdate { + name: string; + currentVersion: string; + latestVersion: string; + source: 'forge' | 'git'; + hasSecurityAdvisory: boolean; + changelog?: string; +} + +interface UsageStatistics { + totalManifests: number; + totalClasses: number; + totalDefinedTypes: number; + totalFunctions: number; + linesOfCode: number; + mostUsedClasses: ClassUsage[]; + mostUsedResources: ResourceUsage[]; +} + +interface ClassUsage { + name: string; + usageCount: number; + nodes: string[]; +} + 
+interface ResourceUsage { + type: string; + count: number; +} +``` + +### API Routes + +#### Hiera Routes (backend/src/routes/hiera.ts) + +```typescript +// Configuration +GET /api/integrations/hiera/status +POST /api/integrations/hiera/reload + +// Key discovery +GET /api/integrations/hiera/keys +GET /api/integrations/hiera/keys/search?q={query} +GET /api/integrations/hiera/keys/{key} + +// Node-specific +GET /api/integrations/hiera/nodes/{nodeId}/data +GET /api/integrations/hiera/nodes/{nodeId}/keys +GET /api/integrations/hiera/nodes/{nodeId}/keys/{key} + +// Global key lookup +GET /api/integrations/hiera/keys/{key}/nodes + +// Code analysis +GET /api/integrations/hiera/analysis +GET /api/integrations/hiera/analysis/unused +GET /api/integrations/hiera/analysis/lint +GET /api/integrations/hiera/analysis/modules +GET /api/integrations/hiera/analysis/statistics +``` + +### Frontend Components + +#### 1. NodeHieraTab (frontend/src/components/NodeHieraTab.svelte) + +Displays Hiera data for a specific node with search and filtering. + +```typescript +interface NodeHieraTabProps { + nodeId: string; +} + +// Features: +// - Searchable list of all Hiera keys +// - Filter by used/unused keys +// - Expandable key details showing all hierarchy levels +// - Highlighted resolved value +// - Expert mode: show file paths, lookup methods, interpolation details +``` + +#### 2. GlobalHieraTab (frontend/src/components/GlobalHieraTab.svelte) + +Global Hiera key search across all nodes. + +```typescript +interface GlobalHieraTabProps {} + +// Features: +// - Search input for key name +// - Results grouped by resolved value +// - Node list with source file info +// - Click to navigate to node detail +``` + +#### 3. CodeAnalysisTab (frontend/src/components/CodeAnalysisTab.svelte) + +Displays code analysis results. 
+ +```typescript +interface CodeAnalysisTabProps {} + +// Features: +// - Dashboard with statistics +// - Unused code section with file links +// - Lint issues with severity filtering +// - Module updates with version comparison +// - Most used classes ranking +``` + +#### 4. HieraSetupGuide (frontend/src/components/HieraSetupGuide.svelte) + +Setup instructions for the Hiera integration. + +```typescript +// Features: +// - Step-by-step configuration guide +// - Control repo path configuration +// - Fact source selection (PuppetDB vs local) +// - Catalog compilation mode toggle +// - Connection test button +``` + +## Data Models + +### Configuration Schema + +```typescript +// backend/src/config/schema.ts additions + +interface HieraConfig { + enabled: boolean; + controlRepoPath: string; + hieraConfigPath: string; // relative to controlRepoPath + environments: string[]; + factSources: { + preferPuppetDB: boolean; + localFactsPath?: string; + }; + catalogCompilation: { + enabled: boolean; + timeout: number; + cacheTTL: number; + }; + cache: { + enabled: boolean; + ttl: number; + maxEntries: number; + }; + codeAnalysis: { + enabled: boolean; + lintEnabled: boolean; + moduleUpdateCheck: boolean; + analysisInterval: number; + }; +} +``` + +### Database Schema (if needed for caching) + +```sql +-- Optional: For persistent caching of analysis results +CREATE TABLE hiera_analysis_cache ( + id TEXT PRIMARY KEY, + analysis_type TEXT NOT NULL, + data JSON NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + expires_at TIMESTAMP NOT NULL +); + +CREATE INDEX idx_hiera_cache_type ON hiera_analysis_cache(analysis_type); +CREATE INDEX idx_hiera_cache_expires ON hiera_analysis_cache(expires_at); +``` + +## Correctness Properties + +*A property is a characteristic or behavior that should hold true across all valid executions of a system-essentially, a formal statement about what the system should do. 
Properties serve as the bridge between human-readable specifications and machine-verifiable correctness guarantees.* + +### Property 1: Configuration Round-Trip + +*For any* valid configuration object containing control repo path, fact source settings, and catalog compilation mode, storing the configuration and then retrieving it SHALL produce an equivalent configuration object. + +**Validates: Requirements 1.1, 3.2, 12.1** + +### Property 2: Control Repository Validation + +*For any* filesystem path, the Configuration_Service SHALL return valid=true if and only if the path exists, is accessible, and contains the expected Puppet structure (hiera.yaml file). + +**Validates: Requirements 1.2, 1.3** + +### Property 3: Hiera Configuration Parsing Round-Trip + +*For any* valid Hiera 5 configuration object, serializing it to YAML and then parsing it back SHALL produce an equivalent configuration with all hierarchy levels, paths, and data providers preserved. + +**Validates: Requirements 2.1, 2.2** + +### Property 4: Hiera Parser Error Reporting + +*For any* YAML string containing syntax errors, the Hiera_Parser SHALL return an error result that includes the line number where the error occurs. + +**Validates: Requirements 2.5** + +### Property 5: Hierarchy Path Interpolation + +*For any* hierarchy path template containing fact variables (e.g., `%{facts.os.family}`) and any valid fact set, interpolating the path SHALL replace all variables with their corresponding fact values. + +**Validates: Requirements 2.6** + +### Property 6: Fact Source Priority + +*For any* node where both PuppetDB and local fact files contain facts, the Fact_Service SHALL return the PuppetDB facts when PuppetDB integration is available and configured as preferred. 
+ +**Validates: Requirements 3.1, 3.5** + +### Property 7: Local Fact File Parsing + +*For any* valid JSON file in Puppetserver fact format (with "name" and "values" fields), the Fact_Service SHALL parse it and return a Facts object with all values accessible. + +**Validates: Requirements 3.3, 3.4** + +### Property 8: Key Scanning Completeness + +*For any* hieradata directory containing YAML files, the Hiera_Scanner SHALL discover all unique keys across all files, tracking for each key: the file path, hierarchy level, line number, and value. + +**Validates: Requirements 4.1, 4.2, 4.3, 4.4** + +### Property 9: Key Search Functionality + +*For any* key index and search query string, searching SHALL return all keys whose names contain the query string as a substring (case-insensitive). + +**Validates: Requirements 4.5, 7.4** + +### Property 10: Hiera Resolution Correctness + +*For any* Hiera key, fact set, and hierarchy configuration, the Hiera_Resolver SHALL: + +- Apply the correct lookup method (first, unique, hash, deep) based on lookup_options +- Return the value from the first matching hierarchy level (for 'first' lookup) +- Merge values according to the specified merge strategy (for merge lookups) +- Track which hierarchy level provided the final/winning value + +**Validates: Requirements 5.1, 5.2, 5.3, 5.4** + +### Property 11: Value Interpolation + +*For any* Hiera value containing variable references (e.g., `%{facts.hostname}`) and any fact set, resolving the value SHALL replace all variable references with their corresponding values from facts. + +**Validates: Requirements 5.5** + +### Property 12: Missing Key Handling + +*For any* Hiera key that does not exist in any hierarchy level for a given fact set, the Hiera_Resolver SHALL return a result indicating the key was not found (not throw an error). 
+ +**Validates: Requirements 5.6, 3.6** + +### Property 13: Key Usage Filtering + +*For any* node with a set of included classes and a set of Hiera keys, filtering by "used" SHALL return only keys that are referenced by the included classes, and filtering by "unused" SHALL return the complement. + +**Validates: Requirements 6.6** + +### Property 14: Global Key Resolution Across Nodes + +*For any* Hiera key and set of nodes, querying the key across all nodes SHALL return for each node: the resolved value (or indication of not found), the source file, and the hierarchy level. + +**Validates: Requirements 7.2, 7.3, 7.6** + +### Property 15: Node Grouping by Value + +*For any* set of key-node-value tuples, grouping by resolved value SHALL produce groups where all nodes in each group have the same resolved value for the key. + +**Validates: Requirements 7.5** + +### Property 16: Unused Code Detection + +*For any* control repository with classes, defined types, and Hiera keys, and a set of node catalogs, the Code_Analyzer SHALL identify as unused: + +- Classes not included in any catalog +- Defined types not instantiated in any catalog +- Hiera keys not referenced in any manifest + +**Validates: Requirements 8.1, 8.2, 8.3** + +### Property 17: Unused Code Metadata + +*For any* unused code item detected, the result SHALL include the file path, line number, and item type (class, defined_type, or hiera_key). + +**Validates: Requirements 8.4** + +### Property 18: Exclusion Pattern Support + +*For any* set of exclusion patterns and unused code results, items matching any exclusion pattern SHALL NOT appear in the final unused code report. + +**Validates: Requirements 8.5** + +### Property 19: Lint Issue Detection + +*For any* Puppet manifest containing syntax errors or style violations, the Code_Analyzer SHALL detect and report issues with: severity level, file path, line number, column number, and descriptive message. 
+ +**Validates: Requirements 9.1, 9.2, 9.3** + +### Property 20: Issue Filtering + +*For any* set of lint issues and filter criteria (severity, type), filtering SHALL return only issues matching all specified criteria. + +**Validates: Requirements 9.4** + +### Property 21: Puppetfile Parsing + +*For any* valid Puppetfile, the Code_Analyzer SHALL extract all module declarations with their names, versions, and sources (forge or git). + +**Validates: Requirements 10.1** + +### Property 22: Module Update Detection + +*For any* module with a specified version and a known latest version on Puppet Forge, if the latest version is newer than the current version, the Code_Analyzer SHALL indicate an update is available. + +**Validates: Requirements 10.2, 10.3** + +### Property 23: Code Statistics Accuracy + +*For any* control repository, the Code_Analyzer SHALL accurately count: total manifests, total classes, total defined types, total functions, and lines of code. + +**Validates: Requirements 11.1, 11.2, 11.3** + +### Property 24: Catalog Compilation Mode Behavior + +*For any* Hiera key resolution request: + +- When catalog compilation is disabled, only facts SHALL be used for variable interpolation +- When catalog compilation is enabled and succeeds, code-defined variables SHALL also be available +- When catalog compilation is enabled but fails, the resolver SHALL fall back to fact-only resolution + +**Validates: Requirements 12.2, 12.3, 12.4** + +### Property 25: Integration Enable/Disable Persistence + +*For any* Hiera integration configuration, disabling the integration SHALL preserve all configuration values, and re-enabling SHALL restore full functionality with the same configuration. 
+ +**Validates: Requirements 13.5** + +### Property 26: API Response Correctness + +*For any* API request to Hiera endpoints: + +- GET /keys SHALL return all discovered keys +- GET /nodes/{id}/keys/{key} SHALL return the same resolution as HieraResolver.resolve() +- GET /analysis SHALL return the same results as CodeAnalyzer.analyze() + +**Validates: Requirements 14.1, 14.2, 14.3, 14.4, 14.5** + +### Property 27: API Error Handling + +*For any* API request when the Hiera integration is not configured, the API SHALL return an error response with HTTP status 503 and a message indicating setup is required. + +**Validates: Requirements 14.6** + +### Property 28: Cache Correctness + +*For any* sequence of Hiera operations, cached results SHALL be equivalent to freshly computed results until the underlying data changes. + +**Validates: Requirements 15.1, 15.5** + +### Property 29: Cache Invalidation on File Change + +*For any* hieradata file modification, the cache for affected keys SHALL be invalidated, and subsequent lookups SHALL return the updated values. + +**Validates: Requirements 15.2** + +### Property 30: Pagination Correctness + +*For any* API endpoint returning paginated results, iterating through all pages SHALL return all items exactly once, with no duplicates or omissions. + +**Validates: Requirements 15.6** + +## Error Handling + +### Error Categories + +1. **Configuration Errors** + - Invalid control repo path + - Missing hiera.yaml + - Invalid hiera.yaml syntax + - Inaccessible directories + +2. **Resolution Errors** + - Missing facts for node + - Circular variable references + - Invalid interpolation syntax + - Catalog compilation failures + +3. 
**Analysis Errors** + - Puppet syntax errors in manifests + - Puppetfile parse errors + - Forge API unavailable + - Large repository timeouts + +### Error Response Format + +```typescript +interface HieraError { + code: string; + message: string; + details?: { + file?: string; + line?: number; + suggestion?: string; + }; +} + +// Error codes +const HIERA_ERROR_CODES = { + NOT_CONFIGURED: 'HIERA_NOT_CONFIGURED', + INVALID_PATH: 'HIERA_INVALID_PATH', + PARSE_ERROR: 'HIERA_PARSE_ERROR', + RESOLUTION_ERROR: 'HIERA_RESOLUTION_ERROR', + FACTS_UNAVAILABLE: 'HIERA_FACTS_UNAVAILABLE', + CATALOG_COMPILATION_FAILED: 'HIERA_CATALOG_COMPILATION_FAILED', + ANALYSIS_ERROR: 'HIERA_ANALYSIS_ERROR', + FORGE_UNAVAILABLE: 'HIERA_FORGE_UNAVAILABLE', +} as const; +``` + +### Graceful Degradation + +The system SHALL gracefully degrade when components are unavailable, always displaying clear warnings to the user: + +- **PuppetDB unavailable**: Fall back to local facts. Display warning: "PuppetDB unavailable - using local fact files. Some facts may be outdated." +- **Catalog compilation fails**: Fall back to fact-only resolution. Display warning: "Catalog compilation failed for {node} - using fact-only resolution. Some Hiera variables may not resolve correctly." +- **Forge API unavailable**: Skip module update checks. Display warning: "Puppet Forge API unavailable - module update information may be incomplete." +- **Individual file parse errors**: Continue with remaining files. Display warning: "Failed to parse {file}: {error}. This file will be skipped." +- **Local facts missing for node**: Return empty fact set. Display warning: "No facts available for node {nodeId}. Hiera resolution may be incomplete." + +All warnings SHALL be: + +1. Logged to the backend console with appropriate log level (warn) +2. Returned in API responses in a `warnings` array +3. Displayed in the UI with a warning indicator (yellow/orange styling) +4. 
Accessible in Expert Mode with additional diagnostic details + +## Testing Strategy + +### Unit Tests + +Unit tests will cover: + +- HieraParser: YAML parsing, config validation, path expansion +- HieraResolver: Lookup methods, merge strategies, interpolation +- HieraScanner: File discovery, key extraction, index building +- CodeAnalyzer: Manifest parsing, unused detection, statistics +- FactService: Source selection, file parsing, caching + +### Property-Based Tests + +Property-based tests will validate the correctness properties defined above using fast-check library: + +- Configuration round-trip (Property 1) +- Parsing round-trip (Property 3) +- Resolution correctness (Property 10) +- Value interpolation (Property 11) +- Cache correctness (Property 28) + +Each property test will run minimum 100 iterations with generated inputs. + +### Integration Tests + +Integration tests will cover: + +- Full resolution flow from API to file system +- PuppetDB fact retrieval integration +- File watching and cache invalidation +- Multi-environment scenarios + +### Test Configuration + +```typescript +// vitest.config.ts additions +export default defineConfig({ + test: { + include: ['src/integrations/hiera/**/*.test.ts'], + coverage: { + include: ['src/integrations/hiera/**/*.ts'], + exclude: ['**/*.test.ts', '**/types.ts'], + thresholds: { + lines: 80, + functions: 80, + branches: 75, + }, + }, + }, +}); +``` + +### Test Data Generators + +```typescript +// Property test generators using fast-check +import * as fc from 'fast-check'; + +// Generate valid Hiera keys +const hieraKeyArb = fc.stringOf( + fc.constantFrom(...'abcdefghijklmnopqrstuvwxyz_:'.split('')), + { minLength: 1, maxLength: 50 } +); + +// Generate valid fact sets +const factsArb = fc.dictionary( + fc.string({ minLength: 1, maxLength: 20 }), + fc.oneof(fc.string(), fc.integer(), fc.boolean()) +); + +// Generate hierarchy levels +const hierarchyLevelArb = fc.record({ + name: fc.string({ minLength: 1, maxLength: 30 
}), + path: fc.string({ minLength: 1, maxLength: 100 }), +}); + +// Generate Hiera configs +const hieraConfigArb = fc.record({ + version: fc.constant(5), + hierarchy: fc.array(hierarchyLevelArb, { minLength: 1, maxLength: 10 }), +}); +``` diff --git a/.kiro/specs/hiera-codebase-integration/requirements.md b/.kiro/specs/hiera-codebase-integration/requirements.md new file mode 100644 index 0000000..cf0d04c --- /dev/null +++ b/.kiro/specs/hiera-codebase-integration/requirements.md @@ -0,0 +1,213 @@ +# Requirements Document + +## Introduction + +This document defines the requirements for Pabawi v0.4.0's Hiera and Local Puppet Codebase Integration feature. This integration enables users to configure a local Puppet control repository directory, providing deep visibility into Hiera data, key resolution, and static code analysis capabilities. The feature integrates seamlessly with existing PuppetDB integration for fact retrieval while supporting standalone operation with local fact files. + +## Glossary + +- **Hiera**: Puppet's built-in key-value configuration data lookup system +- **Control_Repository**: A Git repository containing Puppet code, modules, and Hiera data +- **Hieradata**: YAML/JSON files containing configuration data organized by hierarchy levels +- **Hiera_Level**: A layer in the Hiera hierarchy (e.g., node-specific, environment, common) +- **Lookup_Method**: Hiera data retrieval strategy (first, unique, hash, deep) +- **Lookup_Options**: Per-key configuration defining merge behavior and lookup strategy +- **Fact**: A piece of system information collected by Puppet agent +- **Catalog**: Compiled Puppet configuration for a specific node +- **Puppetfile**: File defining external modules and their versions +- **Integration_Manager**: Pabawi's system for managing external service connections +- **Expert_Mode**: Advanced UI mode showing additional technical details + +## Requirements + +### Requirement 1: Control Repository Configuration + +**User Story:** As a 
Puppet administrator, I want to configure a local control repository directory, so that Pabawi can analyze my Puppet codebase and Hiera data. + +#### Acceptance Criteria + +1. THE Configuration_Service SHALL accept a filesystem path to a Puppet control repository +2. WHEN a control repository path is configured, THE Configuration_Service SHALL validate the directory contains expected Puppet structure (hiera.yaml, hieradata directory, manifests) +3. IF the configured path does not exist or is inaccessible, THEN THE Configuration_Service SHALL return a descriptive error message +4. WHEN the control repository is valid, THE Integration_Manager SHALL register the Hiera integration as available +5. THE Configuration_Service SHALL support configuring multiple environment directories within the control repository +6. WHEN configuration changes, THE Hiera_Service SHALL reload the control repository data without requiring application restart + +### Requirement 2: Hiera Configuration Parsing + +**User Story:** As a Puppet administrator, I want Pabawi to parse my hiera.yaml configuration, so that it understands my hierarchy structure and lookup behavior. + +#### Acceptance Criteria + +1. THE Hiera_Parser SHALL parse hiera.yaml files in Hiera 5 format +2. WHEN parsing hiera.yaml, THE Hiera_Parser SHALL extract all hierarchy levels with their paths and data providers +3. THE Hiera_Parser SHALL support yaml, json, and eyaml data backends +4. WHEN lookup_options are defined in hieradata, THE Hiera_Parser SHALL extract and apply them during lookups +5. IF hiera.yaml contains syntax errors, THEN THE Hiera_Parser SHALL return a descriptive error with line number +6. THE Hiera_Parser SHALL support variable interpolation in hierarchy paths using facts and other variables + +### Requirement 3: Fact Source Configuration + +**User Story:** As a Puppet administrator, I want to configure how facts are retrieved for Hiera resolution, so that I can use PuppetDB or local fact files. 
+ +#### Acceptance Criteria + +1. WHEN PuppetDB integration is available, THE Fact_Service SHALL retrieve node facts from PuppetDB by default +2. THE Configuration_Service SHALL accept a filesystem path to a directory containing local fact files +3. WHEN local fact files are configured, THE Fact_Service SHALL parse JSON files named by node hostname +4. THE Fact_Service SHALL support the Puppetserver fact file format with "name" and "values" structure +5. IF both PuppetDB and local facts are available for a node, THE Fact_Service SHALL prefer PuppetDB facts +6. IF facts cannot be retrieved for a node, THEN THE Fact_Service SHALL return an empty fact set with a warning + +### Requirement 4: Hiera Key Discovery + +**User Story:** As a Puppet administrator, I want to see all Hiera keys present in my hieradata, so that I can understand what configuration data is available. + +#### Acceptance Criteria + +1. THE Hiera_Scanner SHALL recursively scan all hieradata files and extract unique keys +2. WHEN scanning hieradata, THE Hiera_Scanner SHALL track which file and hierarchy level each key appears in +3. THE Hiera_Scanner SHALL support nested keys using dot notation (e.g., "profile::nginx::port") +4. WHEN a key appears in multiple hierarchy levels, THE Hiera_Scanner SHALL list all occurrences with their values +5. THE Hiera_Scanner SHALL provide a searchable index of all discovered keys +6. WHEN hieradata files change, THE Hiera_Scanner SHALL update the key index + +### Requirement 5: Hiera Key Resolution + +**User Story:** As a Puppet administrator, I want to resolve Hiera keys for specific nodes, so that I can see the actual values that would be used during Puppet runs. + +#### Acceptance Criteria + +1. THE Hiera_Resolver SHALL resolve key values using the configured hierarchy and node facts +2. WHEN resolving a key, THE Hiera_Resolver SHALL apply the appropriate lookup method (first, unique, hash, deep) +3. 
THE Hiera_Resolver SHALL honor lookup_options defined in hieradata for merge behavior +4. WHEN resolving, THE Hiera_Resolver SHALL track which hierarchy level provided the final value +5. THE Hiera_Resolver SHALL support variable interpolation in values using facts +6. IF a key cannot be resolved, THEN THE Hiera_Resolver SHALL indicate no value found + +### Requirement 6: Node Hiera Tab + +**User Story:** As a Puppet administrator, I want a Hiera tab in the node detail view, so that I can see all Hiera data relevant to a specific node. + +#### Acceptance Criteria + +1. WHEN viewing a node, THE Node_Detail_Page SHALL display a Hiera tab +2. THE Hiera_Tab SHALL display a searchable list of all Hiera keys +3. WHEN displaying a key, THE Hiera_Tab SHALL show values from each hierarchy level where the key exists +4. THE Hiera_Tab SHALL highlight the resolved value that would be used for the node +5. WHEN a key is used by classes included on the node, THE Hiera_Tab SHALL indicate this with visual highlighting +6. THE Hiera_Tab SHALL support filtering keys by usage status (used/unused by node classes) +7. WHEN Expert_Mode is enabled, THE Hiera_Tab SHALL show additional resolution details including lookup method and source file paths + +### Requirement 7: Global Hiera Search Tab + +**User Story:** As a Puppet administrator, I want a global Hiera tab in the Puppet page, so that I can search for any key and see its value across all nodes. + +#### Acceptance Criteria + +1. THE Puppet_Page SHALL include a Hiera tab for global key search +2. WHEN searching for a key, THE Global_Hiera_Tab SHALL display the resolved value for each node +3. THE Global_Hiera_Tab SHALL show which hieradata file provides the value for each node +4. THE Global_Hiera_Tab SHALL support searching by partial key name +5. WHEN displaying results, THE Global_Hiera_Tab SHALL group nodes by their resolved value +6. 
THE Global_Hiera_Tab SHALL indicate nodes where the key is not defined + +### Requirement 8: Code Analysis - Unused Code Detection + +**User Story:** As a Puppet administrator, I want to identify unused code in my control repository, so that I can clean up and maintain my codebase. + +#### Acceptance Criteria + +1. THE Code_Analyzer SHALL identify classes that are not included by any node +2. THE Code_Analyzer SHALL identify defined types that are not instantiated +3. THE Code_Analyzer SHALL identify Hiera keys that are not referenced in any manifest +4. WHEN displaying unused code, THE Code_Analysis_Page SHALL show the file location and type +5. THE Code_Analyzer SHALL support excluding specific patterns from unused code detection + +### Requirement 9: Code Analysis - Puppet Lint Integration + +**User Story:** As a Puppet administrator, I want to see Puppet lint and syntax issues, so that I can improve code quality. + +#### Acceptance Criteria + +1. THE Code_Analyzer SHALL detect Puppet syntax errors in manifests +2. THE Code_Analyzer SHALL identify common Puppet lint issues (style violations, deprecated syntax) +3. WHEN displaying issues, THE Code_Analysis_Page SHALL show severity, file, line number, and description +4. THE Code_Analysis_Page SHALL support filtering issues by severity and type +5. THE Code_Analyzer SHALL provide issue counts grouped by category + +### Requirement 10: Code Analysis - Module Updates + +**User Story:** As a Puppet administrator, I want to see which modules in my Puppetfile can be updated, so that I can keep dependencies current. + +#### Acceptance Criteria + +1. THE Code_Analyzer SHALL parse the Puppetfile and extract module dependencies with versions +2. WHEN a module has a newer version available on Puppet Forge, THE Code_Analyzer SHALL indicate the update +3. THE Code_Analysis_Page SHALL display current version and latest available version for each module +4. 
THE Code_Analysis_Page SHALL indicate modules with security advisories if available +5. IF the Puppetfile cannot be parsed, THEN THE Code_Analyzer SHALL return a descriptive error + +### Requirement 11: Code Analysis - Usage Statistics + +**User Story:** As a Puppet administrator, I want to see usage statistics for my Puppet code, so that I can understand my codebase composition. + +#### Acceptance Criteria + +1. THE Code_Analyzer SHALL count and rank classes by usage frequency across nodes +2. THE Code_Analyzer SHALL count total manifests, classes, defined types, and functions +3. THE Code_Analyzer SHALL calculate lines of code and complexity metrics +4. THE Code_Analysis_Page SHALL display statistics in a dashboard format +5. THE Code_Analysis_Page SHALL show most frequently used classes and resources + +### Requirement 12: Catalog Compilation Mode + +**User Story:** As a Puppet administrator, I want to optionally enable catalog compilation for Hiera resolution, so that I can resolve keys that depend on Puppet code variables. + +#### Acceptance Criteria + +1. THE Configuration_Service SHALL support a catalog compilation mode setting (enabled/disabled) +2. WHEN catalog compilation is disabled (default), THE Hiera_Resolver SHALL only use facts for variable interpolation +3. WHEN catalog compilation is enabled, THE Hiera_Resolver SHALL attempt to compile a catalog to resolve code-defined variables +4. IF catalog compilation fails, THEN THE Hiera_Resolver SHALL fall back to fact-only resolution with a warning +5. THE Configuration_UI SHALL explain the performance implications of enabling catalog compilation +6. WHEN catalog compilation is enabled, THE Hiera_Resolver SHALL cache compiled catalogs to improve performance + +### Requirement 13: Integration Setup and Status + +**User Story:** As a Puppet administrator, I want clear setup instructions and status indicators for the Hiera integration, so that I can configure and troubleshoot it easily. 
+ +#### Acceptance Criteria + +1. THE Integration_Setup_Page SHALL include a Hiera integration section with setup instructions +2. THE Integration_Status_Component SHALL display Hiera integration health (connected, error, not configured) +3. WHEN the integration has errors, THE Integration_Status_Component SHALL display actionable error messages +4. THE Setup_Instructions SHALL include examples for common control repository structures +5. THE Integration_Manager SHALL support enabling/disabling the Hiera integration without removing configuration +6. WHEN Expert_Mode is enabled, THE Integration_Status_Component SHALL show detailed diagnostic information + +### Requirement 14: API Endpoints + +**User Story:** As a developer, I want REST API endpoints for Hiera and code analysis data, so that I can integrate with other tools. + +#### Acceptance Criteria + +1. THE API SHALL provide an endpoint to list all discovered Hiera keys +2. THE API SHALL provide an endpoint to resolve a Hiera key for a specific node +3. THE API SHALL provide an endpoint to get Hiera data for a node (all keys with resolved values) +4. THE API SHALL provide an endpoint to get code analysis results +5. THE API SHALL provide an endpoint to get Puppetfile module update information +6. WHEN the integration is not configured, THE API SHALL return appropriate error responses with setup guidance + +### Requirement 15: Performance and Caching + +**User Story:** As a Puppet administrator, I want the Hiera integration to perform efficiently, so that it doesn't slow down the application. + +#### Acceptance Criteria + +1. THE Hiera_Service SHALL cache parsed hieradata to avoid repeated file reads +2. THE Hiera_Service SHALL implement file watching to invalidate cache when hieradata changes +3. THE Code_Analyzer SHALL cache analysis results with configurable TTL +4. WHEN scanning large control repositories, THE Hiera_Scanner SHALL provide progress indication +5. 
THE Hiera_Resolver SHALL cache resolved values per node with appropriate invalidation +6. THE API SHALL support pagination for endpoints returning large result sets diff --git a/.kiro/specs/hiera-codebase-integration/tasks.md b/.kiro/specs/hiera-codebase-integration/tasks.md new file mode 100644 index 0000000..45d733d --- /dev/null +++ b/.kiro/specs/hiera-codebase-integration/tasks.md @@ -0,0 +1,593 @@ +# Implementation Plan: Hiera and Local Puppet Codebase Integration + +## Overview + +This implementation plan breaks down the Hiera and Local Puppet Codebase Integration feature into discrete, incremental tasks. Each task builds on previous work, ensuring no orphaned code. The implementation follows the existing integration plugin architecture used by PuppetDB and Puppetserver integrations. + +## Tasks + +- [x] 1. Set up Hiera integration infrastructure + - [x] 1.1 Create directory structure for Hiera integration + - Create `backend/src/integrations/hiera/` directory + - Create index.ts, types.ts files + - _Requirements: 1.4, 13.1_ + + - [x] 1.2 Define TypeScript types and interfaces + - Define HieraConfig, HieraKey, HieraResolution, HieraKeyIndex interfaces + - Define CodeAnalysisResult, LintIssue, ModuleUpdate interfaces + - Define API request/response types + - _Requirements: 14.1-14.6_ + + - [x] 1.3 Add Hiera configuration schema + - Add HieraConfig to backend/src/config/schema.ts + - Add environment variable mappings + - Update .env.example with Hiera configuration options + - _Requirements: 1.1, 1.5, 3.2, 12.1_ + +- [x] 2. 
Implement HieraParser + - [x] 2.1 Create HieraParser class + - Implement hiera.yaml parsing for Hiera 5 format + - Extract hierarchy levels, paths, data providers + - Support yaml, json, eyaml backend detection + - _Requirements: 2.1, 2.2, 2.3_ + + - [x] 2.2 Write property test for Hiera config parsing round-trip + - **Property 3: Hiera Configuration Parsing Round-Trip** + - **Validates: Requirements 2.1, 2.2** + + - [x] 2.3 Implement lookup_options extraction + - Parse lookup_options from hieradata files + - Support merge strategies (first, unique, hash, deep) + - _Requirements: 2.4_ + + - [x] 2.4 Implement error handling for invalid hiera.yaml + - Return descriptive errors with line numbers + - Handle missing files gracefully + - _Requirements: 2.5_ + + - [x] 2.5 Write property test for parser error reporting + - **Property 4: Hiera Parser Error Reporting** + - **Validates: Requirements 2.5** + + - [x] 2.6 Implement hierarchy path interpolation + - Support %{facts.xxx} variable syntax + - Support %{::xxx} legacy syntax + - _Requirements: 2.6_ + + - [x] 2.7 Write property test for path interpolation + - **Property 5: Hierarchy Path Interpolation** + - **Validates: Requirements 2.6** + +- [x] 3. Checkpoint - Ensure parser tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 4. 
Implement FactService + - [x] 4.1 Create FactService class + - Implement thin wrapper around existing PuppetDB integration + - Delegate to IntegrationManager.getInformationSource('puppetdb').getNodeFacts() + - Support local fact files as fallback only + - _Requirements: 3.1, 3.2_ + + - [x] 4.2 Implement local fact file parsing (fallback only) + - Parse JSON files in Puppetserver format + - Support "name" and "values" structure + - Only used when PuppetDB unavailable or missing facts + - _Requirements: 3.3, 3.4_ + + - [x] 4.3 Write property test for local fact file parsing + - **Property 7: Local Fact File Parsing** + - **Validates: Requirements 3.3, 3.4** + + - [x] 4.4 Implement fact source priority logic + - Prefer PuppetDB when available + - Fall back to local facts with warning + - Return empty set with warning when no facts available + - _Requirements: 3.5, 3.6_ + + - [x] 4.5 Write property test for fact source priority + - **Property 6: Fact Source Priority** + - **Validates: Requirements 3.1, 3.5** + +- [x] 5. 
Implement HieraScanner + - [x] 5.1 Create HieraScanner class + - Recursively scan hieradata directories + - Extract unique keys from YAML/JSON files + - Track file path, hierarchy level, line number for each key + - _Requirements: 4.1, 4.2_ + + - [x] 5.2 Implement nested key support + - Handle dot notation keys (e.g., profile::nginx::port) + - Build hierarchical key index + - _Requirements: 4.3_ + + - [x] 5.3 Implement multi-occurrence tracking + - Track all locations where a key appears + - Store value at each location + - _Requirements: 4.4_ + + - [x] 5.4 Write property test for key scanning completeness + - **Property 8: Key Scanning Completeness** + - **Validates: Requirements 4.1, 4.2, 4.3, 4.4** + + - [x] 5.5 Implement key search functionality + - Support partial key name matching + - Case-insensitive search + - _Requirements: 4.5_ + + - [x] 5.6 Write property test for key search + - **Property 9: Key Search Functionality** + - **Validates: Requirements 4.5, 7.4** + + - [x] 5.7 Implement file watching for cache invalidation + - Watch hieradata directory for changes + - Invalidate affected cache entries + - _Requirements: 4.6, 15.2_ + +- [x] 6. Checkpoint - Ensure scanner tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 7. 
Implement HieraResolver + - [x] 7.1 Create HieraResolver class + - Implement key resolution using hierarchy and facts + - Support all lookup methods (first, unique, hash, deep) + - _Requirements: 5.1, 5.2_ + + - [x] 7.2 Implement lookup_options handling + - Apply merge behavior from lookup_options + - Support knockout_prefix for deep merges + - _Requirements: 5.3_ + + - [x] 7.3 Implement source tracking + - Track which hierarchy level provided the value + - Record all values from all levels + - _Requirements: 5.4_ + + - [x] 7.4 Write property test for resolution correctness + - **Property 10: Hiera Resolution Correctness** + - **Validates: Requirements 5.1, 5.2, 5.3, 5.4** + + - [x] 7.5 Implement value interpolation + - Replace %{facts.xxx} with fact values + - Handle nested interpolation + - _Requirements: 5.5_ + + - [x] 7.6 Write property test for value interpolation + - **Property 11: Value Interpolation** + - **Validates: Requirements 5.5** + + - [x] 7.7 Implement missing key handling + - Return appropriate indicator for missing keys + - Do not throw errors for missing keys + - _Requirements: 5.6_ + + - [x] 7.8 Write property test for missing key handling + - **Property 12: Missing Key Handling** + - **Validates: Requirements 5.6, 3.6** + +- [x] 8. Checkpoint - Ensure resolver tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 9. 
Implement HieraService + - [x] 9.1 Create HieraService class + - Orchestrate HieraParser, HieraScanner, HieraResolver, FactService + - Implement caching layer + - _Requirements: 15.1, 15.5_ + + - [x] 9.2 Implement getAllKeys and searchKeys methods + - Return all discovered keys + - Support search filtering + - _Requirements: 4.5_ + + - [x] 9.3 Implement resolveKey and resolveAllKeys methods + - Resolve single key for a node + - Resolve all keys for a node + - _Requirements: 5.1_ + + - [x] 9.4 Implement getNodeHieraData method + - Return all Hiera data for a node + - Include used/unused key classification + - _Requirements: 6.2, 6.6_ + + - [x] 9.5 Write property test for key usage filtering + - **Property 13: Key Usage Filtering** + - **Validates: Requirements 6.6** + + - [x] 9.6 Implement getKeyValuesAcrossNodes method + - Return key values for all nodes + - Include source file info + - _Requirements: 7.2, 7.3_ + + - [x] 9.7 Write property test for global key resolution + - **Property 14: Global Key Resolution Across Nodes** + - **Validates: Requirements 7.2, 7.3, 7.6** + + - [x] 9.8 Write property test for node grouping by value + - **Property 15: Node Grouping by Value** + - **Validates: Requirements 7.5** + + - [x] 9.9 Implement cache management + - Cache parsed hieradata + - Cache resolved values per node + - Implement cache invalidation on file changes + - _Requirements: 15.1, 15.2, 15.5_ + + - [x] 9.10 Write property test for cache correctness ✅ PBT PASSED + - **Property 28: Cache Correctness** + - **Validates: Requirements 15.1, 15.5** + + - [x] 9.11 Write property test for cache invalidation ✅ PBT PASSED + - **Property 29: Cache Invalidation on File Change** + - **Validates: Requirements 15.2** + +- [x] 10. Checkpoint - Ensure HieraService tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 11.
Implement catalog compilation mode + - [x] 11.1 Add catalog compilation configuration + - Add enabled/disabled setting + - Add timeout and cache TTL settings + - _Requirements: 12.1_ + + - [x] 11.2 Implement catalog compilation for variable resolution + - Attempt catalog compilation when enabled + - Extract code-defined variables + - _Requirements: 12.3_ + + - [x] 11.3 Implement fallback behavior + - Fall back to fact-only resolution on failure + - Display warning when fallback occurs + - _Requirements: 12.4_ + + - [x] 11.4 Write property test for catalog compilation mode ✅ PBT PASSED + - **Property 24: Catalog Compilation Mode Behavior** + - **Validates: Requirements 12.2, 12.3, 12.4** + + - [x] 11.5 Implement catalog caching + - Cache compiled catalogs + - Implement appropriate invalidation + - _Requirements: 12.6_ + +- [x] 12. Implement CodeAnalyzer + - [x] 12.1 Create CodeAnalyzer class + - Set up Puppet manifest parsing + - Implement analysis result caching + - _Requirements: 15.3_ + + - [x] 12.2 Implement unused code detection + - Detect unused classes + - Detect unused defined types + - Detect unused Hiera keys + - _Requirements: 8.1, 8.2, 8.3_ + + - [ ]* 12.3 Write property test for unused code detection + - **Property 16: Unused Code Detection** + - **Validates: Requirements 8.1, 8.2, 8.3** + + - [x] 12.4 Implement unused code metadata + - Include file path, line number, type for each item + - _Requirements: 8.4_ + + - [ ]* 12.5 Write property test for unused code metadata + - **Property 17: Unused Code Metadata** + - **Validates: Requirements 8.4** + + - [x] 12.6 Implement exclusion pattern support + - Allow excluding patterns from unused detection + - _Requirements: 8.5_ + + - [ ]* 12.7 Write property test for exclusion patterns + - **Property 18: Exclusion Pattern Support** + - **Validates: Requirements 8.5** + + - [x] 12.8 Implement lint issue detection + - Detect Puppet syntax errors + - Detect common style violations + - _Requirements: 9.1, 9.2_ + 
+ - [ ]* 12.9 Write property test for lint issue detection + - **Property 19: Lint Issue Detection** + - **Validates: Requirements 9.1, 9.2, 9.3** + + - [x] 12.10 Implement issue filtering + - Filter by severity + - Filter by type + - _Requirements: 9.4_ + + - [ ]* 12.11 Write property test for issue filtering + - **Property 20: Issue Filtering** + - **Validates: Requirements 9.4** + + - [x] 12.12 Implement issue counting by category + - Group and count issues + - _Requirements: 9.5_ + +- [x] 13. Checkpoint - Ensure CodeAnalyzer tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 14. Implement Puppetfile analysis + - [x] 14.1 Implement Puppetfile parsing + - Extract module names, versions, sources + - Handle forge and git modules + - _Requirements: 10.1_ + + - [ ]* 14.2 Write property test for Puppetfile parsing + - **Property 21: Puppetfile Parsing** + - **Validates: Requirements 10.1** + + - [x] 14.3 Implement module update detection + - Query Puppet Forge for latest versions + - Compare with current versions + - _Requirements: 10.2_ + + - [ ]* 14.4 Write property test for module update detection + - **Property 22: Module Update Detection** + - **Validates: Requirements 10.2, 10.3** + + - [x] 14.5 Implement security advisory detection + - Check for security advisories on modules + - _Requirements: 10.4_ + + - [x] 14.6 Implement Puppetfile error handling + - Return descriptive errors for parse failures + - _Requirements: 10.5_ + +- [x] 15. 
Implement usage statistics + - [x] 15.1 Implement class usage counting + - Count class usage across nodes + - Rank by frequency + - _Requirements: 11.1_ + + - [x] 15.2 Implement code counting + - Count manifests, classes, defined types, functions + - Calculate lines of code + - _Requirements: 11.2, 11.3_ + + - [ ]* 15.3 Write property test for code statistics + - **Property 23: Code Statistics Accuracy** + - **Validates: Requirements 11.1, 11.2, 11.3** + + - [x] 15.4 Implement most used items ranking + - Rank classes by usage + - Rank resources by count + - _Requirements: 11.5_ + +- [ ] 16. Checkpoint - Ensure statistics tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 17. Implement HieraPlugin + - [x] 17.1 Create HieraPlugin class extending BasePlugin + - Implement InformationSourcePlugin interface + - Wire up HieraService and CodeAnalyzer + - _Requirements: 1.4_ + + - [x] 17.2 Implement control repository validation + - Validate path exists and is accessible + - Validate expected Puppet structure + - _Requirements: 1.2, 1.3_ + + - [ ]* 17.3 Write property test for repository validation + - **Property 2: Control Repository Validation** + - **Validates: Requirements 1.2, 1.3** + + - [x] 17.4 Implement health check + - Check control repo accessibility + - Check hiera.yaml validity + - Report integration status + - _Requirements: 13.2, 13.3_ + + - [x] 17.5 Implement enable/disable functionality + - Support disabling without removing config + - _Requirements: 13.5_ + + - [ ]* 17.6 Write property test for enable/disable persistence + - **Property 25: Integration Enable/Disable Persistence** + - **Validates: Requirements 13.5** + + - [x] 17.7 Implement hot reload + - Reload control repo data on config change + - _Requirements: 1.6_ + +- [x] 18. 
Implement API routes + - [x] 18.1 Create Hiera API routes file + - Set up Express router + - Add authentication middleware + - _Requirements: 14.1-14.6_ + + - [x] 18.2 Implement key discovery endpoints + - GET /api/integrations/hiera/keys + - GET /api/integrations/hiera/keys/search + - GET /api/integrations/hiera/keys/{key} + - _Requirements: 14.1_ + + - [x] 18.3 Implement node-specific endpoints + - GET /api/integrations/hiera/nodes/{nodeId}/data + - GET /api/integrations/hiera/nodes/{nodeId}/keys + - GET /api/integrations/hiera/nodes/{nodeId}/keys/{key} + - _Requirements: 14.2, 14.3_ + + - [x] 18.4 Implement global key lookup endpoint + - GET /api/integrations/hiera/keys/{key}/nodes + - _Requirements: 14.2_ + + - [x] 18.5 Implement code analysis endpoints + - GET /api/integrations/hiera/analysis + - GET /api/integrations/hiera/analysis/unused + - GET /api/integrations/hiera/analysis/lint + - GET /api/integrations/hiera/analysis/modules + - GET /api/integrations/hiera/analysis/statistics + - _Requirements: 14.4, 14.5_ + + - [x] 18.6 Implement status and reload endpoints + - GET /api/integrations/hiera/status + - POST /api/integrations/hiera/reload + - _Requirements: 13.2_ + + - [x] 18.7 Implement error handling for unconfigured integration + - Return 503 with setup guidance + - _Requirements: 14.6_ + + - [ ]* 18.8 Write property test for API response correctness + - **Property 26: API Response Correctness** + - **Validates: Requirements 14.1, 14.2, 14.3, 14.4, 14.5** + + - [ ]* 18.9 Write property test for API error handling + - **Property 27: API Error Handling** + - **Validates: Requirements 14.6** + + - [x] 18.10 Implement pagination for large result sets + - Add pagination parameters + - Return pagination metadata + - _Requirements: 15.6_ + + - [ ]* 18.11 Write property test for pagination correctness + - **Property 30: Pagination Correctness** + - **Validates: Requirements 15.6** + +- [ ] 19. 
Checkpoint - Ensure API tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 20. Implement frontend NodeHieraTab component + - [x] 20.1 Create NodeHieraTab.svelte component + - Set up component structure + - Add to NodeDetailPage tabs + - _Requirements: 6.1_ + + - [x] 20.2 Implement key list display + - Display searchable list of all keys + - Show values from each hierarchy level + - _Requirements: 6.2, 6.3_ + + - [x] 20.3 Implement resolved value highlighting + - Highlight the resolved value + - Show visual indicator for used keys + - _Requirements: 6.4, 6.5_ + + - [x] 20.4 Implement key filtering + - Filter by used/unused status + - _Requirements: 6.6_ + + - [x] 20.5 Implement expert mode details + - Show lookup method, source file paths + - Show interpolation details + - _Requirements: 6.7_ + +- [x] 21. Implement frontend GlobalHieraTab component + - [x] 21.1 Create GlobalHieraTab.svelte component + - Set up component structure + - Add to PuppetPage tabs + - _Requirements: 7.1_ + + - [x] 21.2 Implement key search + - Add search input + - Support partial key name matching + - _Requirements: 7.4_ + + - [x] 21.3 Implement results display + - Show resolved value for each node + - Show source file info + - _Requirements: 7.2, 7.3_ + + - [x] 21.4 Implement node grouping + - Group nodes by resolved value + - Indicate nodes where key is not defined + - _Requirements: 7.5, 7.6_ + +- [x] 22. 
Implement frontend CodeAnalysisTab component + - [x] 22.1 Create CodeAnalysisTab.svelte component + - Set up component structure + - Add to PuppetPage tabs + - _Requirements: 8.4, 9.3, 10.3, 11.4_ + + - [x] 22.2 Implement statistics dashboard + - Display code statistics + - Show most used classes + - _Requirements: 11.4, 11.5_ + + - [x] 22.3 Implement unused code section + - Display unused classes, defined types, keys + - Show file location and type + - _Requirements: 8.4_ + + - [x] 22.4 Implement lint issues section + - Display issues with severity, file, line, description + - Support filtering by severity and type + - _Requirements: 9.3, 9.4_ + + - [x] 22.5 Implement module updates section + - Display current and latest versions + - Indicate security advisories + - _Requirements: 10.3, 10.4_ + +- [x] 23. Implement frontend HieraSetupGuide component + - [x] 23.1 Create HieraSetupGuide.svelte component + - Set up component structure + - Add to IntegrationSetupPage + - _Requirements: 13.1_ + + - [x] 23.2 Implement setup instructions + - Step-by-step configuration guide + - Control repo path configuration + - _Requirements: 13.4_ + + - [x] 23.3 Implement fact source configuration + - PuppetDB vs local facts selection + - Local facts path configuration + - _Requirements: 3.2_ + + - [x] 23.4 Implement catalog compilation toggle + - Enable/disable toggle + - Performance implications explanation + - _Requirements: 12.5_ + + - [x] 23.5 Implement connection test + - Test button to validate configuration + - Display validation results + - _Requirements: 1.2_ + +- [x] 24. 
Implement IntegrationStatus updates + - [x] 24.1 Update IntegrationStatus component + - Add Hiera integration status display + - Show health status (connected, error, not configured) + - _Requirements: 13.2_ + + - [x] 24.2 Implement error message display + - Show actionable error messages + - _Requirements: 13.3_ + + - [x] 24.3 Implement expert mode diagnostics + - Show detailed diagnostic info in expert mode + - _Requirements: 13.6_ + +- [x] 25. Wire up integration + - [x] 25.1 Register HieraPlugin with IntegrationManager + - Add to plugin registration in server startup + - _Requirements: 1.4_ + + - [x] 25.2 Add Hiera routes to Express app + - Mount routes at /api/integrations/hiera + - _Requirements: 14.1-14.6_ + + - [x] 25.3 Update Navigation component + - Add Hiera-related navigation items + - _Requirements: 6.1, 7.1_ + + - [x] 25.4 Update Router component + - Add routes for new pages/tabs + - _Requirements: 6.1, 7.1_ + +- [ ] 26. Final checkpoint - Full integration test + - Ensure all tests pass, ask the user if questions arise. 
+ - Test end-to-end flow with sample control repository + - Verify all UI components render correctly + - Verify all API endpoints respond correctly + +## Notes + +- Tasks marked with `*` are optional property-based tests that can be skipped for faster MVP +- Each task references specific requirements for traceability +- Checkpoints ensure incremental validation +- Property tests validate universal correctness properties +- Unit tests validate specific examples and edge cases +- The implementation follows the existing integration plugin architecture +- Frontend components use Svelte 5 with TypeScript +- Backend uses Express with TypeScript diff --git a/.kiro/todo/inventory-multiple-source-tags-bug.md b/.kiro/todo/inventory-multiple-source-tags-bug.md index bfefbab..cd26098 100644 --- a/.kiro/todo/inventory-multiple-source-tags-bug.md +++ b/.kiro/todo/inventory-multiple-source-tags-bug.md @@ -39,4 +39,4 @@ The issue is likely in one of these areas: ## Priority -Medium - This affects the user experience and visibility of multi-source nodes, but doesn't break core functionality. \ No newline at end of file +Medium - This affects the user experience and visibility of multi-source nodes, but doesn't break core functionality. 
diff --git a/README.md b/README.md index 5834c2c..8acfe17 100644 --- a/README.md +++ b/README.md @@ -462,6 +462,7 @@ npm test --workspace=backend ### Version History +- **v0.3.0**: Puppetserver integration, interface enhancements + - **v0.2.0**: PuppetDB integration, re-execution, expert mode enhancements - **v0.1.0**: Initial release with Bolt integration diff --git a/backend/.env.example b/backend/.env.example index 1805fa2..0b172f1 100644 --- a/backend/.env.example +++ b/backend/.env.example @@ -82,6 +82,33 @@ MAX_QUEUE_SIZE=50 # PUPPETSERVER_CIRCUIT_BREAKER_TIMEOUT=60000 # PUPPETSERVER_CIRCUIT_BREAKER_RESET_TIMEOUT=30000 +# Hiera integration configuration +# HIERA_ENABLED=true +# HIERA_CONTROL_REPO_PATH=/path/to/control-repo +# HIERA_CONFIG_PATH=hiera.yaml +# HIERA_ENVIRONMENTS=["production","development"] + +# Hiera fact source configuration +# HIERA_FACT_SOURCE_PREFER_PUPPETDB=true +# HIERA_FACT_SOURCE_LOCAL_PATH=/path/to/facts + +# Hiera catalog compilation configuration +# HIERA_CATALOG_COMPILATION_ENABLED=false +# HIERA_CATALOG_COMPILATION_TIMEOUT=60000 +# HIERA_CATALOG_COMPILATION_CACHE_TTL=300000 + +# Hiera cache configuration +# HIERA_CACHE_ENABLED=true +# HIERA_CACHE_TTL=300000 +# HIERA_CACHE_MAX_ENTRIES=10000 + +# Hiera code analysis configuration +# HIERA_CODE_ANALYSIS_ENABLED=true +# HIERA_CODE_ANALYSIS_LINT_ENABLED=true +# HIERA_CODE_ANALYSIS_MODULE_UPDATE_CHECK=true +# HIERA_CODE_ANALYSIS_INTERVAL=3600000 +# HIERA_CODE_ANALYSIS_EXCLUSION_PATTERNS=["**/vendor/**","**/fixtures/**"] + # OpenSSL Legacy Provider (for OpenSSL 3.0+ compatibility) # Note: This should be set in your shell environment or package.json scripts # export NODE_OPTIONS=--openssl-legacy-provider diff --git a/backend/package.json b/backend/package.json index caff8f5..dec0d91 100644 --- a/backend/package.json +++ b/backend/package.json @@ -17,12 +17,13 @@ "dotenv": "^16.4.5", "express": "^4.19.2", "sqlite3": "^5.1.7", + "yaml": "^2.8.2", "zod": "^3.23.8" }, "devDependencies": { 
"@types/cors": "^2.8.17", "@types/express": "^4.17.21", - "@types/node": "^20.12.7", + "@types/node": "^20.19.27", "@types/supertest": "^6.0.2", "fast-check": "^4.3.0", "supertest": "^7.0.0", diff --git a/backend/src/config/ConfigService.ts b/backend/src/config/ConfigService.ts index 8116203..1ac1d11 100644 --- a/backend/src/config/ConfigService.ts +++ b/backend/src/config/ConfigService.ts @@ -76,6 +76,33 @@ export class ConfigService { resetTimeout?: number; }; }; + hiera?: { + enabled: boolean; + controlRepoPath: string; + hieraConfigPath?: string; + environments?: string[]; + factSources?: { + preferPuppetDB?: boolean; + localFactsPath?: string; + }; + catalogCompilation?: { + enabled?: boolean; + timeout?: number; + cacheTTL?: number; + }; + cache?: { + enabled?: boolean; + ttl?: number; + maxEntries?: number; + }; + codeAnalysis?: { + enabled?: boolean; + lintEnabled?: boolean; + moduleUpdateCheck?: boolean; + analysisInterval?: number; + exclusionPatterns?: string[]; + }; + }; } { const integrations: ReturnType = {}; @@ -232,6 +259,125 @@ export class ConfigService { } } + // Parse Hiera configuration + if (process.env.HIERA_ENABLED === "true") { + const controlRepoPath = process.env.HIERA_CONTROL_REPO_PATH; + if (!controlRepoPath) { + throw new Error( + "HIERA_CONTROL_REPO_PATH is required when HIERA_ENABLED is true", + ); + } + + // Parse environments from JSON array + let environments: string[] | undefined; + if (process.env.HIERA_ENVIRONMENTS) { + try { + const parsed = JSON.parse(process.env.HIERA_ENVIRONMENTS) as unknown; + if (Array.isArray(parsed)) { + environments = parsed.filter( + (item): item is string => typeof item === "string", + ); + } + } catch { + throw new Error( + "HIERA_ENVIRONMENTS must be a valid JSON array of strings", + ); + } + } + + integrations.hiera = { + enabled: true, + controlRepoPath, + hieraConfigPath: process.env.HIERA_CONFIG_PATH, + environments, + }; + + // Parse fact source configuration + if ( + 
process.env.HIERA_FACT_SOURCE_PREFER_PUPPETDB !== undefined || + process.env.HIERA_FACT_SOURCE_LOCAL_PATH + ) { + integrations.hiera.factSources = { + preferPuppetDB: + process.env.HIERA_FACT_SOURCE_PREFER_PUPPETDB !== "false", + localFactsPath: process.env.HIERA_FACT_SOURCE_LOCAL_PATH, + }; + } + + // Parse catalog compilation configuration + if ( + process.env.HIERA_CATALOG_COMPILATION_ENABLED !== undefined || + process.env.HIERA_CATALOG_COMPILATION_TIMEOUT || + process.env.HIERA_CATALOG_COMPILATION_CACHE_TTL + ) { + integrations.hiera.catalogCompilation = { + enabled: process.env.HIERA_CATALOG_COMPILATION_ENABLED === "true", + timeout: process.env.HIERA_CATALOG_COMPILATION_TIMEOUT + ? parseInt(process.env.HIERA_CATALOG_COMPILATION_TIMEOUT, 10) + : undefined, + cacheTTL: process.env.HIERA_CATALOG_COMPILATION_CACHE_TTL + ? parseInt(process.env.HIERA_CATALOG_COMPILATION_CACHE_TTL, 10) + : undefined, + }; + } + + // Parse cache configuration + if ( + process.env.HIERA_CACHE_ENABLED !== undefined || + process.env.HIERA_CACHE_TTL || + process.env.HIERA_CACHE_MAX_ENTRIES + ) { + integrations.hiera.cache = { + enabled: process.env.HIERA_CACHE_ENABLED !== "false", + ttl: process.env.HIERA_CACHE_TTL + ? parseInt(process.env.HIERA_CACHE_TTL, 10) + : undefined, + maxEntries: process.env.HIERA_CACHE_MAX_ENTRIES + ? 
parseInt(process.env.HIERA_CACHE_MAX_ENTRIES, 10) + : undefined, + }; + } + + // Parse code analysis configuration + if ( + process.env.HIERA_CODE_ANALYSIS_ENABLED !== undefined || + process.env.HIERA_CODE_ANALYSIS_LINT_ENABLED !== undefined || + process.env.HIERA_CODE_ANALYSIS_MODULE_UPDATE_CHECK !== undefined || + process.env.HIERA_CODE_ANALYSIS_INTERVAL || + process.env.HIERA_CODE_ANALYSIS_EXCLUSION_PATTERNS + ) { + // Parse exclusion patterns from JSON array + let exclusionPatterns: string[] | undefined; + if (process.env.HIERA_CODE_ANALYSIS_EXCLUSION_PATTERNS) { + try { + const parsed = JSON.parse( + process.env.HIERA_CODE_ANALYSIS_EXCLUSION_PATTERNS, + ) as unknown; + if (Array.isArray(parsed)) { + exclusionPatterns = parsed.filter( + (item): item is string => typeof item === "string", + ); + } + } catch { + throw new Error( + "HIERA_CODE_ANALYSIS_EXCLUSION_PATTERNS must be a valid JSON array of strings", + ); + } + } + + integrations.hiera.codeAnalysis = { + enabled: process.env.HIERA_CODE_ANALYSIS_ENABLED !== "false", + lintEnabled: process.env.HIERA_CODE_ANALYSIS_LINT_ENABLED !== "false", + moduleUpdateCheck: + process.env.HIERA_CODE_ANALYSIS_MODULE_UPDATE_CHECK !== "false", + analysisInterval: process.env.HIERA_CODE_ANALYSIS_INTERVAL + ? 
parseInt(process.env.HIERA_CODE_ANALYSIS_INTERVAL, 10) + : undefined, + exclusionPatterns, + }; + } + } + return integrations; } @@ -467,4 +613,17 @@ export class ConfigService { } return null; } + + /** + * Get Hiera configuration if enabled + */ + public getHieraConfig(): + | (typeof this.config.integrations.hiera & { enabled: true }) + | null { + const hiera = this.config.integrations.hiera; + if (hiera?.enabled) { + return hiera as typeof hiera & { enabled: true }; + } + return null; + } } diff --git a/backend/src/config/schema.ts b/backend/src/config/schema.ts index a6fa34f..d37c78b 100644 --- a/backend/src/config/schema.ts +++ b/backend/src/config/schema.ts @@ -167,13 +167,94 @@ export const PuppetserverConfigSchema = z.object({ export type PuppetserverConfig = z.infer; +/** + * Hiera fact source configuration schema + */ +export const HieraFactSourceConfigSchema = z.object({ + preferPuppetDB: z.boolean().default(true), + localFactsPath: z.string().optional(), +}); + +export type HieraFactSourceConfig = z.infer; + +/** + * Hiera catalog compilation configuration schema + */ +export const HieraCatalogCompilationConfigSchema = z.object({ + enabled: z.boolean().default(false), + timeout: z.number().int().positive().default(60000), // 60 seconds + cacheTTL: z.number().int().positive().default(300000), // 5 minutes +}); + +export type HieraCatalogCompilationConfig = z.infer< + typeof HieraCatalogCompilationConfigSchema +>; + +/** + * Hiera cache configuration schema + */ +export const HieraCacheConfigSchema = z.object({ + enabled: z.boolean().default(true), + ttl: z.number().int().positive().default(300000), // 5 minutes + maxEntries: z.number().int().positive().default(10000), +}); + +export type HieraCacheConfig = z.infer; + +/** + * Hiera code analysis configuration schema + */ +export const HieraCodeAnalysisConfigSchema = z.object({ + enabled: z.boolean().default(true), + lintEnabled: z.boolean().default(true), + moduleUpdateCheck: z.boolean().default(true), + 
analysisInterval: z.number().int().positive().default(3600000), // 1 hour + exclusionPatterns: z.array(z.string()).default([]), +}); + +export type HieraCodeAnalysisConfig = z.infer< + typeof HieraCodeAnalysisConfigSchema +>; + +/** + * Hiera integration configuration schema + */ +export const HieraConfigSchema = z.object({ + enabled: z.boolean().default(false), + controlRepoPath: z.string(), + hieraConfigPath: z.string().default("hiera.yaml"), + environments: z.array(z.string()).default(["production"]), + factSources: HieraFactSourceConfigSchema.default({ + preferPuppetDB: true, + }), + catalogCompilation: HieraCatalogCompilationConfigSchema.default({ + enabled: false, + timeout: 60000, + cacheTTL: 300000, + }), + cache: HieraCacheConfigSchema.default({ + enabled: true, + ttl: 300000, + maxEntries: 10000, + }), + codeAnalysis: HieraCodeAnalysisConfigSchema.default({ + enabled: true, + lintEnabled: true, + moduleUpdateCheck: true, + analysisInterval: 3600000, + exclusionPatterns: [], + }), +}); + +export type HieraConfig = z.infer; + /** * Integrations configuration schema */ export const IntegrationsConfigSchema = z.object({ puppetdb: PuppetDBConfigSchema.optional(), puppetserver: PuppetserverConfigSchema.optional(), - // Future integrations: ansible, terraform, etc. + hiera: HieraConfigSchema.optional(), }); export type IntegrationsConfig = z.infer; diff --git a/backend/src/integrations/hiera/CatalogCompiler.ts b/backend/src/integrations/hiera/CatalogCompiler.ts new file mode 100644 index 0000000..c4f4140 --- /dev/null +++ b/backend/src/integrations/hiera/CatalogCompiler.ts @@ -0,0 +1,491 @@ +/** + * CatalogCompiler + * + * Compiles Puppet catalogs for nodes to extract code-defined variables + * that can be used in Hiera resolution. This enables resolution of + * Hiera keys that depend on variables defined in Puppet code. 
+ * + * Requirements: 12.2, 12.3, 12.4, 12.6 + */ + +import type { IntegrationManager } from "../IntegrationManager"; +import type { InformationSourcePlugin } from "../types"; +import type { CatalogCompilationConfig, Facts } from "./types"; + +/** + * Compiled catalog result with extracted variables + */ +export interface CompiledCatalogResult { + /** Node identifier */ + nodeId: string; + /** Environment used for compilation */ + environment: string; + /** Variables extracted from the catalog */ + variables: Record; + /** Classes included in the catalog */ + classes: string[]; + /** Timestamp when catalog was compiled */ + compiledAt: string; + /** Whether compilation was successful */ + success: boolean; + /** Warning messages if any */ + warnings?: string[]; + /** Error message if compilation failed */ + error?: string; +} + +/** + * Cache entry for compiled catalogs + */ +interface CatalogCacheEntry { + result: CompiledCatalogResult; + cachedAt: number; + expiresAt: number; +} + +/** + * CatalogCompiler + * + * Compiles catalogs using Puppetserver and extracts code-defined variables. + * Implements caching to improve performance. 
+ */ +export class CatalogCompiler { + private integrationManager: IntegrationManager; + private config: CatalogCompilationConfig; + private cache: Map = new Map(); + + constructor( + integrationManager: IntegrationManager, + config: CatalogCompilationConfig + ) { + this.integrationManager = integrationManager; + this.config = config; + } + + /** + * Check if catalog compilation is enabled + */ + isEnabled(): boolean { + return this.config.enabled; + } + + /** + * Compile a catalog for a node and extract variables + * + * @param nodeId - Node identifier (certname) + * @param environment - Puppet environment + * @param facts - Node facts for compilation + * @returns Compiled catalog result with extracted variables + * + * Requirements: 12.3 + */ + async compileCatalog( + nodeId: string, + environment: string, + facts: Facts + ): Promise { + if (!this.config.enabled) { + return this.createDisabledResult(nodeId, environment); + } + + // Check cache first + const cacheKey = this.buildCacheKey(nodeId, environment); + const cached = this.getCachedResult(cacheKey); + if (cached) { + this.log(`Returning cached catalog for node '${nodeId}' in environment '${environment}'`); + return cached; + } + + // Get Puppetserver service + const puppetserver = this.getPuppetserverService(); + if (!puppetserver) { + return this.createFailedResult( + nodeId, + environment, + "Puppetserver integration not available for catalog compilation" + ); + } + + try { + this.log(`Compiling catalog for node '${nodeId}' in environment '${environment}'`); + + // Compile catalog with timeout + const catalog = await this.compileWithTimeout( + puppetserver, + nodeId, + environment + ); + + if (!catalog) { + return this.createFailedResult( + nodeId, + environment, + "Catalog compilation returned null" + ); + } + + // Extract variables and classes from catalog + const variables = this.extractVariables(catalog); + const classes = this.extractClasses(catalog); + + const result: CompiledCatalogResult = { + 
nodeId, + environment, + variables, + classes, + compiledAt: new Date().toISOString(), + success: true, + }; + + // Cache the result + this.cacheResult(cacheKey, result); + + this.log( + `Successfully compiled catalog for node '${nodeId}': ` + + `${Object.keys(variables).length} variables, ${classes.length} classes` + ); + + return result; + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error); + this.log(`Catalog compilation failed for node '${nodeId}': ${errorMessage}`, "warn"); + + return this.createFailedResult(nodeId, environment, errorMessage); + } + } + + /** + * Get variables for a node from compiled catalog + * + * Returns cached variables if available, otherwise compiles the catalog. + * + * @param nodeId - Node identifier + * @param environment - Puppet environment + * @param facts - Node facts + * @returns Variables extracted from catalog, or empty object if compilation fails + * + * Requirements: 12.3, 12.4 + */ + async getVariables( + nodeId: string, + environment: string, + facts: Facts + ): Promise<{ variables: Record; warnings?: string[] }> { + const result = await this.compileCatalog(nodeId, environment, facts); + + if (!result.success) { + // Return empty variables with warning (fallback behavior) + return { + variables: {}, + warnings: [ + `Catalog compilation failed for node '${nodeId}': ${result.error ?? "Unknown error"}. ` + + "Using fact-only resolution." 
+ ], + }; + } + + return { + variables: result.variables, + warnings: result.warnings, + }; + } + + /** + * Compile catalog with timeout + * + * @param puppetserver - Puppetserver service + * @param nodeId - Node identifier + * @param environment - Puppet environment + * @returns Compiled catalog or null + */ + private async compileWithTimeout( + puppetserver: InformationSourcePlugin, + nodeId: string, + environment: string + ): Promise { + const timeoutMs = this.config.timeout; + + return new Promise((resolve, reject) => { + const timeoutId = setTimeout(() => { + reject(new Error(`Catalog compilation timed out after ${timeoutMs}ms`)); + }, timeoutMs); + + // Use getNodeData with 'catalog' type to get compiled catalog + // The Puppetserver service's compileCatalog method is accessed via getNodeData + this.compileCatalogViaService(puppetserver, nodeId, environment) + .then((result) => { + clearTimeout(timeoutId); + resolve(result); + }) + .catch((error) => { + clearTimeout(timeoutId); + reject(error); + }); + }); + } + + /** + * Compile catalog via Puppetserver service + * + * @param puppetserver - Puppetserver service + * @param nodeId - Node identifier + * @param environment - Puppet environment + * @returns Compiled catalog + */ + private async compileCatalogViaService( + puppetserver: InformationSourcePlugin, + nodeId: string, + environment: string + ): Promise { + // Check if the service has a compileCatalog method + const service = puppetserver as unknown as { + compileCatalog?: (certname: string, environment: string) => Promise; + }; + + if (typeof service.compileCatalog === "function") { + return service.compileCatalog(nodeId, environment); + } + + // Fallback to getNodeData with 'catalog' type + return puppetserver.getNodeData(nodeId, "catalog"); + } + + /** + * Extract variables from a compiled catalog + * + * Extracts class parameters and resource parameters that can be used + * as variables in Hiera resolution. 
+ * + * @param catalog - Compiled catalog + * @returns Extracted variables + */ + private extractVariables(catalog: unknown): Record { + const variables: Record = {}; + + if (!catalog || typeof catalog !== "object") { + return variables; + } + + const catalogObj = catalog as { + resources?: Array<{ + type: string; + title: string; + parameters?: Record; + }>; + classes?: string[]; + environment?: string; + }; + + // Extract class parameters from Class resources + if (Array.isArray(catalogObj.resources)) { + for (const resource of catalogObj.resources) { + if (resource.type === "Class" && resource.parameters) { + // Store class parameters as variables + // Format: classname::parameter + const className = resource.title.toLowerCase(); + for (const [paramName, paramValue] of Object.entries(resource.parameters)) { + const varName = `${className}::${paramName}`; + variables[varName] = paramValue; + } + } + } + } + + // Add environment as a variable + if (catalogObj.environment) { + variables["environment"] = catalogObj.environment; + } + + return variables; + } + + /** + * Extract class names from a compiled catalog + * + * @param catalog - Compiled catalog + * @returns Array of class names + */ + private extractClasses(catalog: unknown): string[] { + const classes: string[] = []; + + if (!catalog || typeof catalog !== "object") { + return classes; + } + + const catalogObj = catalog as { + resources?: Array<{ + type: string; + title: string; + }>; + classes?: string[]; + }; + + // Extract from classes array if present + if (Array.isArray(catalogObj.classes)) { + classes.push(...catalogObj.classes.map((c) => c.toLowerCase())); + } + + // Extract from Class resources + if (Array.isArray(catalogObj.resources)) { + for (const resource of catalogObj.resources) { + if (resource.type === "Class") { + const className = resource.title.toLowerCase(); + if (!classes.includes(className)) { + classes.push(className); + } + } + } + } + + return classes; + } + + /** + * Get 
Puppetserver service from integration manager + */ + private getPuppetserverService(): InformationSourcePlugin | null { + return this.integrationManager.getInformationSource("puppetserver"); + } + + /** + * Build cache key for a node and environment + */ + private buildCacheKey(nodeId: string, environment: string): string { + return `${nodeId}:${environment}`; + } + + /** + * Get cached result if not expired + */ + private getCachedResult(cacheKey: string): CompiledCatalogResult | null { + const entry = this.cache.get(cacheKey); + if (!entry) { + return null; + } + + if (Date.now() > entry.expiresAt) { + this.cache.delete(cacheKey); + return null; + } + + return entry.result; + } + + /** + * Cache a compilation result + */ + private cacheResult(cacheKey: string, result: CompiledCatalogResult): void { + const now = Date.now(); + this.cache.set(cacheKey, { + result, + cachedAt: now, + expiresAt: now + this.config.cacheTTL, + }); + } + + /** + * Create a result for when compilation is disabled + */ + private createDisabledResult( + nodeId: string, + environment: string + ): CompiledCatalogResult { + return { + nodeId, + environment, + variables: {}, + classes: [], + compiledAt: new Date().toISOString(), + success: false, + error: "Catalog compilation is disabled", + }; + } + + /** + * Create a failed result + */ + private createFailedResult( + nodeId: string, + environment: string, + error: string + ): CompiledCatalogResult { + return { + nodeId, + environment, + variables: {}, + classes: [], + compiledAt: new Date().toISOString(), + success: false, + error, + }; + } + + /** + * Clear the cache + */ + clearCache(): void { + this.cache.clear(); + this.log("Catalog cache cleared"); + } + + /** + * Invalidate cache for a specific node + */ + invalidateNode(nodeId: string): void { + const keysToDelete: string[] = []; + for (const key of this.cache.keys()) { + if (key.startsWith(`${nodeId}:`)) { + keysToDelete.push(key); + } + } + for (const key of keysToDelete) { + 
this.cache.delete(key); + } + if (keysToDelete.length > 0) { + this.log(`Invalidated ${keysToDelete.length} cache entries for node '${nodeId}'`); + } + } + + /** + * Get cache statistics + */ + getCacheStats(): { + size: number; + enabled: boolean; + cacheTTL: number; + } { + return { + size: this.cache.size, + enabled: this.config.enabled, + cacheTTL: this.config.cacheTTL, + }; + } + + /** + * Update configuration + */ + updateConfig(config: CatalogCompilationConfig): void { + this.config = config; + // Clear cache when config changes + this.clearCache(); + this.log(`Configuration updated: enabled=${config.enabled}, timeout=${config.timeout}ms, cacheTTL=${config.cacheTTL}ms`); + } + + /** + * Log a message + */ + private log(message: string, level: "info" | "warn" | "error" = "info"): void { + const prefix = "[CatalogCompiler]"; + switch (level) { + case "warn": + console.warn(prefix, message); + break; + case "error": + console.error(prefix, message); + break; + default: + // eslint-disable-next-line no-console + console.log(prefix, message); + } + } +} diff --git a/backend/src/integrations/hiera/CodeAnalyzer.ts b/backend/src/integrations/hiera/CodeAnalyzer.ts new file mode 100644 index 0000000..e77c8b5 --- /dev/null +++ b/backend/src/integrations/hiera/CodeAnalyzer.ts @@ -0,0 +1,1230 @@ +/** + * CodeAnalyzer + * + * Performs static analysis of Puppet code in a control repository. + * Detects unused code, lint issues, and provides usage statistics. 
+ * + * Requirements: 8.1, 8.2, 8.3, 8.4, 8.5, 9.1, 9.2, 9.3, 9.4, 9.5, 15.3 + */ + +import * as fs from "fs"; +import * as path from "path"; +import type { + CodeAnalysisResult, + UnusedCodeReport, + UnusedItem, + LintIssue, + LintSeverity, + ModuleUpdate, + UsageStatistics, + ClassUsage, + ResourceUsage, + CodeAnalysisConfig, +} from "./types"; +import type { IntegrationManager } from "../IntegrationManager"; +import type { HieraScanner } from "./HieraScanner"; +import { PuppetfileParser } from "./PuppetfileParser"; +import type { PuppetfileParseResult, ParsedModule } from "./PuppetfileParser"; +import { ForgeClient } from "./ForgeClient"; +import type { ModuleUpdateCheckResult } from "./ForgeClient"; + +/** + * Cache entry for analysis results + */ +interface AnalysisCacheEntry { + value: T; + cachedAt: number; + expiresAt: number; +} + +/** + * Parsed Puppet class information + */ +interface PuppetClass { + name: string; + file: string; + line: number; + parameters: string[]; +} + +/** + * Parsed Puppet defined type information + */ +interface PuppetDefinedType { + name: string; + file: string; + line: number; + parameters: string[]; +} + +/** + * Parsed Puppet manifest information + */ +interface ManifestInfo { + file: string; + classes: PuppetClass[]; + definedTypes: PuppetDefinedType[]; + resources: ResourceInfo[]; + includes: string[]; + hieraLookups: string[]; + linesOfCode: number; +} + +/** + * Resource information from manifest + */ +interface ResourceInfo { + type: string; + title: string; + file: string; + line: number; +} + +/** + * Filter options for lint issues + */ +export interface LintFilterOptions { + severity?: LintSeverity[]; + types?: string[]; +} + +/** + * Issue counts by category + */ +export interface IssueCounts { + bySeverity: Record; + byRule: Record; + total: number; +} + +/** + * CodeAnalyzer class for static analysis of Puppet code + */ +export class CodeAnalyzer { + private controlRepoPath: string; + private config: 
CodeAnalysisConfig; + private hieraScanner: HieraScanner | null = null; + private integrationManager: IntegrationManager | null = null; + + // Cache storage + private analysisCache: AnalysisCacheEntry | null = null; + private manifestCache: Map = new Map(); + private lastPuppetfileParseResult: PuppetfileParseResult | null = null; + private lastModuleUpdateResults: ModuleUpdateCheckResult[] | null = null; + private forgeClient: ForgeClient; + + // Parsed data + private classes: Map = new Map(); + private definedTypes: Map = new Map(); + private manifests: ManifestInfo[] = []; + private initialized = false; + + constructor(controlRepoPath: string, config: CodeAnalysisConfig) { + this.controlRepoPath = controlRepoPath; + this.config = config; + this.forgeClient = new ForgeClient(); + } + + /** + * Set the IntegrationManager for accessing PuppetDB data + */ + setIntegrationManager(manager: IntegrationManager): void { + this.integrationManager = manager; + } + + /** + * Set the HieraScanner for Hiera key analysis + */ + setHieraScanner(scanner: HieraScanner): void { + this.hieraScanner = scanner; + } + + /** + * Initialize the analyzer by scanning the control repository + */ + async initialize(): Promise { + if (this.initialized) { + return; + } + + this.log("Initializing CodeAnalyzer..."); + + // Scan manifests directory + const manifestsPath = this.resolvePath("manifests"); + if (fs.existsSync(manifestsPath)) { + await this.scanManifestsDirectory(manifestsPath, "manifests"); + } + + // Scan site-modules directory (common in control repos) + const siteModulesPath = this.resolvePath("site-modules"); + if (fs.existsSync(siteModulesPath)) { + await this.scanModulesDirectory(siteModulesPath); + } + + // Scan site directory (alternative structure) + const sitePath = this.resolvePath("site"); + if (fs.existsSync(sitePath)) { + await this.scanModulesDirectory(sitePath); + } + + // Scan modules directory + const modulesPath = this.resolvePath("modules"); + if 
(fs.existsSync(modulesPath)) { + await this.scanModulesDirectory(modulesPath); + } + + this.initialized = true; + this.log(`CodeAnalyzer initialized: ${this.classes.size} classes, ${this.definedTypes.size} defined types`); + } + + /** + * Check if the analyzer is initialized + */ + isInitialized(): boolean { + return this.initialized; + } + + + // ============================================================================ + // Main Analysis Methods + // ============================================================================ + + /** + * Perform complete code analysis + * + * @returns Complete analysis result + */ + async analyze(): Promise { + this.ensureInitialized(); + + // Check cache + if (this.analysisCache && !this.isCacheExpired(this.analysisCache)) { + return this.analysisCache.value; + } + + const result: CodeAnalysisResult = { + unusedCode: await this.getUnusedCode(), + lintIssues: this.config.lintEnabled ? await this.getLintIssues() : [], + moduleUpdates: this.config.moduleUpdateCheck ? 
await this.getModuleUpdates() : [], + statistics: await this.getUsageStatistics(), + analyzedAt: new Date().toISOString(), + }; + + // Cache the result + if (this.config.enabled) { + this.analysisCache = this.createCacheEntry(result); + } + + return result; + } + + /** + * Get unused code report + * + * Requirements: 8.1, 8.2, 8.3, 8.4 + */ + async getUnusedCode(): Promise { + this.ensureInitialized(); + + const unusedClasses = await this.detectUnusedClasses(); + const unusedDefinedTypes = await this.detectUnusedDefinedTypes(); + const unusedHieraKeys = await this.detectUnusedHieraKeys(); + + return { + unusedClasses, + unusedDefinedTypes, + unusedHieraKeys, + }; + } + + /** + * Get lint issues + * + * Requirements: 9.1, 9.2, 9.3 + */ + async getLintIssues(): Promise { + this.ensureInitialized(); + + const issues: LintIssue[] = []; + + // Scan all manifest files for issues + for (const manifest of this.manifests) { + const fileIssues = await this.lintManifest(manifest.file); + issues.push(...fileIssues); + } + + return issues; + } + + /** + * Get module updates + * + * Requirements: 10.1, 10.2, 10.5 + */ + async getModuleUpdates(): Promise { + // Parse Puppetfile if it exists + const puppetfilePath = this.resolvePath("Puppetfile"); + if (!fs.existsSync(puppetfilePath)) { + return []; + } + + const parser = new PuppetfileParser(); + const parseResult = parser.parseFile(puppetfilePath); + + // Store parse result for error reporting + this.lastPuppetfileParseResult = parseResult; + + if (!parseResult.success) { + this.log(`Puppetfile parse errors: ${parseResult.errors.map(e => e.message).join(", ")}`, "warn"); + } + + // Check for updates from Puppet Forge + if (this.config.moduleUpdateCheck && parseResult.modules.length > 0) { + try { + const updateResults = await this.forgeClient.checkForUpdates(parseResult.modules); + this.lastModuleUpdateResults = updateResults; + return this.forgeClient.toModuleUpdates(updateResults); + } catch (error) { + this.log(`Failed to 
check for module updates: ${this.getErrorMessage(error)}`, "warn"); + // Fall back to basic module info without update check + return parser.toModuleUpdates(parseResult.modules); + } + } + + // Convert to ModuleUpdate format without update check + return parser.toModuleUpdates(parseResult.modules); + } + + /** + * Get the last Puppetfile parse result (for error reporting) + */ + getPuppetfileParseResult(): PuppetfileParseResult | null { + return this.lastPuppetfileParseResult; + } + + /** + * Get the last module update check results (for detailed info) + */ + getModuleUpdateResults(): ModuleUpdateCheckResult[] | null { + return this.lastModuleUpdateResults; + } + + /** + * Get usage statistics + * + * Requirements: 11.1, 11.2, 11.3, 11.5 + */ + async getUsageStatistics(): Promise { + this.ensureInitialized(); + + // Calculate lines of code + let totalLinesOfCode = 0; + for (const manifest of this.manifests) { + totalLinesOfCode += manifest.linesOfCode; + } + + // Count resources by type + const resourceCounts = new Map(); + for (const manifest of this.manifests) { + for (const resource of manifest.resources) { + const count = resourceCounts.get(resource.type) || 0; + resourceCounts.set(resource.type, count + 1); + } + } + + // Build most used resources list (ranked by count) + const mostUsedResources: ResourceUsage[] = Array.from(resourceCounts.entries()) + .map(([type, count]) => ({ type, count })) + .sort((a, b) => b.count - a.count) + .slice(0, 10); + + // Get class usage across nodes (from PuppetDB catalogs if available) + const mostUsedClasses = await this.getClassUsageAcrossNodes(); + + return { + totalManifests: this.manifests.length, + totalClasses: this.classes.size, + totalDefinedTypes: this.definedTypes.size, + totalFunctions: this.countFunctions(), + linesOfCode: totalLinesOfCode, + mostUsedClasses, + mostUsedResources, + }; + } + + /** + * Get class usage across nodes from PuppetDB catalogs + * + * Counts how many nodes include each class and ranks by 
frequency. + * + * Requirements: 11.1, 11.5 + */ + async getClassUsageAcrossNodes(): Promise { + // Track class usage: className -> Set of nodeIds + const classUsageCounts = new Map>(); + + // Try to get class usage from PuppetDB catalogs + if (this.integrationManager) { + const puppetdb = this.integrationManager.getInformationSource("puppetdb"); + + if (puppetdb && puppetdb.isInitialized()) { + try { + // Get all nodes from PuppetDB + const inventory = await puppetdb.getInventory(); + + for (const node of inventory) { + const nodeId = node.certname || node.id; + + try { + // Get catalog for each node + const catalogData = await puppetdb.getNodeData(nodeId, "catalog"); + + if (catalogData && typeof catalogData === "object") { + const catalog = catalogData as { resources?: Array<{ type: string; title: string }> }; + + if (catalog.resources && Array.isArray(catalog.resources)) { + // Extract Class resources + for (const resource of catalog.resources) { + if (resource.type === "Class") { + const className = resource.title.toLowerCase(); + + if (!classUsageCounts.has(className)) { + classUsageCounts.set(className, new Set()); + } + classUsageCounts.get(className)!.add(nodeId); + } + } + } + } + } catch (error) { + // Skip nodes where catalog retrieval fails + this.log(`Failed to get catalog for node ${nodeId}: ${this.getErrorMessage(error)}`, "warn"); + } + } + } catch (error) { + this.log(`Failed to get nodes from PuppetDB: ${this.getErrorMessage(error)}`, "warn"); + } + } + } + + // If no PuppetDB data, fall back to manifest-based analysis + if (classUsageCounts.size === 0) { + return this.getClassUsageFromManifests(); + } + + // Build most used classes list (ranked by usage count) + const mostUsedClasses: ClassUsage[] = Array.from(classUsageCounts.entries()) + .map(([name, nodes]) => ({ + name, + usageCount: nodes.size, + nodes: Array.from(nodes), + })) + .sort((a, b) => b.usageCount - a.usageCount) + .slice(0, 10); + + return mostUsedClasses; + } + + /** + * Get 
class usage from manifest includes (fallback when PuppetDB unavailable) + * + * Counts class usage based on include statements in manifests. + */ + private getClassUsageFromManifests(): ClassUsage[] { + const classUsageCounts = new Map>(); + + for (const manifest of this.manifests) { + for (const includedClass of manifest.includes) { + if (!classUsageCounts.has(includedClass)) { + classUsageCounts.set(includedClass, new Set()); + } + classUsageCounts.get(includedClass)!.add(manifest.file); + } + } + + // Build most used classes list (ranked by usage count) + const mostUsedClasses: ClassUsage[] = Array.from(classUsageCounts.entries()) + .map(([name, files]) => ({ + name, + usageCount: files.size, + nodes: [], // No node data available from manifest analysis + })) + .sort((a, b) => b.usageCount - a.usageCount) + .slice(0, 10); + + return mostUsedClasses; + } + + /** + * Count functions in the control repository + * + * Scans lib/puppet/functions directories for function definitions. + * + * Requirements: 11.2 + */ + private countFunctions(): number { + let functionCount = 0; + + // Check common function locations + const functionPaths = [ + "lib/puppet/functions", + "site-modules/*/lib/puppet/functions", + "modules/*/lib/puppet/functions", + ]; + + for (const pattern of functionPaths) { + const basePath = pattern.split("*")[0]; + const fullBasePath = this.resolvePath(basePath); + + if (fs.existsSync(fullBasePath)) { + functionCount += this.countRubyFilesRecursive(fullBasePath); + } + } + + return functionCount; + } + + /** + * Count Ruby files recursively in a directory + */ + private countRubyFilesRecursive(dirPath: string): number { + let count = 0; + + try { + const entries = fs.readdirSync(dirPath, { withFileTypes: true }); + + for (const entry of entries) { + const entryPath = path.join(dirPath, entry.name); + + if (entry.isDirectory()) { + count += this.countRubyFilesRecursive(entryPath); + } else if (entry.isFile() && entry.name.endsWith(".rb")) { + count++; + 
} + } + } catch { + // Ignore errors reading directories + } + + return count; + } + + + // ============================================================================ + // Unused Code Detection + // ============================================================================ + + /** + * Detect unused classes + * + * A class is considered unused if it's not included by any other manifest. + * + * Requirements: 8.1, 8.4 + */ + private async detectUnusedClasses(): Promise { + const unusedClasses: UnusedItem[] = []; + + // Collect all included classes + const includedClasses = new Set(); + for (const manifest of this.manifests) { + for (const includedClass of manifest.includes) { + includedClasses.add(includedClass.toLowerCase()); + } + } + + // Find classes that are never included + for (const [className, classInfo] of this.classes) { + const lowerName = className.toLowerCase(); + + // Check exclusion patterns + if (this.isExcluded(className)) { + continue; + } + + // Skip main classes (e.g., role::*, profile::*) as they're typically included from node definitions + // which may not be in the control repo + if (!includedClasses.has(lowerName)) { + unusedClasses.push({ + name: className, + file: classInfo.file, + line: classInfo.line, + type: "class", + }); + } + } + + return unusedClasses; + } + + /** + * Detect unused defined types + * + * A defined type is considered unused if it's not instantiated anywhere. 
+ * + * Requirements: 8.2, 8.4 + */ + private async detectUnusedDefinedTypes(): Promise { + const unusedDefinedTypes: UnusedItem[] = []; + + // Collect all instantiated defined types + const instantiatedTypes = new Set(); + for (const manifest of this.manifests) { + for (const resource of manifest.resources) { + // Defined types are used as resource types + instantiatedTypes.add(resource.type.toLowerCase()); + } + } + + // Find defined types that are never instantiated + for (const [typeName, typeInfo] of this.definedTypes) { + const lowerName = typeName.toLowerCase(); + + // Check exclusion patterns + if (this.isExcluded(typeName)) { + continue; + } + + if (!instantiatedTypes.has(lowerName)) { + unusedDefinedTypes.push({ + name: typeName, + file: typeInfo.file, + line: typeInfo.line, + type: "defined_type", + }); + } + } + + return unusedDefinedTypes; + } + + /** + * Detect unused Hiera keys + * + * A Hiera key is considered unused if it's not referenced in any manifest. + * + * Requirements: 8.3, 8.4 + */ + private async detectUnusedHieraKeys(): Promise { + const unusedHieraKeys: UnusedItem[] = []; + + if (!this.hieraScanner) { + return unusedHieraKeys; + } + + // Collect all Hiera lookups from manifests + const referencedKeys = new Set(); + for (const manifest of this.manifests) { + for (const key of manifest.hieraLookups) { + referencedKeys.add(key.toLowerCase()); + } + } + + // Get all Hiera keys from scanner + const allKeys = this.hieraScanner.getAllKeys(); + + // Find keys that are never referenced + for (const key of allKeys) { + const lowerName = key.name.toLowerCase(); + + // Check exclusion patterns + if (this.isExcluded(key.name)) { + continue; + } + + if (!referencedKeys.has(lowerName)) { + // Get the first location for file/line info + const location = key.locations[0]; + unusedHieraKeys.push({ + name: key.name, + file: location?.file ?? "unknown", + line: location?.lineNumber ?? 
0, + type: "hiera_key", + }); + } + } + + return unusedHieraKeys; + } + + /** + * Check if a name matches any exclusion pattern + * + * Requirements: 8.5 + */ + private isExcluded(name: string): boolean { + const patterns = this.config.exclusionPatterns ?? []; + + for (const pattern of patterns) { + // Support glob-like patterns with * wildcard + const regex = new RegExp( + "^" + pattern.replace(/\*/g, ".*").replace(/\?/g, ".") + "$", + "i" + ); + if (regex.test(name)) { + return true; + } + } + + return false; + } + + + // ============================================================================ + // Lint Issue Detection + // ============================================================================ + + /** + * Lint a single manifest file + * + * Detects syntax errors and common style violations. + * + * Requirements: 9.1, 9.2 + */ + private async lintManifest(filePath: string): Promise { + const issues: LintIssue[] = []; + const fullPath = this.resolvePath(filePath); + + let content: string; + try { + content = fs.readFileSync(fullPath, "utf-8"); + } catch { + return issues; + } + + const lines = content.split("\n"); + + for (let i = 0; i < lines.length; i++) { + const line = lines[i]; + const lineNumber = i + 1; + + // Check for trailing whitespace + if (/\s+$/.test(line)) { + issues.push({ + file: filePath, + line: lineNumber, + column: line.length - line.trimEnd().length + 1, + severity: "warning", + message: "Trailing whitespace detected", + rule: "trailing_whitespace", + fixable: true, + }); + } + + // Check for tabs (prefer spaces) + const tabMatch = line.match(/\t/); + if (tabMatch) { + issues.push({ + file: filePath, + line: lineNumber, + column: tabMatch.index! 
+ 1, + severity: "warning", + message: "Tab character found, use spaces for indentation", + rule: "no_tabs", + fixable: true, + }); + } + + // Check for lines over 140 characters + if (line.length > 140) { + issues.push({ + file: filePath, + line: lineNumber, + column: 141, + severity: "warning", + message: `Line exceeds 140 characters (${line.length})`, + rule: "line_length", + fixable: false, + }); + } + + // Check for deprecated syntax: import statement + if (/^\s*import\s+/.test(line)) { + issues.push({ + file: filePath, + line: lineNumber, + column: 1, + severity: "warning", + message: "The 'import' statement is deprecated, use 'include' instead", + rule: "deprecated_import", + fixable: true, + }); + } + + // Check for unquoted resource titles + const unquotedTitleMatch = line.match(/^\s*(\w+)\s*{\s*([^'":\s][^:]*)\s*:/); + if (unquotedTitleMatch && !["class", "define", "node"].includes(unquotedTitleMatch[1])) { + issues.push({ + file: filePath, + line: lineNumber, + column: unquotedTitleMatch.index! + unquotedTitleMatch[1].length + 3, + severity: "warning", + message: "Resource title should be quoted", + rule: "unquoted_resource_title", + fixable: true, + }); + } + + // Check for double-quoted strings that could be single-quoted + const doubleQuoteMatch = line.match(/"([^"$\\]*)"/); + if (doubleQuoteMatch && !doubleQuoteMatch[1].includes("'")) { + issues.push({ + file: filePath, + line: lineNumber, + column: doubleQuoteMatch.index! 
+ 1, + severity: "info", + message: "Use single quotes for strings without interpolation", + rule: "single_quote_string_with_variables", + fixable: true, + }); + } + + // Check for ensure => present/absent not being first attribute + if (/^\s+\w+\s*=>\s*/.test(line) && !/^\s+ensure\s*=>/.test(line)) { + // Look back to see if this is a resource block without ensure first + const prevLines = lines.slice(Math.max(0, i - 5), i).join("\n"); + if (/{\s*$/.test(prevLines) && !/ensure\s*=>/.test(prevLines)) { + // This is a heuristic - ensure should typically be first + } + } + + // Check for syntax errors: unmatched braces + const openBraces = (line.match(/{/g) || []).length; + const closeBraces = (line.match(/}/g) || []).length; + // This is a simple check - real syntax validation would need a parser + + // Check for empty class/define bodies + if (/^\s*(class|define)\s+[\w:]+\s*{\s*}\s*$/.test(line)) { + issues.push({ + file: filePath, + line: lineNumber, + column: 1, + severity: "info", + message: "Empty class or defined type body", + rule: "empty_class_body", + fixable: false, + }); + } + } + + // Check for missing documentation + if (!content.includes("# @summary") && !content.includes("# @description")) { + const classMatch = content.match(/^\s*class\s+([\w:]+)/m); + if (classMatch) { + issues.push({ + file: filePath, + line: 1, + column: 1, + severity: "info", + message: `Class '${classMatch[1]}' is missing documentation (@summary)`, + rule: "missing_documentation", + fixable: false, + }); + } + } + + return issues; + } + + /** + * Filter lint issues by criteria + * + * Requirements: 9.4 + */ + filterIssues(issues: LintIssue[], options: LintFilterOptions): LintIssue[] { + let filtered = issues; + + if (options.severity && options.severity.length > 0) { + filtered = filtered.filter((issue) => options.severity!.includes(issue.severity)); + } + + if (options.types && options.types.length > 0) { + filtered = filtered.filter((issue) => 
options.types!.includes(issue.rule)); + } + + return filtered; + } + + /** + * Count issues by category + * + * Requirements: 9.5 + */ + countIssues(issues: LintIssue[]): IssueCounts { + const bySeverity: Record = { + error: 0, + warning: 0, + info: 0, + }; + + const byRule: Record = {}; + + for (const issue of issues) { + bySeverity[issue.severity]++; + byRule[issue.rule] = (byRule[issue.rule] || 0) + 1; + } + + return { + bySeverity, + byRule, + total: issues.length, + }; + } + + + // ============================================================================ + // Manifest Scanning + // ============================================================================ + + /** + * Scan a manifests directory + */ + private async scanManifestsDirectory(dirPath: string, relativePath: string): Promise { + let entries: fs.Dirent[]; + + try { + entries = fs.readdirSync(dirPath, { withFileTypes: true }); + } catch (error) { + this.log(`Failed to read directory ${dirPath}: ${this.getErrorMessage(error)}`, "warn"); + return; + } + + for (const entry of entries) { + const entryPath = path.join(dirPath, entry.name); + const entryRelativePath = path.join(relativePath, entry.name); + + if (entry.isDirectory()) { + await this.scanManifestsDirectory(entryPath, entryRelativePath); + } else if (entry.isFile() && entry.name.endsWith(".pp")) { + await this.scanManifestFile(entryPath, entryRelativePath); + } + } + } + + /** + * Scan a modules directory + */ + private async scanModulesDirectory(modulesPath: string): Promise { + let entries: fs.Dirent[]; + + try { + entries = fs.readdirSync(modulesPath, { withFileTypes: true }); + } catch (error) { + this.log(`Failed to read modules directory ${modulesPath}: ${this.getErrorMessage(error)}`, "warn"); + return; + } + + for (const entry of entries) { + if (entry.isDirectory()) { + const modulePath = path.join(modulesPath, entry.name); + const manifestsPath = path.join(modulePath, "manifests"); + + if (fs.existsSync(manifestsPath)) { + const 
relativePath = path.relative(this.controlRepoPath, manifestsPath); + await this.scanManifestsDirectory(manifestsPath, relativePath); + } + } + } + } + + /** + * Scan a single manifest file + */ + private async scanManifestFile(filePath: string, relativePath: string): Promise { + // Check cache + if (this.manifestCache.has(relativePath)) { + const cached = this.manifestCache.get(relativePath)!; + this.manifests.push(cached); + this.addManifestToIndex(cached); + return; + } + + let content: string; + try { + content = fs.readFileSync(filePath, "utf-8"); + } catch (error) { + this.log(`Failed to read manifest ${relativePath}: ${this.getErrorMessage(error)}`, "warn"); + return; + } + + const manifestInfo = this.parseManifest(content, relativePath); + this.manifests.push(manifestInfo); + this.manifestCache.set(relativePath, manifestInfo); + this.addManifestToIndex(manifestInfo); + } + + /** + * Add manifest info to the class/defined type indexes + */ + private addManifestToIndex(manifest: ManifestInfo): void { + for (const classInfo of manifest.classes) { + this.classes.set(classInfo.name, classInfo); + } + + for (const typeInfo of manifest.definedTypes) { + this.definedTypes.set(typeInfo.name, typeInfo); + } + } + + /** + * Parse a Puppet manifest file + */ + private parseManifest(content: string, filePath: string): ManifestInfo { + const classes: PuppetClass[] = []; + const definedTypes: PuppetDefinedType[] = []; + const resources: ResourceInfo[] = []; + const includes: string[] = []; + const hieraLookups: string[] = []; + + const lines = content.split("\n"); + const linesOfCode = lines.filter((line) => { + const trimmed = line.trim(); + return trimmed.length > 0 && !trimmed.startsWith("#"); + }).length; + + // Parse class definitions + const classRegex = /^\s*class\s+([\w:]+)\s*(?:\(([\s\S]*?)\))?\s*(?:inherits\s+[\w:]+\s*)?{/gm; + let match: RegExpExecArray | null; + + while ((match = classRegex.exec(content)) !== null) { + const className = match[1]; + const 
lineNumber = this.getLineNumber(content, match.index); + const parameters = this.parseParameters(match[2] || ""); + + classes.push({ + name: className, + file: filePath, + line: lineNumber, + parameters, + }); + } + + // Parse defined type definitions + const defineRegex = /^\s*define\s+([\w:]+)\s*(?:\(([\s\S]*?)\))?\s*{/gm; + + while ((match = defineRegex.exec(content)) !== null) { + const typeName = match[1]; + const lineNumber = this.getLineNumber(content, match.index); + const parameters = this.parseParameters(match[2] || ""); + + definedTypes.push({ + name: typeName, + file: filePath, + line: lineNumber, + parameters, + }); + } + + // Parse resource declarations + const resourceRegex = /^\s*([\w:]+)\s*{\s*['"]?([^'":\s][^:]*?)['"]?\s*:/gm; + + while ((match = resourceRegex.exec(content)) !== null) { + const resourceType = match[1]; + const resourceTitle = match[2].trim(); + const lineNumber = this.getLineNumber(content, match.index); + + // Skip class, define, node declarations + if (!["class", "define", "node"].includes(resourceType.toLowerCase())) { + resources.push({ + type: resourceType, + title: resourceTitle, + file: filePath, + line: lineNumber, + }); + } + } + + // Parse include statements + const includeRegex = /^\s*(?:include|contain|require)\s+(?:['"]?([\w:]+)['"]?|[\w:]+)/gm; + + while ((match = includeRegex.exec(content)) !== null) { + const includedClass = match[1] || match[0].split(/\s+/)[1].replace(/['"]/g, ""); + includes.push(includedClass); + } + + // Parse Hiera lookups + const hieraRegex = /(?:hiera|lookup)\s*\(\s*['"]([^'"]+)['"]/g; + + while ((match = hieraRegex.exec(content)) !== null) { + hieraLookups.push(match[1]); + } + + // Also look for automatic parameter lookups (class parameters) + for (const classInfo of classes) { + for (const param of classInfo.parameters) { + // Class parameters are automatically looked up as classname::paramname + hieraLookups.push(`${classInfo.name}::${param}`); + } + } + + return { + file: filePath, + 
classes, + definedTypes, + resources, + includes, + hieraLookups, + linesOfCode, + }; + } + + /** + * Parse parameter list from class/define declaration + */ + private parseParameters(paramString: string): string[] { + if (!paramString.trim()) { + return []; + } + + const params: string[] = []; + // Simple parameter extraction - looks for $paramname + const paramRegex = /\$(\w+)/g; + let match: RegExpExecArray | null; + + while ((match = paramRegex.exec(paramString)) !== null) { + params.push(match[1]); + } + + return params; + } + + /** + * Get line number for a position in content + */ + private getLineNumber(content: string, position: number): number { + const beforeMatch = content.substring(0, position); + return (beforeMatch.match(/\n/g) || []).length + 1; + } + + + // ============================================================================ + // Cache Management + // ============================================================================ + + /** + * Clear all caches + */ + clearCache(): void { + this.analysisCache = null; + this.manifestCache.clear(); + this.log("Analysis cache cleared"); + } + + /** + * Reload the analyzer + */ + async reload(): Promise { + this.clearCache(); + this.classes.clear(); + this.definedTypes.clear(); + this.manifests = []; + this.initialized = false; + await this.initialize(); + } + + /** + * Create a cache entry + */ + private createCacheEntry(value: T): AnalysisCacheEntry { + const now = Date.now(); + const ttl = this.config.analysisInterval * 1000; // Convert to ms + return { + value, + cachedAt: now, + expiresAt: now + ttl, + }; + } + + /** + * Check if a cache entry is expired + */ + private isCacheExpired(entry: AnalysisCacheEntry): boolean { + return Date.now() > entry.expiresAt; + } + + // ============================================================================ + // Helper Methods + // ============================================================================ + + /** + * Ensure the analyzer is initialized + 
*/ + private ensureInitialized(): void { + if (!this.initialized) { + throw new Error("CodeAnalyzer is not initialized. Call initialize() first."); + } + } + + /** + * Resolve a path relative to the control repository + */ + private resolvePath(filePath: string): string { + if (path.isAbsolute(filePath)) { + return filePath; + } + return path.join(this.controlRepoPath, filePath); + } + + /** + * Extract error message from unknown error + */ + private getErrorMessage(error: unknown): string { + return error instanceof Error ? error.message : String(error); + } + + /** + * Log a message with analyzer context + */ + private log(message: string, level: "info" | "warn" | "error" = "info"): void { + const prefix = "[CodeAnalyzer]"; + switch (level) { + case "warn": + console.warn(prefix, message); + break; + case "error": + console.error(prefix, message); + break; + default: + // eslint-disable-next-line no-console + console.log(prefix, message); + } + } + + // ============================================================================ + // Accessors + // ============================================================================ + + /** + * Get the control repository path + */ + getControlRepoPath(): string { + return this.controlRepoPath; + } + + /** + * Get all discovered classes + */ + getClasses(): Map { + return this.classes; + } + + /** + * Get all discovered defined types + */ + getDefinedTypes(): Map { + return this.definedTypes; + } + + /** + * Get all scanned manifests + */ + getManifests(): ManifestInfo[] { + return this.manifests; + } + + /** + * Get the configuration + */ + getConfig(): CodeAnalysisConfig { + return this.config; + } +} diff --git a/backend/src/integrations/hiera/FactService.ts b/backend/src/integrations/hiera/FactService.ts new file mode 100644 index 0000000..a9c6fe5 --- /dev/null +++ b/backend/src/integrations/hiera/FactService.ts @@ -0,0 +1,472 @@ +/** + * FactService + * + * Thin wrapper around existing PuppetDB integration for fact 
retrieval. + * Provides fallback to local fact files when PuppetDB is unavailable. + * + * Design Decision: Rather than duplicating fact retrieval logic, this service + * delegates to the existing PuppetDBService.getNodeFacts() when PuppetDB + * integration is available. This ensures: + * - Single source of truth for PuppetDB communication + * - Consistent caching behavior + * - No code duplication + */ + +import * as fs from "fs"; +import * as path from "path"; +import type { IntegrationManager } from "../IntegrationManager"; +import type { InformationSourcePlugin } from "../types"; +import type { Facts, FactResult, LocalFactFile, FactSourceConfig } from "./types"; + +/** + * FactService + * + * Retrieves facts for nodes using PuppetDB as primary source + * with local fact files as fallback. + */ +export class FactService { + private integrationManager: IntegrationManager; + private localFactsPath?: string; + private preferPuppetDB: boolean; + + /** + * Create a new FactService + * + * @param integrationManager - Integration manager for accessing PuppetDB + * @param config - Fact source configuration + */ + constructor( + integrationManager: IntegrationManager, + config?: FactSourceConfig + ) { + this.integrationManager = integrationManager; + this.localFactsPath = config?.localFactsPath; + this.preferPuppetDB = config?.preferPuppetDB ?? true; + } + + /** + * Get facts for a node + * + * Uses PuppetDB if available, falls back to local files. + * Returns empty fact set with warning when no facts available. 
+ * + * @param nodeId - Node identifier (certname) + * @returns Facts and metadata about the source + */ + async getFacts(nodeId: string): Promise { + // Try PuppetDB first if preferred + if (this.preferPuppetDB) { + const puppetdbResult = await this.getFactsFromPuppetDB(nodeId); + if (puppetdbResult) { + return puppetdbResult; + } + } + + // Try local facts + const localResult = await this.getFactsFromLocalFiles(nodeId); + if (localResult) { + return localResult; + } + + // Try PuppetDB as fallback if not preferred initially + if (!this.preferPuppetDB) { + const puppetdbResult = await this.getFactsFromPuppetDB(nodeId); + if (puppetdbResult) { + return puppetdbResult; + } + } + + // No facts available - return empty set with warning + return this.createEmptyFactResult(nodeId); + } + + /** + * Get the fact source that would be used for a node + * + * @param nodeId - Node identifier + * @returns Source type or 'none' if no facts available + */ + async getFactSource(nodeId: string): Promise<"puppetdb" | "local" | "none"> { + // Check PuppetDB availability + const puppetdb = this.getPuppetDBSource(); + if (puppetdb?.isInitialized()) { + try { + await puppetdb.getNodeFacts(nodeId); + return "puppetdb"; + } catch { + // PuppetDB doesn't have facts for this node + } + } + + // Check local facts + if (this.localFactsPath) { + const factFile = this.getLocalFactFilePath(nodeId); + if (factFile && fs.existsSync(factFile)) { + return "local"; + } + } + + return "none"; + } + + /** + * List all nodes with available facts (from any source) + * + * @returns Array of node identifiers + */ + async listAvailableNodes(): Promise { + const nodes = new Set(); + + // Get nodes from PuppetDB + const puppetdb = this.getPuppetDBSource(); + if (puppetdb?.isInitialized()) { + try { + const inventory = await puppetdb.getInventory(); + for (const node of inventory) { + nodes.add(node.id); + } + } catch (error) { + this.log(`Failed to get nodes from PuppetDB: ${this.getErrorMessage(error)}`, 
"warn"); + } + } + + // Get nodes from local fact files + if (this.localFactsPath && fs.existsSync(this.localFactsPath)) { + try { + const files = fs.readdirSync(this.localFactsPath); + for (const file of files) { + if (file.endsWith(".json")) { + // Extract node name from filename (remove .json extension) + const nodeName = file.slice(0, -5); + nodes.add(nodeName); + } + } + } catch (error) { + this.log(`Failed to list local fact files: ${this.getErrorMessage(error)}`, "warn"); + } + } + + return Array.from(nodes); + } + + /** + * Update the local facts path + * + * @param localFactsPath - New path to local fact files + */ + setLocalFactsPath(localFactsPath: string | undefined): void { + this.localFactsPath = localFactsPath; + } + + /** + * Update the PuppetDB preference + * + * @param preferPuppetDB - Whether to prefer PuppetDB over local facts + */ + setPreferPuppetDB(preferPuppetDB: boolean): void { + this.preferPuppetDB = preferPuppetDB; + } + + /** + * Get facts from PuppetDB + * + * @param nodeId - Node identifier + * @returns FactResult or null if unavailable + */ + private async getFactsFromPuppetDB(nodeId: string): Promise { + const puppetdb = this.getPuppetDBSource(); + + if (!puppetdb?.isInitialized()) { + this.log("PuppetDB integration not available"); + return null; + } + + try { + const facts = await puppetdb.getNodeFacts(nodeId); + return { + facts, + source: "puppetdb", + }; + } catch (error) { + this.log(`Failed to get facts from PuppetDB for node '${nodeId}': ${this.getErrorMessage(error)}`, "warn"); + return null; + } + } + + /** + * Get facts from local fact files + * + * @param nodeId - Node identifier + * @returns FactResult or null if unavailable + */ + private async getFactsFromLocalFiles(nodeId: string): Promise { + if (!this.localFactsPath) { + return null; + } + + const factFile = this.getLocalFactFilePath(nodeId); + if (!factFile || !fs.existsSync(factFile)) { + return null; + } + + try { + const facts = await 
this.parseLocalFactFile(factFile, nodeId); + return { + facts, + source: "local", + warnings: ["Using local fact files - facts may be outdated"], + }; + } catch (error) { + this.log(`Failed to parse local fact file for node '${nodeId}': ${this.getErrorMessage(error)}`, "warn"); + return null; + } + } + + /** + * Parse a local fact file in Puppetserver format + * + * Supports the Puppetserver fact file format with "name" and "values" structure. + * + * @param filePath - Path to the fact file + * @param nodeId - Node identifier + * @returns Parsed facts + */ + private async parseLocalFactFile(filePath: string, nodeId: string): Promise { + const content = fs.readFileSync(filePath, "utf-8"); + const parsed = JSON.parse(content) as LocalFactFile | Record; + + // Check if it's in Puppetserver format (has "name" and "values") + if (this.isLocalFactFile(parsed)) { + return this.transformLocalFactFile(parsed, nodeId); + } + + // Assume it's a flat fact structure + return this.transformFlatFacts(parsed as Record, nodeId); + } + + /** + * Check if parsed content is in LocalFactFile format + * + * @param parsed - Parsed JSON content + * @returns True if in LocalFactFile format + */ + private isLocalFactFile(parsed: unknown): parsed is LocalFactFile { + return ( + typeof parsed === "object" && + parsed !== null && + "name" in parsed && + "values" in parsed && + typeof (parsed as LocalFactFile).name === "string" && + typeof (parsed as LocalFactFile).values === "object" + ); + } + + /** + * Transform LocalFactFile format to Facts + * + * @param factFile - Local fact file content + * @param nodeId - Node identifier + * @returns Transformed facts + */ + private transformLocalFactFile(factFile: LocalFactFile, nodeId: string): Facts { + const values = factFile.values; + + return { + nodeId, + gatheredAt: new Date().toISOString(), + source: "local", + facts: this.buildFactsObject(values), + }; + } + + /** + * Transform flat fact structure to Facts + * + * @param flatFacts - Flat fact 
object + * @param nodeId - Node identifier + * @returns Transformed facts + */ + private transformFlatFacts(flatFacts: Record, nodeId: string): Facts { + return { + nodeId, + gatheredAt: new Date().toISOString(), + source: "local", + facts: this.buildFactsObject(flatFacts), + }; + } + + /** + * Build a Facts.facts object from raw fact values + * + * Ensures required fields have default values if missing. + * + * @param values - Raw fact values + * @returns Facts.facts object + */ + private buildFactsObject(values: Record): Facts["facts"] { + // Extract or create default values for required fields + const os = this.extractOsFacts(values); + const processors = this.extractProcessorFacts(values); + const memory = this.extractMemoryFacts(values); + const networking = this.extractNetworkingFacts(values); + + return { + os, + processors, + memory, + networking, + ...values, + }; + } + + /** + * Extract OS facts with defaults + */ + private extractOsFacts(values: Record): Facts["facts"]["os"] { + const os = values.os as Record | undefined; + + return { + family: (os?.family as string) ?? "Unknown", + name: (os?.name as string) ?? "Unknown", + release: { + full: ((os?.release as Record)?.full as string) ?? "Unknown", + major: ((os?.release as Record)?.major as string) ?? "Unknown", + }, + }; + } + + /** + * Extract processor facts with defaults + */ + private extractProcessorFacts(values: Record): Facts["facts"]["processors"] { + const processors = values.processors as Record | undefined; + + return { + count: (processors?.count as number) ?? 0, + models: (processors?.models as string[]) ?? [], + }; + } + + /** + * Extract memory facts with defaults + */ + private extractMemoryFacts(values: Record): Facts["facts"]["memory"] { + const memory = values.memory as Record | undefined; + const system = memory?.system as Record | undefined; + + return { + system: { + total: (system?.total as string) ?? "Unknown", + available: (system?.available as string) ?? 
"Unknown", + }, + }; + } + + /** + * Extract networking facts with defaults + */ + private extractNetworkingFacts(values: Record): Facts["facts"]["networking"] { + const networking = values.networking as Record | undefined; + + return { + hostname: (networking?.hostname as string) ?? "Unknown", + interfaces: (networking?.interfaces as Record) ?? {}, + }; + } + + /** + * Get the path to a local fact file for a node + * + * @param nodeId - Node identifier + * @returns File path or null if local facts not configured + */ + private getLocalFactFilePath(nodeId: string): string | null { + if (!this.localFactsPath) { + return null; + } + + return path.join(this.localFactsPath, `${nodeId}.json`); + } + + /** + * Create an empty fact result for when no facts are available + * + * @param nodeId - Node identifier + * @returns Empty FactResult with warning + */ + private createEmptyFactResult(nodeId: string): FactResult { + return { + facts: { + nodeId, + gatheredAt: new Date().toISOString(), + source: "local", + facts: { + os: { + family: "Unknown", + name: "Unknown", + release: { + full: "Unknown", + major: "Unknown", + }, + }, + processors: { + count: 0, + models: [], + }, + memory: { + system: { + total: "Unknown", + available: "Unknown", + }, + }, + networking: { + hostname: "Unknown", + interfaces: {}, + }, + }, + }, + source: "local", + warnings: [`No facts available for node '${nodeId}'`], + }; + } + + /** + * Get the PuppetDB information source from the integration manager + * + * @returns PuppetDB plugin or null + */ + private getPuppetDBSource(): InformationSourcePlugin | null { + return this.integrationManager.getInformationSource("puppetdb"); + } + + /** + * Extract error message from unknown error + * + * @param error - Unknown error + * @returns Error message string + */ + private getErrorMessage(error: unknown): string { + return error instanceof Error ? 
error.message : String(error); + } + + /** + * Log a message + * + * @param message - Message to log + * @param level - Log level + */ + private log(message: string, level: "info" | "warn" | "error" = "info"): void { + const prefix = "[FactService]"; + switch (level) { + case "warn": + console.warn(prefix, message); + break; + case "error": + console.error(prefix, message); + break; + default: + // eslint-disable-next-line no-console + console.log(prefix, message); + } + } +} diff --git a/backend/src/integrations/hiera/ForgeClient.ts b/backend/src/integrations/hiera/ForgeClient.ts new file mode 100644 index 0000000..b1c66ba --- /dev/null +++ b/backend/src/integrations/hiera/ForgeClient.ts @@ -0,0 +1,511 @@ +/** + * ForgeClient + * + * Client for querying the Puppet Forge API to get module information, + * latest versions, and security advisories. + * + * Requirements: 10.2, 10.4 + */ + +import type { ModuleUpdate } from "./types"; +import type { ParsedModule } from "./PuppetfileParser"; + +/** + * Puppet Forge module information + */ +export interface ForgeModuleInfo { + slug: string; + name: string; + owner: { slug: string; username: string }; + current_release: { + version: string; + created_at: string; + deleted_at: string | null; + file_uri: string; + file_size: number; + supported: boolean; + }; + releases: Array<{ + version: string; + created_at: string; + }>; + deprecated_at: string | null; + deprecated_for: string | null; + superseded_by: { slug: string } | null; + endorsement: string | null; + module_group: string; + premium: boolean; +} + +/** + * Security advisory information + */ +export interface SecurityAdvisory { + id: string; + title: string; + severity: "critical" | "high" | "medium" | "low"; + affectedVersions: string; + fixedVersion?: string; + description: string; + url?: string; + publishedAt: string; +} + +/** + * Module security status + */ +export interface ModuleSecurityStatus { + moduleSlug: string; + hasAdvisories: boolean; + advisories: 
SecurityAdvisory[]; + deprecated: boolean; + deprecationReason?: string; +} + +/** + * Forge API error + */ +export interface ForgeApiError { + message: string; + statusCode?: number; + moduleSlug?: string; +} + +/** + * Module update check result + */ +export interface ModuleUpdateCheckResult { + module: ParsedModule; + currentVersion: string; + latestVersion: string; + hasUpdate: boolean; + deprecated: boolean; + deprecatedFor?: string; + supersededBy?: string; + securityStatus?: ModuleSecurityStatus; + error?: string; +} + +/** + * ForgeClient configuration + */ +export interface ForgeClientConfig { + baseUrl?: string; + timeout?: number; + userAgent?: string; + securityAdvisoryUrl?: string; +} + +const DEFAULT_FORGE_URL = "https://forgeapi.puppet.com"; +const DEFAULT_TIMEOUT = 10000; +const DEFAULT_USER_AGENT = "Pabawi/0.4.0"; + +/** + * Known security advisories for common Puppet modules + * This is a static list that can be extended or replaced with an external service + */ +const KNOWN_SECURITY_ADVISORIES: Record = { + // Example: puppetlabs/apache had a security issue in older versions + // This would be populated from a security advisory database +}; + +/** + * ForgeClient class for querying Puppet Forge API + */ +export class ForgeClient { + private baseUrl: string; + private timeout: number; + private userAgent: string; + private securityAdvisories: Map = new Map(); + + constructor(config: ForgeClientConfig = {}) { + this.baseUrl = config.baseUrl ?? DEFAULT_FORGE_URL; + this.timeout = config.timeout ?? DEFAULT_TIMEOUT; + this.userAgent = config.userAgent ?? 
DEFAULT_USER_AGENT; + + // Initialize with known advisories + this.loadKnownAdvisories(); + } + + /** + * Load known security advisories + */ + private loadKnownAdvisories(): void { + for (const [moduleSlug, advisories] of Object.entries(KNOWN_SECURITY_ADVISORIES)) { + this.securityAdvisories.set(this.normalizeSlug(moduleSlug), advisories); + } + } + + /** + * Add a security advisory for a module + * This can be used to dynamically add advisories from external sources + */ + addSecurityAdvisory(moduleSlug: string, advisory: SecurityAdvisory): void { + const normalized = this.normalizeSlug(moduleSlug); + const existing = this.securityAdvisories.get(normalized) ?? []; + existing.push(advisory); + this.securityAdvisories.set(normalized, existing); + } + + /** + * Get security advisories for a module + * + * @param moduleSlug - Module slug in format "author/name" or "author-name" + * @param version - Optional version to filter advisories + * @returns List of security advisories affecting the module + */ + getSecurityAdvisories(moduleSlug: string, version?: string): SecurityAdvisory[] { + const normalized = this.normalizeSlug(moduleSlug); + const advisories = this.securityAdvisories.get(normalized) ?? 
[]; + + if (!version) { + return advisories; + } + + // Filter advisories that affect the specified version + return advisories.filter((advisory) => { + return this.isVersionAffected(version, advisory.affectedVersions, advisory.fixedVersion); + }); + } + + /** + * Check if a version is affected by an advisory + */ + private isVersionAffected(version: string, affectedVersions: string, fixedVersion?: string): boolean { + // Simple version range check + // affectedVersions format: "< 2.0.0" or ">= 1.0.0, < 2.0.0" + + if (fixedVersion && !this.isNewerVersion(fixedVersion, version)) { + // Version is at or after the fix + return false; + } + + // Parse affected versions range + const ranges = affectedVersions.split(",").map((r) => r.trim()); + + for (const range of ranges) { + const ltMatch = range.match(/^<\s*(.+)$/); + const lteMatch = range.match(/^<=\s*(.+)$/); + const gtMatch = range.match(/^>\s*(.+)$/); + const gteMatch = range.match(/^>=\s*(.+)$/); + const eqMatch = range.match(/^=\s*(.+)$/); + + if (ltMatch) { + if (!this.isNewerVersion(ltMatch[1], version)) continue; + return true; + } + if (lteMatch) { + if (this.isNewerVersion(version, lteMatch[1])) continue; + return true; + } + if (gtMatch) { + if (!this.isNewerVersion(version, gtMatch[1])) continue; + return true; + } + if (gteMatch) { + if (this.isNewerVersion(gteMatch[1], version)) continue; + return true; + } + if (eqMatch) { + if (version !== eqMatch[1]) continue; + return true; + } + } + + return false; + } + + /** + * Get security status for a module + * + * @param moduleSlug - Module slug + * @param version - Current version + * @returns Security status including advisories and deprecation info + */ + async getSecurityStatus(moduleSlug: string, version: string): Promise { + const normalized = this.normalizeSlug(moduleSlug); + const advisories = this.getSecurityAdvisories(moduleSlug, version); + + // Also check if module is deprecated (which is a security concern) + const moduleInfo = await 
this.getModuleInfo(moduleSlug); + const deprecated = moduleInfo?.deprecated_at !== null; + + return { + moduleSlug: normalized, + hasAdvisories: advisories.length > 0 || deprecated, + advisories, + deprecated, + deprecationReason: moduleInfo?.deprecated_for ?? undefined, + }; + } + + /** + * Check security for multiple modules + * + * @param modules - List of parsed modules + * @returns Map of module slug to security status + */ + async checkSecurityForModules(modules: ParsedModule[]): Promise> { + const results = new Map(); + + // Only check forge modules (git modules would need different handling) + const forgeModules = modules.filter((m) => m.source === "forge"); + + for (const mod of forgeModules) { + const slug = mod.forgeSlug ?? mod.name; + const status = await this.getSecurityStatus(slug, mod.version); + results.set(this.normalizeSlug(slug), status); + } + + return results; + } + + /** + * Get module information from Puppet Forge + * + * @param moduleSlug - Module slug in format "author/name" or "author-name" + * @returns Module information or null if not found + */ + async getModuleInfo(moduleSlug: string): Promise { + const normalizedSlug = this.normalizeSlug(moduleSlug); + const url = `${this.baseUrl}/v3/modules/${normalizedSlug}`; + + try { + const response = await this.fetchWithTimeout(url); + + if (response.status === 404) { + return null; + } + + if (!response.ok) { + throw new Error(`Forge API returned status ${response.status}`); + } + + const data = await response.json(); + return data as ForgeModuleInfo; + } catch (error) { + this.log(`Failed to fetch module info for ${moduleSlug}: ${this.getErrorMessage(error)}`, "warn"); + return null; + } + } + + /** + * Get the latest version of a module + * + * @param moduleSlug - Module slug in format "author/name" or "author-name" + * @returns Latest version string or null if not found + */ + async getLatestVersion(moduleSlug: string): Promise { + const moduleInfo = await this.getModuleInfo(moduleSlug); + 
return moduleInfo?.current_release?.version ?? null; + } + + /** + * Check for updates for a list of modules + * + * @param modules - List of parsed modules to check + * @returns List of module update check results + */ + async checkForUpdates(modules: ParsedModule[]): Promise { + const results: ModuleUpdateCheckResult[] = []; + + // Process modules in parallel with concurrency limit + const concurrencyLimit = 5; + const forgeModules = modules.filter((m) => m.source === "forge"); + + for (let i = 0; i < forgeModules.length; i += concurrencyLimit) { + const batch = forgeModules.slice(i, i + concurrencyLimit); + const batchResults = await Promise.all( + batch.map((mod) => this.checkModuleUpdate(mod)) + ); + results.push(...batchResults); + } + + // Add git modules without update check (can't check git repos via Forge) + const gitModules = modules.filter((m) => m.source === "git"); + for (const mod of gitModules) { + results.push({ + module: mod, + currentVersion: mod.version, + latestVersion: mod.version, + hasUpdate: false, + deprecated: false, + }); + } + + return results; + } + + /** + * Check for update for a single module + */ + private async checkModuleUpdate(module: ParsedModule): Promise { + const slug = module.forgeSlug ?? module.name; + + try { + const moduleInfo = await this.getModuleInfo(slug); + + if (!moduleInfo) { + return { + module, + currentVersion: module.version, + latestVersion: module.version, + hasUpdate: false, + deprecated: false, + error: `Module not found on Puppet Forge: ${slug}`, + }; + } + + const latestVersion = moduleInfo.current_release?.version ?? module.version; + const hasUpdate = this.isNewerVersion(latestVersion, module.version); + + // Get security status + const securityStatus = await this.getSecurityStatus(slug, module.version); + + return { + module, + currentVersion: module.version, + latestVersion, + hasUpdate, + deprecated: moduleInfo.deprecated_at !== null, + deprecatedFor: moduleInfo.deprecated_for ?? 
undefined, + supersededBy: moduleInfo.superseded_by?.slug, + securityStatus, + }; + } catch (error) { + return { + module, + currentVersion: module.version, + latestVersion: module.version, + hasUpdate: false, + deprecated: false, + error: this.getErrorMessage(error), + }; + } + } + + /** + * Convert update check results to ModuleUpdate format + */ + toModuleUpdates(results: ModuleUpdateCheckResult[]): ModuleUpdate[] { + return results.map((result) => { + const hasSecurityAdvisory = result.securityStatus?.hasAdvisories ?? false; + + let changelog: string | undefined; + if (result.deprecated) { + changelog = `Deprecated${result.deprecatedFor ? `: ${result.deprecatedFor}` : ""}${result.supersededBy ? `. Superseded by ${result.supersededBy}` : ""}`; + } + if (result.securityStatus?.advisories && result.securityStatus.advisories.length > 0) { + const advisoryInfo = result.securityStatus.advisories + .map((a) => `${a.severity.toUpperCase()}: ${a.title}`) + .join("; "); + changelog = changelog ? `${changelog}. Security: ${advisoryInfo}` : `Security: ${advisoryInfo}`; + } + + return { + name: result.module.name, + currentVersion: result.currentVersion, + latestVersion: result.latestVersion, + source: result.module.source, + hasSecurityAdvisory, + changelog, + }; + }); + } + + /** + * Compare two semantic versions + * + * @returns true if version1 is newer than version2 + */ + isNewerVersion(version1: string, version2: string): boolean { + // Handle special cases + if (version2 === "latest" || version2 === "HEAD" || version2 === "local") { + return false; + } + + // Parse versions + const v1Parts = this.parseVersion(version1); + const v2Parts = this.parseVersion(version2); + + // Compare major, minor, patch + for (let i = 0; i < Math.max(v1Parts.length, v2Parts.length); i++) { + const p1 = v1Parts[i] ?? 0; + const p2 = v2Parts[i] ?? 
0; + + if (p1 > p2) return true; + if (p1 < p2) return false; + } + + return false; + } + + /** + * Parse a version string into numeric parts + */ + private parseVersion(version: string): number[] { + // Remove leading 'v' if present + const cleaned = version.replace(/^v/, ""); + + // Split by dots and convert to numbers + return cleaned.split(".").map((part) => { + // Extract numeric portion (handles things like "1.0.0-rc1") + const match = part.match(/^(\d+)/); + return match ? parseInt(match[1], 10) : 0; + }); + } + + /** + * Normalize module slug to Forge format (author-name) + */ + private normalizeSlug(slug: string): string { + // Convert author/name to author-name + return slug.replace("/", "-"); + } + + /** + * Fetch with timeout + */ + private async fetchWithTimeout(url: string): Promise { + const controller = new AbortController(); + const timeoutId = setTimeout(() => controller.abort(), this.timeout); + + try { + const response = await fetch(url, { + headers: { + "User-Agent": this.userAgent, + Accept: "application/json", + }, + signal: controller.signal, + }); + return response; + } finally { + clearTimeout(timeoutId); + } + } + + /** + * Extract error message from unknown error + */ + private getErrorMessage(error: unknown): string { + return error instanceof Error ? 
error.message : String(error); + } + + /** + * Log a message + */ + private log(message: string, level: "info" | "warn" | "error" = "info"): void { + const prefix = "[ForgeClient]"; + switch (level) { + case "warn": + console.warn(prefix, message); + break; + case "error": + console.error(prefix, message); + break; + default: + // eslint-disable-next-line no-console + console.log(prefix, message); + } + } +} diff --git a/backend/src/integrations/hiera/HieraParser.ts b/backend/src/integrations/hiera/HieraParser.ts new file mode 100644 index 0000000..c642325 --- /dev/null +++ b/backend/src/integrations/hiera/HieraParser.ts @@ -0,0 +1,789 @@ +/** + * HieraParser + * + * Parses hiera.yaml configuration files in Hiera 5 format. + * Extracts hierarchy levels, paths, data providers, and lookup options. + */ + +import * as fs from "fs"; +import * as path from "path"; +import { parse as parseYaml, YAMLParseError } from "yaml"; +import type { + HieraConfig, + HieraDefaults, + HierarchyLevel, + LookupOptions, + LookupMethod, + Facts, + HieraError, +} from "./types"; +import { HIERA_ERROR_CODES } from "./types"; + +/** + * Result of parsing a Hiera configuration + */ +export interface HieraParseResult { + success: boolean; + config?: HieraConfig; + error?: HieraError; +} + +/** + * Result of validating a Hiera configuration + */ +export interface ValidationResult { + valid: boolean; + errors: string[]; + warnings: string[]; +} + +/** + * Supported data backends + */ +export type DataBackend = "yaml" | "json" | "eyaml"; + +/** + * Detected backend information + */ +export interface BackendInfo { + type: DataBackend; + datadir: string; + options?: Record; +} + +/** + * HieraParser class for parsing Hiera 5 configuration files + */ +export class HieraParser { + private controlRepoPath: string; + + constructor(controlRepoPath: string) { + this.controlRepoPath = controlRepoPath; + } + + + /** + * Parse a hiera.yaml configuration file + * + * @param configPath - Path to hiera.yaml 
(relative to control repo or absolute) + * @returns Parse result with config or error + */ + async parse(configPath: string): Promise { + const fullPath = this.resolvePath(configPath); + + // Check if file exists + if (!fs.existsSync(fullPath)) { + return { + success: false, + error: { + code: HIERA_ERROR_CODES.INVALID_PATH, + message: `Hiera configuration file not found: ${fullPath}`, + details: { + file: fullPath, + suggestion: "Ensure the hiera.yaml file exists in your control repository", + }, + }, + }; + } + + // Read file content + let content: string; + try { + content = fs.readFileSync(fullPath, "utf-8"); + } catch (error) { + return { + success: false, + error: { + code: HIERA_ERROR_CODES.INVALID_PATH, + message: `Failed to read hiera.yaml: ${error instanceof Error ? error.message : String(error)}`, + details: { + file: fullPath, + }, + }, + }; + } + + // Parse YAML content + return this.parseContent(content, fullPath); + } + + /** + * Parse YAML content string + * + * @param content - YAML content string + * @param filePath - Path for error reporting + * @returns Parse result with config or error + */ + parseContent(content: string, filePath: string = "hiera.yaml"): HieraParseResult { + let rawConfig: unknown; + + try { + rawConfig = parseYaml(content, { + strict: true, + uniqueKeys: true, + }); + } catch (error) { + if (error instanceof YAMLParseError) { + return { + success: false, + error: { + code: HIERA_ERROR_CODES.PARSE_ERROR, + message: `YAML syntax error: ${error.message}`, + details: { + file: filePath, + line: error.linePos?.[0]?.line, + suggestion: "Check YAML syntax at the indicated line", + }, + }, + }; + } + return { + success: false, + error: { + code: HIERA_ERROR_CODES.PARSE_ERROR, + message: `Failed to parse YAML: ${error instanceof Error ? 
error.message : String(error)}`, + details: { + file: filePath, + }, + }, + }; + } + + // Validate and transform to HieraConfig + return this.validateAndTransform(rawConfig, filePath); + } + + + /** + * Validate raw config and transform to HieraConfig + * + * @param rawConfig - Raw parsed YAML object + * @param filePath - Path for error reporting + * @returns Parse result with validated config or error + */ + private validateAndTransform(rawConfig: unknown, filePath: string): HieraParseResult { + if (!rawConfig || typeof rawConfig !== "object") { + return { + success: false, + error: { + code: HIERA_ERROR_CODES.PARSE_ERROR, + message: "Invalid hiera.yaml: expected an object", + details: { + file: filePath, + suggestion: "Ensure hiera.yaml contains valid Hiera 5 configuration", + }, + }, + }; + } + + const config = rawConfig as Record; + + // Validate version + if (config.version !== 5) { + return { + success: false, + error: { + code: HIERA_ERROR_CODES.PARSE_ERROR, + message: `Unsupported Hiera version: ${String(config.version)}. Only Hiera 5 is supported.`, + details: { + file: filePath, + suggestion: "Set version: 5 in your hiera.yaml", + }, + }, + }; + } + + // Validate hierarchy + if (!Array.isArray(config.hierarchy)) { + return { + success: false, + error: { + code: HIERA_ERROR_CODES.PARSE_ERROR, + message: "Invalid hiera.yaml: 'hierarchy' must be an array", + details: { + file: filePath, + suggestion: "Add a hierarchy array with at least one level", + }, + }, + }; + } + + // Parse hierarchy levels + const hierarchy: HierarchyLevel[] = []; + for (let i = 0; i < config.hierarchy.length; i++) { + const level = config.hierarchy[i]; + const parsedLevel = this.parseHierarchyLevel(level, i, filePath); + if (!parsedLevel.success) { + return { + success: false, + error: parsedLevel.error, + }; + } + hierarchy.push(parsedLevel.level!); + } + + // Parse defaults if present + const defaults = config.defaults + ? 
this.parseDefaults(config.defaults as Record) + : undefined; + + const hieraConfig: HieraConfig = { + version: 5, + hierarchy, + defaults, + }; + + return { + success: true, + config: hieraConfig, + }; + } + + + /** + * Parse a single hierarchy level + * + * @param level - Raw hierarchy level object + * @param index - Index in hierarchy array + * @param filePath - Path for error reporting + * @returns Parsed hierarchy level or error + */ + private parseHierarchyLevel( + level: unknown, + index: number, + filePath: string + ): { success: boolean; level?: HierarchyLevel; error?: HieraError } { + if (!level || typeof level !== "object") { + return { + success: false, + error: { + code: HIERA_ERROR_CODES.PARSE_ERROR, + message: `Invalid hierarchy level at index ${index}: expected an object`, + details: { + file: filePath, + }, + }, + }; + } + + const rawLevel = level as Record; + + // Name is required + if (typeof rawLevel.name !== "string" || !rawLevel.name) { + return { + success: false, + error: { + code: HIERA_ERROR_CODES.PARSE_ERROR, + message: `Hierarchy level at index ${index} missing required 'name' field`, + details: { + file: filePath, + }, + }, + }; + } + + const hierarchyLevel: HierarchyLevel = { + name: rawLevel.name, + }; + + // Parse path/paths + if (typeof rawLevel.path === "string") { + hierarchyLevel.path = rawLevel.path; + } + if (Array.isArray(rawLevel.paths)) { + hierarchyLevel.paths = rawLevel.paths.filter( + (p): p is string => typeof p === "string" + ); + } + + // Parse glob/globs + if (typeof rawLevel.glob === "string") { + hierarchyLevel.glob = rawLevel.glob; + } + if (Array.isArray(rawLevel.globs)) { + hierarchyLevel.globs = rawLevel.globs.filter( + (g): g is string => typeof g === "string" + ); + } + + // Parse datadir + if (typeof rawLevel.datadir === "string") { + hierarchyLevel.datadir = rawLevel.datadir; + } + + // Parse data_hash (backend type) + if (typeof rawLevel.data_hash === "string") { + hierarchyLevel.data_hash = 
rawLevel.data_hash; + } + + // Parse lookup_key + if (typeof rawLevel.lookup_key === "string") { + hierarchyLevel.lookup_key = rawLevel.lookup_key; + } + + // Parse mapped_paths + if (Array.isArray(rawLevel.mapped_paths) && rawLevel.mapped_paths.length === 3) { + const [var1, var2, template] = rawLevel.mapped_paths; + if (typeof var1 === "string" && typeof var2 === "string" && typeof template === "string") { + hierarchyLevel.mapped_paths = [var1, var2, template]; + } + } + + // Parse options + if (rawLevel.options && typeof rawLevel.options === "object") { + hierarchyLevel.options = rawLevel.options as Record; + } + + return { + success: true, + level: hierarchyLevel, + }; + } + + + /** + * Parse defaults section + * + * @param defaults - Raw defaults object + * @returns Parsed defaults + */ + private parseDefaults(defaults: Record): HieraDefaults { + const result: HieraDefaults = {}; + + if (typeof defaults.datadir === "string") { + result.datadir = defaults.datadir; + } + if (typeof defaults.data_hash === "string") { + result.data_hash = defaults.data_hash; + } + if (typeof defaults.lookup_key === "string") { + result.lookup_key = defaults.lookup_key; + } + if (defaults.options && typeof defaults.options === "object") { + result.options = defaults.options as Record; + } + + return result; + } + + /** + * Validate a parsed Hiera configuration + * + * @param config - Parsed Hiera configuration + * @returns Validation result with errors and warnings + */ + validateConfig(config: HieraConfig): ValidationResult { + const errors: string[] = []; + const warnings: string[] = []; + + // Check version + if (config.version !== 5) { + errors.push(`Unsupported Hiera version: ${config.version}`); + } + + // Check hierarchy + if (!config.hierarchy || config.hierarchy.length === 0) { + errors.push("Hierarchy is empty - at least one level is required"); + } + + // Validate each hierarchy level + for (const level of config.hierarchy) { + // Check for path specification + const 
hasPath = level.path || level.paths || level.glob || level.globs || level.mapped_paths; + if (!hasPath) { + warnings.push(`Hierarchy level '${level.name}' has no path specification`); + } + + // Check for data provider + const hasProvider = level.data_hash || level.lookup_key || config.defaults?.data_hash; + if (!hasProvider) { + warnings.push(`Hierarchy level '${level.name}' has no data provider specified`); + } + } + + return { + valid: errors.length === 0, + errors, + warnings, + }; + } + + + /** + * Detect the data backend type from a hierarchy level + * + * @param level - Hierarchy level + * @param defaults - Default settings + * @returns Detected backend info + */ + detectBackend(level: HierarchyLevel, defaults?: HieraDefaults): BackendInfo { + const dataHash = level.data_hash ?? defaults?.data_hash ?? "yaml_data"; + const datadir = level.datadir ?? defaults?.datadir ?? "data"; + + let type: DataBackend = "yaml"; + + if (dataHash.includes("json")) { + type = "json"; + } else if (dataHash.includes("eyaml") || level.lookup_key?.includes("eyaml")) { + type = "eyaml"; + } + + return { + type, + datadir, + options: level.options ?? defaults?.options, + }; + } + + /** + * Expand hierarchy paths with fact interpolation + * + * @param config - Hiera configuration + * @param facts - Node facts for interpolation + * @returns Array of expanded file paths + */ + expandHierarchyPaths(config: HieraConfig, facts: Facts): string[] { + const paths: string[] = []; + + for (const level of config.hierarchy) { + const datadir = level.datadir ?? config.defaults?.datadir ?? 
"data"; + const levelPaths = this.getLevelPaths(level); + + for (const levelPath of levelPaths) { + const interpolatedPath = this.interpolatePath(levelPath, facts); + const fullPath = path.join(datadir, interpolatedPath); + paths.push(fullPath); + } + } + + return paths; + } + + /** + * Get all paths from a hierarchy level + * + * @param level - Hierarchy level + * @returns Array of path templates + */ + private getLevelPaths(level: HierarchyLevel): string[] { + const paths: string[] = []; + + if (level.path) { + paths.push(level.path); + } + if (level.paths) { + paths.push(...level.paths); + } + if (level.glob) { + paths.push(level.glob); + } + if (level.globs) { + paths.push(...level.globs); + } + + return paths; + } + + + /** + * Interpolate variables in a path template + * + * Supports: + * - %{facts.xxx} - Hiera 5 fact syntax + * - %{::xxx} - Legacy top-scope variable syntax + * - %{xxx} - Simple variable syntax + * + * @param template - Path template with variables + * @param facts - Node facts for interpolation + * @param catalogVariables - Optional variables from catalog compilation + * @returns Interpolated path + */ + interpolatePath( + template: string, + facts: Facts, + catalogVariables: Record = {} + ): string { + // Pattern to match %{...} variables + const variablePattern = /%\{([^}]+)\}/g; + + return template.replace(variablePattern, (match, variable: string) => { + const value = this.resolveVariable(variable.trim(), facts, catalogVariables); + return value !== undefined ? 
String(value) : match; + }); + } + + /** + * Resolve a variable reference to its value + * + * @param variable - Variable reference (e.g., "facts.os.family", "::hostname") + * @param facts - Node facts + * @param catalogVariables - Optional variables from catalog compilation + * @returns Resolved value or undefined + */ + private resolveVariable( + variable: string, + facts: Facts, + catalogVariables: Record = {} + ): unknown { + // Handle facts.xxx syntax - always use facts + if (variable.startsWith("facts.")) { + const factPath = variable.slice(6); // Remove "facts." prefix + return this.getNestedValue(facts.facts, factPath); + } + + // Handle ::xxx legacy syntax (top-scope variables) - always use facts + if (variable.startsWith("::")) { + const factName = variable.slice(2); // Remove "::" prefix + return this.getNestedValue(facts.facts, factName); + } + + // Handle trusted.xxx syntax + if (variable.startsWith("trusted.")) { + const trustedPath = variable.slice(8); + const trusted = facts.facts["trusted"] as Record | undefined; + if (trusted) { + return this.getNestedValue(trusted, trustedPath); + } + return undefined; + } + + // Handle server_facts.xxx syntax + if (variable.startsWith("server_facts.")) { + const serverPath = variable.slice(13); + const serverFacts = facts.facts["server_facts"] as Record | undefined; + if (serverFacts) { + return this.getNestedValue(serverFacts, serverPath); + } + return undefined; + } + + // For other variables, check catalog variables first (code-defined variables) + if (Object.hasOwn(catalogVariables, variable)) { + return catalogVariables[variable]; + } + + // Check nested catalog variables + const catalogValue = this.getNestedValue(catalogVariables, variable); + if (catalogValue !== undefined) { + return catalogValue; + } + + // Fall back to direct fact lookup + return this.getNestedValue(facts.facts, variable); + } + + /** + * Get a nested value from an object using dot notation + * Uses Object.hasOwn() to prevent prototype 
pollution attacks + * + * @param obj - Object to traverse + * @param path - Dot-separated path (e.g., "os.family") + * @returns Value at path or undefined + */ + private getNestedValue(obj: Record, path: string): unknown { + const parts = path.split("."); + let current: unknown = obj; + + for (const part of parts) { + if (current === null || current === undefined) { + return undefined; + } + if (typeof current !== "object") { + return undefined; + } + // Use Object.hasOwn to prevent prototype pollution + if (!Object.hasOwn(current as Record, part)) { + return undefined; + } + current = (current as Record)[part]; + } + + return current; + } + + + /** + * Parse lookup_options from a hieradata file + * + * @param filePath - Path to hieradata file + * @returns Map of key to lookup options + */ + async parseLookupOptions(filePath: string): Promise> { + const fullPath = this.resolvePath(filePath); + const lookupOptionsMap = new Map(); + + if (!fs.existsSync(fullPath)) { + return lookupOptionsMap; + } + + let content: string; + try { + content = fs.readFileSync(fullPath, "utf-8"); + } catch { + return lookupOptionsMap; + } + + let data: unknown; + try { + data = parseYaml(content); + } catch { + return lookupOptionsMap; + } + + if (!data || typeof data !== "object") { + return lookupOptionsMap; + } + + const dataObj = data as Record; + const lookupOptions = dataObj["lookup_options"]; + + if (!lookupOptions || typeof lookupOptions !== "object") { + return lookupOptionsMap; + } + + const optionsObj = lookupOptions as Record; + + for (const [key, options] of Object.entries(optionsObj)) { + if (options && typeof options === "object") { + const parsedOptions = this.parseSingleLookupOptions(options as Record); + if (parsedOptions) { + lookupOptionsMap.set(key, parsedOptions); + } + } + } + + return lookupOptionsMap; + } + + /** + * Parse lookup options from content string + * + * @param content - YAML content string + * @returns Map of key to lookup options + */ + 
parseLookupOptionsFromContent(content: string): Map { + const lookupOptionsMap = new Map(); + + let data: unknown; + try { + data = parseYaml(content); + } catch { + return lookupOptionsMap; + } + + if (!data || typeof data !== "object") { + return lookupOptionsMap; + } + + const dataObj = data as Record; + const lookupOptions = dataObj["lookup_options"]; + + if (!lookupOptions || typeof lookupOptions !== "object") { + return lookupOptionsMap; + } + + const optionsObj = lookupOptions as Record; + + for (const [key, options] of Object.entries(optionsObj)) { + if (options && typeof options === "object") { + const parsedOptions = this.parseSingleLookupOptions(options as Record); + if (parsedOptions) { + lookupOptionsMap.set(key, parsedOptions); + } + } + } + + return lookupOptionsMap; + } + + + /** + * Parse a single lookup options object + * + * @param options - Raw options object + * @returns Parsed lookup options or undefined + */ + private parseSingleLookupOptions(options: Record): LookupOptions | undefined { + const result: LookupOptions = {}; + let hasValidOption = false; + + // Parse merge strategy + if (typeof options.merge === "string") { + const merge = options.merge.toLowerCase(); + if (this.isValidLookupMethod(merge)) { + result.merge = merge; + hasValidOption = true; + } + } else if (typeof options.merge === "object" && options.merge !== null) { + // Handle merge as object with strategy + const mergeObj = options.merge as Record; + if (typeof mergeObj.strategy === "string") { + const strategy = mergeObj.strategy.toLowerCase(); + if (this.isValidLookupMethod(strategy)) { + result.merge = strategy; + hasValidOption = true; + } + } + } + + // Parse convert_to + if (typeof options.convert_to === "string") { + const convertTo = options.convert_to; + if (convertTo === "Array" || convertTo === "Hash") { + result.convert_to = convertTo; + hasValidOption = true; + } + } + + // Parse knockout_prefix + if (typeof options.knockout_prefix === "string") { + 
result.knockout_prefix = options.knockout_prefix; + hasValidOption = true; + } + + return hasValidOption ? result : undefined; + } + + /** + * Check if a string is a valid lookup method + * + * @param method - Method string to check + * @returns true if valid + */ + private isValidLookupMethod(method: string): method is LookupMethod { + return ["first", "unique", "hash", "deep"].includes(method); + } + + /** + * Resolve a path relative to the control repository + * + * @param filePath - Path to resolve + * @returns Absolute path + */ + private resolvePath(filePath: string): string { + if (path.isAbsolute(filePath)) { + return filePath; + } + return path.join(this.controlRepoPath, filePath); + } + + /** + * Get the control repository path + * + * @returns Control repository path + */ + getControlRepoPath(): string { + return this.controlRepoPath; + } + + /** + * Serialize a HieraConfig back to YAML string + * + * @param config - Hiera configuration + * @returns YAML string + */ + serializeConfig(config: HieraConfig): string { + const { stringify } = require("yaml") as { stringify: (obj: unknown) => string }; + return stringify(config); + } +} diff --git a/backend/src/integrations/hiera/HieraPlugin.ts b/backend/src/integrations/hiera/HieraPlugin.ts new file mode 100644 index 0000000..5d950fc --- /dev/null +++ b/backend/src/integrations/hiera/HieraPlugin.ts @@ -0,0 +1,746 @@ +/** + * HieraPlugin + * + * Integration plugin for local Puppet control repository analysis. + * Provides Hiera data lookup, key resolution, and code analysis capabilities. + * + * Implements InformationSourcePlugin interface to integrate with the + * existing plugin architecture used by PuppetDB and Puppetserver integrations. 
+ * + * Requirements: 1.2, 1.3, 1.4, 1.6, 13.2, 13.3, 13.5 + */ + +import * as fs from "fs"; +import * as path from "path"; +import { BasePlugin } from "../BasePlugin"; +import type { + InformationSourcePlugin, + HealthStatus, +} from "../types"; +import type { Node, Facts } from "../../bolt/types"; +import type { IntegrationManager } from "../IntegrationManager"; +import { HieraService } from "./HieraService"; +import type { HieraServiceConfig } from "./HieraService"; +import { CodeAnalyzer } from "./CodeAnalyzer"; +import type { + HieraPluginConfig, + HieraHealthStatus, + CodeAnalysisResult, + HieraKeyIndex, + HieraResolution, + NodeHieraData, + KeyNodeValues, +} from "./types"; +import type { HieraConfig as SchemaHieraConfig } from "../../config/schema"; + +/** + * Control repository validation result + */ +interface ControlRepoValidationResult { + valid: boolean; + errors: string[]; + warnings: string[]; + structure: { + hasHieraYaml: boolean; + hasHieradataDir: boolean; + hasManifestsDir: boolean; + hasSiteModulesDir: boolean; + hasModulesDir: boolean; + hasPuppetfile: boolean; + }; +} + +/** + * HieraPlugin class + * + * Extends BasePlugin and implements InformationSourcePlugin to provide + * Hiera data lookup and code analysis capabilities. 
+ */ +export class HieraPlugin extends BasePlugin implements InformationSourcePlugin { + type = "information" as const; + + private hieraService: HieraService | null = null; + private codeAnalyzer: CodeAnalyzer | null = null; + private integrationManager: IntegrationManager | null = null; + private hieraConfig: HieraPluginConfig | null = null; + private validationResult: ControlRepoValidationResult | null = null; + + /** + * Create a new HieraPlugin instance + */ + constructor() { + super("hiera", "information"); + } + + /** + * Set the IntegrationManager for accessing other integrations + * + * @param manager - IntegrationManager instance + */ + setIntegrationManager(manager: IntegrationManager): void { + this.integrationManager = manager; + } + + + /** + * Perform plugin-specific initialization + * + * Validates the control repository and initializes HieraService and CodeAnalyzer. + * + * Requirements: 1.2, 1.3, 1.4 + */ + protected async performInitialization(): Promise { + // Extract Hiera config from integration config + this.hieraConfig = this.extractHieraConfig(this.config.config as SchemaHieraConfig); + + // Check if integration is disabled + if (!this.config.enabled) { + this.log("Hiera integration is disabled"); + return; + } + + // Check if configuration is missing + if (!this.hieraConfig.controlRepoPath) { + this.log("Hiera integration is not configured (missing controlRepoPath)"); + return; + } + + // Validate control repository structure + this.validationResult = this.validateControlRepository(this.hieraConfig.controlRepoPath); + + if (!this.validationResult.valid) { + const errorMsg = `Control repository validation failed: ${this.validationResult.errors.join(", ")}`; + this.log(errorMsg, "error"); + throw new Error(errorMsg); + } + + // Log warnings if any + for (const warning of this.validationResult.warnings) { + this.log(warning, "warn"); + } + + // Ensure IntegrationManager is set + if (!this.integrationManager) { + throw new 
Error("IntegrationManager must be set before initialization"); + } + + // Initialize HieraService + const hieraServiceConfig: HieraServiceConfig = { + controlRepoPath: this.hieraConfig.controlRepoPath, + hieraConfigPath: this.hieraConfig.hieraConfigPath, + factSources: this.hieraConfig.factSources, + cache: this.hieraConfig.cache, + catalogCompilation: this.hieraConfig.catalogCompilation, + }; + + this.hieraService = new HieraService(this.integrationManager, hieraServiceConfig); + await this.hieraService.initialize(); + + // Initialize CodeAnalyzer + this.codeAnalyzer = new CodeAnalyzer( + this.hieraConfig.controlRepoPath, + this.hieraConfig.codeAnalysis + ); + this.codeAnalyzer.setIntegrationManager(this.integrationManager); + this.codeAnalyzer.setHieraScanner(this.hieraService.getScanner()); + await this.codeAnalyzer.initialize(); + + this.log("Hiera plugin initialized successfully"); + this.log(`Control repo: ${this.hieraConfig.controlRepoPath}`); + this.log(`Hiera config: ${this.hieraConfig.hieraConfigPath}`); + } + + /** + * Extract and normalize HieraPluginConfig from schema config + * + * @param schemaConfig - Configuration from schema + * @returns Normalized HieraPluginConfig + */ + private extractHieraConfig(schemaConfig: SchemaHieraConfig): HieraPluginConfig { + return { + enabled: schemaConfig.enabled ?? false, + controlRepoPath: schemaConfig.controlRepoPath ?? "", + hieraConfigPath: schemaConfig.hieraConfigPath ?? "hiera.yaml", + environments: schemaConfig.environments ?? ["production"], + factSources: { + preferPuppetDB: schemaConfig.factSources?.preferPuppetDB ?? true, + localFactsPath: schemaConfig.factSources?.localFactsPath, + }, + catalogCompilation: { + enabled: schemaConfig.catalogCompilation?.enabled ?? false, + timeout: schemaConfig.catalogCompilation?.timeout ?? 60000, + cacheTTL: schemaConfig.catalogCompilation?.cacheTTL ?? 300000, + }, + cache: { + enabled: schemaConfig.cache?.enabled ?? true, + ttl: schemaConfig.cache?.ttl ?? 
300000, + maxEntries: schemaConfig.cache?.maxEntries ?? 10000, + }, + codeAnalysis: { + enabled: schemaConfig.codeAnalysis?.enabled ?? true, + lintEnabled: schemaConfig.codeAnalysis?.lintEnabled ?? true, + moduleUpdateCheck: schemaConfig.codeAnalysis?.moduleUpdateCheck ?? true, + analysisInterval: schemaConfig.codeAnalysis?.analysisInterval ?? 3600000, + exclusionPatterns: schemaConfig.codeAnalysis?.exclusionPatterns ?? [], + }, + }; + } + + + /** + * Validate control repository structure + * + * Checks that the path exists, is accessible, and contains expected Puppet structure. + * + * Requirements: 1.2, 1.3 + * + * @param controlRepoPath - Path to the control repository + * @returns Validation result with errors and warnings + */ + validateControlRepository(controlRepoPath: string): ControlRepoValidationResult { + const errors: string[] = []; + const warnings: string[] = []; + const structure = { + hasHieraYaml: false, + hasHieradataDir: false, + hasManifestsDir: false, + hasSiteModulesDir: false, + hasModulesDir: false, + hasPuppetfile: false, + }; + + // Check if path exists + if (!fs.existsSync(controlRepoPath)) { + errors.push(`Control repository path does not exist: ${controlRepoPath}`); + return { valid: false, errors, warnings, structure }; + } + + // Check if path is a directory + try { + const stats = fs.statSync(controlRepoPath); + if (!stats.isDirectory()) { + errors.push(`Control repository path is not a directory: ${controlRepoPath}`); + return { valid: false, errors, warnings, structure }; + } + } catch (error) { + errors.push(`Cannot access control repository path: ${controlRepoPath} - ${this.getErrorMessage(error)}`); + return { valid: false, errors, warnings, structure }; + } + + // Check for hiera.yaml (required) + const hieraYamlPath = path.join(controlRepoPath, this.hieraConfig?.hieraConfigPath ?? 
"hiera.yaml"); + if (fs.existsSync(hieraYamlPath)) { + structure.hasHieraYaml = true; + } else { + errors.push(`hiera.yaml not found at: ${hieraYamlPath}`); + } + + // Check for hieradata directory (common locations) + const hieradataPaths = ["data", "hieradata", "hiera"]; + for (const hieradataDir of hieradataPaths) { + const hieradataPath = path.join(controlRepoPath, hieradataDir); + if (fs.existsSync(hieradataPath) && fs.statSync(hieradataPath).isDirectory()) { + structure.hasHieradataDir = true; + break; + } + } + if (!structure.hasHieradataDir) { + warnings.push("No hieradata directory found (checked: data, hieradata, hiera)"); + } + + // Check for manifests directory (optional but common) + const manifestsPath = path.join(controlRepoPath, "manifests"); + if (fs.existsSync(manifestsPath) && fs.statSync(manifestsPath).isDirectory()) { + structure.hasManifestsDir = true; + } + + // Check for site-modules directory (optional) + const siteModulesPath = path.join(controlRepoPath, "site-modules"); + if (fs.existsSync(siteModulesPath) && fs.statSync(siteModulesPath).isDirectory()) { + structure.hasSiteModulesDir = true; + } + + // Check for modules directory (optional) + const modulesPath = path.join(controlRepoPath, "modules"); + if (fs.existsSync(modulesPath) && fs.statSync(modulesPath).isDirectory()) { + structure.hasModulesDir = true; + } + + // Check for Puppetfile (optional) + const puppetfilePath = path.join(controlRepoPath, "Puppetfile"); + if (fs.existsSync(puppetfilePath)) { + structure.hasPuppetfile = true; + } + + // Add warnings for missing optional components + if (!structure.hasManifestsDir && !structure.hasSiteModulesDir) { + warnings.push("No manifests or site-modules directory found - code analysis may be limited"); + } + + if (!structure.hasPuppetfile) { + warnings.push("No Puppetfile found - module update checking will be unavailable"); + } + + return { + valid: errors.length === 0, + errors, + warnings, + structure, + }; + } + + + /** + * Perform 
plugin-specific health check + * + * Checks control repo accessibility and hiera.yaml validity. + * + * Requirements: 13.2, 13.3 + * + * @returns Health status + */ + protected async performHealthCheck(): Promise> { + // Check if not configured + if (!this.hieraConfig?.controlRepoPath) { + return { + healthy: false, + message: "Hiera integration is not configured", + details: { + status: "not_configured", + }, + }; + } + + // Check if disabled + if (!this.config.enabled) { + return { + healthy: false, + message: "Hiera integration is disabled", + details: { + status: "disabled", + controlRepoPath: this.hieraConfig.controlRepoPath, + }, + }; + } + + // Validate control repository + const validation = this.validateControlRepository(this.hieraConfig.controlRepoPath); + + if (!validation.valid) { + return { + healthy: false, + message: `Control repository validation failed: ${validation.errors.join(", ")}`, + details: { + status: "error", + controlRepoPath: this.hieraConfig.controlRepoPath, + errors: validation.errors, + warnings: validation.warnings, + structure: validation.structure, + }, + }; + } + + // Check HieraService health + if (!this.hieraService?.isInitialized()) { + return { + healthy: false, + message: "HieraService is not initialized", + details: { + status: "error", + controlRepoPath: this.hieraConfig.controlRepoPath, + }, + }; + } + + // Get key index stats + let keyCount = 0; + let fileCount = 0; + let lastScanTime: string | undefined; + + try { + const keyIndex = await this.hieraService.getAllKeys(); + keyCount = keyIndex.totalKeys; + fileCount = keyIndex.totalFiles; + lastScanTime = keyIndex.lastScan; + } catch (error) { + return { + healthy: false, + message: `Failed to get Hiera key index: ${this.getErrorMessage(error)}`, + details: { + status: "error", + controlRepoPath: this.hieraConfig.controlRepoPath, + error: this.getErrorMessage(error), + }, + }; + } + + // Check hiera.yaml validity + const hieraConfigValid = 
this.hieraService.getHieraConfig() !== null; + + // Build health status + const healthStatus: HieraHealthStatus = { + healthy: true, + status: "connected", + message: "Hiera integration is healthy", + details: { + controlRepoAccessible: true, + hieraConfigValid, + factSourceAvailable: true, // Will be checked via FactService + lastScanTime, + keyCount, + fileCount, + }, + warnings: validation.warnings.length > 0 ? validation.warnings : undefined, + }; + + return { + healthy: healthStatus.healthy, + message: healthStatus.message, + details: healthStatus.details as Record, + }; + } + + + // ============================================================================ + // InformationSourcePlugin Interface Implementation + // ============================================================================ + + /** + * Get inventory of nodes + * + * Delegates to PuppetDB integration if available, otherwise returns empty array. + * The Hiera integration doesn't maintain its own node inventory. + * + * @returns Array of nodes + */ + async getInventory(): Promise { + // Hiera integration doesn't maintain its own inventory + // Delegate to PuppetDB if available + if (this.integrationManager) { + const puppetdb = this.integrationManager.getInformationSource("puppetdb"); + if (puppetdb?.isInitialized()) { + return puppetdb.getInventory(); + } + } + + // Return empty array if no PuppetDB + this.log("No PuppetDB integration available for inventory", "warn"); + return []; + } + + /** + * Get facts for a specific node + * + * Delegates to the FactService which handles PuppetDB and local fact sources. 
+ * + * @param nodeId - Node identifier (certname) + * @returns Facts for the node + */ + async getNodeFacts(nodeId: string): Promise { + this.ensureInitialized(); + + if (!this.hieraService) { + throw new Error("HieraService is not initialized"); + } + + const factResult = await this.hieraService.getFactService().getFacts(nodeId); + + // Convert to Facts format expected by interface + return { + nodeId: factResult.facts.nodeId, + gatheredAt: factResult.facts.gatheredAt, + facts: factResult.facts.facts, + } as Facts; + } + + /** + * Get arbitrary data for a node + * + * Supports data types: + * - 'hiera': All Hiera data for the node + * - 'hiera-key': Resolve a specific Hiera key (requires key in options) + * - 'analysis': Code analysis results + * + * @param nodeId - Node identifier + * @param dataType - Type of data to retrieve + * @returns Data of the requested type + */ + async getNodeData(nodeId: string, dataType: string): Promise { + this.ensureInitialized(); + + switch (dataType) { + case "hiera": + return this.getNodeHieraData(nodeId); + case "analysis": + return this.getCodeAnalysis(); + default: + throw new Error( + `Unsupported data type: ${dataType}. 
Supported types are: hiera, analysis` + ); + } + } + + // ============================================================================ + // Hiera-Specific Methods + // ============================================================================ + + /** + * Get the HieraService instance + * + * @returns HieraService instance + */ + getHieraService(): HieraService { + if (!this.hieraService) { + throw new Error("HieraService is not initialized"); + } + return this.hieraService; + } + + /** + * Get the CodeAnalyzer instance + * + * @returns CodeAnalyzer instance + */ + getCodeAnalyzer(): CodeAnalyzer { + if (!this.codeAnalyzer) { + throw new Error("CodeAnalyzer is not initialized"); + } + return this.codeAnalyzer; + } + + /** + * Get all Hiera keys + * + * @returns Key index with all discovered keys + */ + async getAllKeys(): Promise { + this.ensureInitialized(); + return this.hieraService!.getAllKeys(); + } + + /** + * Search for Hiera keys + * + * @param query - Search query + * @returns Array of matching keys + */ + async searchKeys(query: string): Promise { + this.ensureInitialized(); + const keys = await this.hieraService!.searchKeys(query); + // Convert array to Map for consistency + const keyMap = new Map(); + for (const key of keys) { + keyMap.set(key.name, key); + } + return keyMap; + } + + /** + * Resolve a Hiera key for a node + * + * @param nodeId - Node identifier + * @param key - Hiera key to resolve + * @param environment - Optional Puppet environment + * @returns Resolution result + */ + async resolveKey( + nodeId: string, + key: string, + environment?: string + ): Promise { + this.ensureInitialized(); + return this.hieraService!.resolveKey(nodeId, key, environment); + } + + /** + * Get all Hiera data for a node + * + * @param nodeId - Node identifier + * @returns Node Hiera data + */ + async getNodeHieraData(nodeId: string): Promise { + this.ensureInitialized(); + return this.hieraService!.getNodeHieraData(nodeId); + } + + /** + * Get key values 
across all nodes + * + * @param key - Hiera key to look up + * @returns Array of key values for each node + */ + async getKeyValuesAcrossNodes(key: string): Promise { + this.ensureInitialized(); + return this.hieraService!.getKeyValuesAcrossNodes(key); + } + + /** + * Get code analysis results + * + * @returns Code analysis result + */ + async getCodeAnalysis(): Promise { + this.ensureInitialized(); + if (!this.codeAnalyzer) { + throw new Error("CodeAnalyzer is not initialized"); + } + return this.codeAnalyzer.analyze(); + } + + + // ============================================================================ + // Enable/Disable Functionality + // ============================================================================ + + /** + * Enable the Hiera integration + * + * Re-initializes the plugin with the existing configuration. + * + * Requirements: 13.5 + */ + async enable(): Promise { + if (this.config.enabled) { + this.log("Hiera integration is already enabled"); + return; + } + + this.config.enabled = true; + await this.performInitialization(); + this.initialized = true; + this.log("Hiera integration enabled"); + } + + /** + * Disable the Hiera integration + * + * Stops the plugin without removing configuration. 
+ * + * Requirements: 13.5 + */ + async disable(): Promise { + if (!this.config.enabled) { + this.log("Hiera integration is already disabled"); + return; + } + + // Shutdown services + await this.shutdown(); + + this.config.enabled = false; + this.initialized = false; + this.log("Hiera integration disabled"); + } + + /** + * Check if the integration is enabled + * + * @returns true if enabled + */ + isEnabled(): boolean { + return this.config.enabled; + } + + // ============================================================================ + // Hot Reload Functionality + // ============================================================================ + + /** + * Reload control repository data + * + * Re-parses hiera.yaml and rescans hieradata without requiring restart. + * + * Requirements: 1.6 + */ + async reload(): Promise { + this.ensureInitialized(); + + this.log("Reloading control repository data..."); + + // Reload HieraService + if (this.hieraService) { + await this.hieraService.reloadControlRepo(); + } + + // Reload CodeAnalyzer + if (this.codeAnalyzer) { + await this.codeAnalyzer.reload(); + } + + this.log("Control repository data reloaded successfully"); + } + + /** + * Invalidate all caches + */ + invalidateCache(): void { + if (this.hieraService) { + this.hieraService.invalidateCache(); + } + if (this.codeAnalyzer) { + this.codeAnalyzer.clearCache(); + } + this.log("All caches invalidated"); + } + + // ============================================================================ + // Lifecycle Methods + // ============================================================================ + + /** + * Shutdown the plugin and clean up resources + */ + async shutdown(): Promise { + this.log("Shutting down Hiera plugin..."); + + if (this.hieraService) { + await this.hieraService.shutdown(); + this.hieraService = null; + } + + if (this.codeAnalyzer) { + this.codeAnalyzer.clearCache(); + this.codeAnalyzer = null; + } + + this.log("Hiera plugin shut down"); + } + + // 
============================================================================ + // Helper Methods + // ============================================================================ + + /** + * Ensure the plugin is initialized + */ + private ensureInitialized(): void { + if (!this.initialized || !this.config.enabled) { + throw new Error("Hiera plugin is not initialized or is disabled"); + } + } + + /** + * Extract error message from unknown error + */ + private getErrorMessage(error: unknown): string { + return error instanceof Error ? error.message : String(error); + } + + /** + * Get the current Hiera configuration + * + * @returns Hiera plugin configuration + */ + getHieraConfig(): HieraPluginConfig | null { + return this.hieraConfig; + } + + /** + * Get the control repository validation result + * + * @returns Validation result + */ + getValidationResult(): ControlRepoValidationResult | null { + return this.validationResult; + } +} diff --git a/backend/src/integrations/hiera/HieraResolver.ts b/backend/src/integrations/hiera/HieraResolver.ts new file mode 100644 index 0000000..976575b --- /dev/null +++ b/backend/src/integrations/hiera/HieraResolver.ts @@ -0,0 +1,885 @@ +/** + * HieraResolver + * + * Resolves Hiera keys using the configured hierarchy and node facts. + * Supports all lookup methods: first, unique, hash, deep. + * Tracks which hierarchy level provided the value and records all values from all levels. + * Optionally uses catalog compilation for code-defined variable resolution. 
+ */ + +import * as fs from "fs"; +import * as path from "path"; +import { parse as parseYaml } from "yaml"; +import type { + HieraConfig, + HierarchyLevel, + HieraResolution, + HieraKeyLocation, + LookupMethod, + LookupOptions, + ResolveOptions, + MergeOptions, + Facts, +} from "./types"; +import { HieraParser } from "./HieraParser"; + +/** + * Options for catalog-aware resolution + */ +export interface CatalogAwareResolveOptions extends ResolveOptions { + /** Variables extracted from catalog compilation */ + catalogVariables?: Record; + /** Warnings from catalog compilation */ + catalogWarnings?: string[]; +} + +/** + * HieraResolver class for resolving Hiera keys + */ +export class HieraResolver { + private controlRepoPath: string; + private parser: HieraParser; + private lookupOptionsCache: Map> = new Map(); + + constructor(controlRepoPath: string) { + this.controlRepoPath = controlRepoPath; + this.parser = new HieraParser(controlRepoPath); + } + + /** + * Resolve a Hiera key using the hierarchy and facts + * + * @param key - The Hiera key to resolve + * @param facts - Node facts for interpolation + * @param config - Hiera configuration + * @param options - Optional resolve options (including catalog variables) + * @returns Resolution result with value and metadata + */ + async resolve( + key: string, + facts: Facts, + config: HieraConfig, + options?: CatalogAwareResolveOptions + ): Promise { + // Collect all values from all hierarchy levels + const allValues: HieraKeyLocation[] = []; + const interpolatedVariables: Record = {}; + + // Get lookup options for this key (from hieradata or options parameter) + const lookupOptions = await this.getLookupOptionsForKey(key, config, facts); + const lookupMethod = options?.lookupMethod ?? lookupOptions?.merge ?? "first"; + const mergeOptions = options?.mergeOptions ?? this.buildMergeOptions(lookupOptions); + + // Merge catalog variables with facts for interpolation + const catalogVariables = options?.catalogVariables ?? 
{}; + + // Iterate through hierarchy levels + for (const level of config.hierarchy) { + const levelValues = await this.resolveFromLevel(key, level, config, facts, catalogVariables); + + for (const location of levelValues) { + // Interpolate the value using both facts and catalog variables + const { value: interpolatedValue, variables } = this.interpolateValueWithCatalog( + location.value, + facts, + catalogVariables + ); + + // Track interpolated variables + Object.assign(interpolatedVariables, variables); + + allValues.push({ + ...location, + value: interpolatedValue, + }); + } + } + + // If no values found, return not found result + if (allValues.length === 0) { + return this.createNotFoundResult(key, lookupMethod, options?.defaultValue); + } + + // Apply lookup method to get final value + const resolvedValue = this.applyLookupMethod( + allValues.map(v => v.value), + lookupMethod, + mergeOptions + ); + + // Find the source of the resolved value (first match for 'first', all for merge) + const sourceLocation = allValues[0]; + + const result: HieraResolution = { + key, + resolvedValue, + lookupMethod, + sourceFile: sourceLocation.file, + hierarchyLevel: sourceLocation.hierarchyLevel, + allValues, + interpolatedVariables: Object.keys(interpolatedVariables).length > 0 + ? 
interpolatedVariables + : undefined, + found: true, + }; + + // Add catalog warnings if present + if (options?.catalogWarnings && options.catalogWarnings.length > 0) { + // Store warnings in interpolatedVariables for now (could add dedicated field later) + result.interpolatedVariables = { + ...result.interpolatedVariables, + __catalogWarnings: options.catalogWarnings, + }; + } + + return result; + } + + + /** + * Resolve a key from a single hierarchy level + * + * @param key - The key to resolve + * @param level - Hierarchy level to search + * @param config - Hiera configuration + * @param facts - Node facts + * @param catalogVariables - Variables from catalog compilation + * @returns Array of key locations found in this level + */ + private async resolveFromLevel( + key: string, + level: HierarchyLevel, + config: HieraConfig, + facts: Facts, + catalogVariables: Record = {} + ): Promise { + const locations: HieraKeyLocation[] = []; + const datadir = level.datadir ?? config.defaults?.datadir ?? 
"data"; + const paths = this.getLevelPaths(level); + + for (const pathTemplate of paths) { + // Interpolate the path with facts and catalog variables + const interpolatedPath = this.parser.interpolatePath(pathTemplate, facts, catalogVariables); + const fullPath = this.resolvePath(path.join(datadir, interpolatedPath)); + + // Try to read and parse the file + const value = await this.getKeyFromFile(fullPath, key); + + if (value !== undefined) { + locations.push({ + file: path.join(datadir, interpolatedPath), + hierarchyLevel: level.name, + lineNumber: await this.findKeyLineNumber(fullPath, key), + value, + }); + } + } + + return locations; + } + + /** + * Get all paths from a hierarchy level + * + * @param level - Hierarchy level + * @returns Array of path templates + */ + private getLevelPaths(level: HierarchyLevel): string[] { + const paths: string[] = []; + + if (level.path) { + paths.push(level.path); + } + if (level.paths) { + paths.push(...level.paths); + } + if (level.glob) { + paths.push(level.glob); + } + if (level.globs) { + paths.push(...level.globs); + } + + return paths; + } + + /** + * Get a key's value from a hieradata file + * + * @param filePath - Path to the hieradata file + * @param key - Key to look up + * @returns Value or undefined if not found + */ + private async getKeyFromFile(filePath: string, key: string): Promise { + if (!fs.existsSync(filePath)) { + return undefined; + } + + let content: string; + try { + content = fs.readFileSync(filePath, "utf-8"); + } catch { + return undefined; + } + + let data: unknown; + try { + data = parseYaml(content); + } catch { + return undefined; + } + + if (!data || typeof data !== "object") { + return undefined; + } + + return this.getNestedValue(data as Record, key); + } + + /** + * Get a nested value from an object using dot notation + * Uses Object.hasOwn() to prevent prototype pollution attacks + * + * @param obj - Object to traverse + * @param key - Dot-separated key path + * @returns Value at path or 
undefined + */ + private getNestedValue(obj: Record, key: string): unknown { + // First try direct key lookup (for keys like "profile::nginx::port") + // Use Object.hasOwn to prevent prototype pollution + if (Object.hasOwn(obj, key)) { + return obj[key]; + } + + // Then try nested lookup using dot notation + const parts = key.split("."); + let current: unknown = obj; + + for (const part of parts) { + if (current === null || current === undefined) { + return undefined; + } + if (typeof current !== "object") { + return undefined; + } + // Use Object.hasOwn to prevent prototype pollution + if (!Object.hasOwn(current as Record, part)) { + return undefined; + } + current = (current as Record)[part]; + } + + return current; + } + + /** + * Find the line number where a key is defined in a file + * + * @param filePath - Path to the file + * @param key - Key to find + * @returns Line number (1-based) or 0 if not found + */ + private async findKeyLineNumber(filePath: string, key: string): Promise { + if (!fs.existsSync(filePath)) { + return 0; + } + + let content: string; + try { + content = fs.readFileSync(filePath, "utf-8"); + } catch { + return 0; + } + + const lines = content.split("\n"); + + // For direct keys (like "profile::nginx::port"), search for the key directly + const directKeyPattern = new RegExp(`^\\s*["']?${this.escapeRegex(key)}["']?\\s*:`); + + for (let i = 0; i < lines.length; i++) { + if (directKeyPattern.test(lines[i])) { + return i + 1; + } + } + + // For nested keys, search for the last part + const parts = key.split("."); + const lastPart = parts[parts.length - 1]; + const nestedKeyPattern = new RegExp(`^\\s*["']?${this.escapeRegex(lastPart)}["']?\\s*:`); + + for (let i = 0; i < lines.length; i++) { + if (nestedKeyPattern.test(lines[i])) { + return i + 1; + } + } + + return 0; + } + + /** + * Escape special regex characters in a string + */ + private escapeRegex(str: string): string { + return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); + } + + + /** 
+ * Apply the lookup method to combine values + * + * @param values - Array of values from hierarchy levels + * @param method - Lookup method to apply + * @param mergeOptions - Options for merge operations + * @returns Combined value + */ + private applyLookupMethod( + values: unknown[], + method: LookupMethod, + mergeOptions?: MergeOptions + ): unknown { + if (values.length === 0) { + return undefined; + } + + switch (method) { + case "first": + return values[0]; + + case "unique": + return this.mergeUnique(values, mergeOptions); + + case "hash": + return this.mergeHash(values, mergeOptions); + + case "deep": + return this.mergeDeep(values, mergeOptions); + + default: + return values[0]; + } + } + + /** + * Merge values using 'unique' strategy + * Combines arrays, removing duplicates + * + * @param values - Values to merge + * @param mergeOptions - Merge options + * @returns Merged array with unique values + */ + private mergeUnique(values: unknown[], mergeOptions?: MergeOptions): unknown[] { + const result: unknown[] = []; + const seen = new Set(); + const knockoutPrefix = mergeOptions?.knockoutPrefix; + + for (const value of values) { + if (Array.isArray(value)) { + for (const item of value) { + // Handle knockout prefix + if (knockoutPrefix && typeof item === "string" && item.startsWith(knockoutPrefix)) { + const knockedOut = item.slice(knockoutPrefix.length); + seen.add(JSON.stringify(knockedOut)); + continue; + } + + const key = JSON.stringify(item); + if (!seen.has(key)) { + seen.add(key); + result.push(item); + } + } + } else if (value !== undefined && value !== null) { + const key = JSON.stringify(value); + if (!seen.has(key)) { + seen.add(key); + result.push(value); + } + } + } + + if (mergeOptions?.sortMergedArrays) { + result.sort((a, b) => { + const aStr = JSON.stringify(a); + const bStr = JSON.stringify(b); + return aStr.localeCompare(bStr); + }); + } + + return result; + } + + /** + * Merge values using 'hash' strategy + * Combines hashes, with 
higher priority values winning + * + * @param values - Values to merge + * @param mergeOptions - Merge options + * @returns Merged hash + */ + private mergeHash(values: unknown[], mergeOptions?: MergeOptions): Record { + const result: Record = {}; + const knockoutPrefix = mergeOptions?.knockoutPrefix; + + // Process in reverse order so higher priority (earlier) values win + for (let i = values.length - 1; i >= 0; i--) { + const value = values[i]; + if (value && typeof value === "object" && !Array.isArray(value)) { + for (const [key, val] of Object.entries(value as Record)) { + // Handle knockout prefix + if (knockoutPrefix && key.startsWith(knockoutPrefix)) { + const knockedOut = key.slice(knockoutPrefix.length); + delete result[knockedOut]; + continue; + } + result[key] = val; + } + } + } + + return result; + } + + /** + * Merge values using 'deep' strategy + * Recursively merges hashes and arrays + * + * @param values - Values to merge + * @param mergeOptions - Merge options + * @returns Deep merged value + */ + private mergeDeep(values: unknown[], mergeOptions?: MergeOptions): unknown { + if (values.length === 0) { + return undefined; + } + + const knockoutPrefix = mergeOptions?.knockoutPrefix; + + // Start with the last value (lowest priority) and merge upward + let result: unknown = this.deepClone(values[values.length - 1]); + + for (let i = values.length - 2; i >= 0; i--) { + result = this.deepMergeTwo(result, values[i], knockoutPrefix, mergeOptions); + } + + return result; + } + + /** + * Deep merge two values + * + * @param base - Base value + * @param override - Override value + * @param knockoutPrefix - Prefix for knockout entries + * @param mergeOptions - Merge options + * @returns Merged value + */ + private deepMergeTwo( + base: unknown, + override: unknown, + knockoutPrefix?: string, + mergeOptions?: MergeOptions + ): unknown { + // If override is null/undefined, return base + if (override === null || override === undefined) { + return base; + } + + 
// If base is null/undefined, return override + if (base === null || base === undefined) { + return this.deepClone(override); + } + + // If both are arrays + if (Array.isArray(base) && Array.isArray(override)) { + if (mergeOptions?.mergeHashArrays) { + // Merge arrays element by element + const result = [...base]; + for (const item of override) { + if (knockoutPrefix && typeof item === "string" && item.startsWith(knockoutPrefix)) { + const knockedOut = item.slice(knockoutPrefix.length); + const idx = result.findIndex(r => JSON.stringify(r) === JSON.stringify(knockedOut)); + if (idx !== -1) { + result.splice(idx, 1); + } + } else if (!result.some(r => JSON.stringify(r) === JSON.stringify(item))) { + result.push(item); + } + } + return result; + } + // Default: override replaces base for arrays + return this.deepClone(override); + } + + // If both are objects + if ( + typeof base === "object" && + typeof override === "object" && + !Array.isArray(base) && + !Array.isArray(override) + ) { + const result: Record = { ...(base as Record) }; + + for (const [key, val] of Object.entries(override as Record)) { + // Handle knockout prefix + if (knockoutPrefix && key.startsWith(knockoutPrefix)) { + const knockedOut = key.slice(knockoutPrefix.length); + delete result[knockedOut]; + continue; + } + + if (key in result) { + result[key] = this.deepMergeTwo(result[key], val, knockoutPrefix, mergeOptions); + } else { + result[key] = this.deepClone(val); + } + } + + return result; + } + + // For primitives, override wins + return this.deepClone(override); + } + + /** + * Deep clone a value + */ + private deepClone(value: T): T { + if (value === null || value === undefined) { + return value; + } + if (typeof value !== "object") { + return value; + } + return JSON.parse(JSON.stringify(value)) as T; + } + + + /** + * Get lookup options for a key from hieradata files + * + * @param key - The key to get options for + * @param config - Hiera configuration + * @param facts - Node facts + * 
@returns Lookup options or undefined + */ + private async getLookupOptionsForKey( + key: string, + config: HieraConfig, + facts: Facts + ): Promise { + // Check each hierarchy level for lookup_options + for (const level of config.hierarchy) { + const datadir = level.datadir ?? config.defaults?.datadir ?? "data"; + const paths = this.getLevelPaths(level); + + for (const pathTemplate of paths) { + const interpolatedPath = this.parser.interpolatePath(pathTemplate, facts); + const fullPath = this.resolvePath(path.join(datadir, interpolatedPath)); + + // Check cache first + const cacheKey = fullPath; + let lookupOptionsMap = this.lookupOptionsCache.get(cacheKey); + + if (!lookupOptionsMap) { + lookupOptionsMap = await this.parser.parseLookupOptions(fullPath); + this.lookupOptionsCache.set(cacheKey, lookupOptionsMap); + } + + // Check for exact key match + if (lookupOptionsMap.has(key)) { + return lookupOptionsMap.get(key); + } + + // Check for pattern matches (e.g., "profile::*") + for (const [pattern, options] of lookupOptionsMap) { + if (this.matchesPattern(key, pattern)) { + return options; + } + } + } + } + + return undefined; + } + + /** + * Check if a key matches a pattern (supports * wildcard) + * + * @param key - Key to check + * @param pattern - Pattern to match against + * @returns True if matches + */ + private matchesPattern(key: string, pattern: string): boolean { + if (!pattern.includes("*")) { + return key === pattern; + } + + // Convert pattern to regex + const regexPattern = pattern + .replace(/[.+?^${}()|[\]\\]/g, "\\$&") + .replace(/\*/g, ".*"); + + const regex = new RegExp(`^${regexPattern}$`); + return regex.test(key); + } + + /** + * Build merge options from lookup options + * + * @param lookupOptions - Lookup options + * @returns Merge options + */ + private buildMergeOptions(lookupOptions?: LookupOptions): MergeOptions | undefined { + if (!lookupOptions?.merge) { + return undefined; + } + + return { + strategy: lookupOptions.merge, + 
knockoutPrefix: lookupOptions.knockout_prefix, + }; + } + + /** + * Interpolate variables in a value + * + * Supports: + * - %{facts.xxx} - Hiera 5 fact syntax + * - %{::xxx} - Legacy top-scope variable syntax + * - %{xxx} - Simple variable syntax + * + * @param value - Value to interpolate + * @param facts - Node facts + * @returns Interpolated value and variables used + */ + interpolateValue( + value: unknown, + facts: Facts + ): { value: unknown; variables: Record } { + return this.interpolateValueWithCatalog(value, facts, {}); + } + + /** + * Interpolate variables in a value using both facts and catalog variables + * + * Supports: + * - %{facts.xxx} - Hiera 5 fact syntax + * - %{::xxx} - Legacy top-scope variable syntax + * - %{xxx} - Simple variable syntax (checks catalog variables first, then facts) + * + * @param value - Value to interpolate + * @param facts - Node facts + * @param catalogVariables - Variables from catalog compilation + * @returns Interpolated value and variables used + */ + interpolateValueWithCatalog( + value: unknown, + facts: Facts, + catalogVariables: Record + ): { value: unknown; variables: Record } { + const variables: Record = {}; + + if (typeof value === "string") { + const interpolated = this.interpolateStringWithCatalog(value, facts, catalogVariables, variables); + return { value: interpolated, variables }; + } + + if (Array.isArray(value)) { + const interpolated = value.map(item => { + const result = this.interpolateValueWithCatalog(item, facts, catalogVariables); + Object.assign(variables, result.variables); + return result.value; + }); + return { value: interpolated, variables }; + } + + if (value && typeof value === "object") { + const interpolated: Record = {}; + for (const [key, val] of Object.entries(value as Record)) { + const result = this.interpolateValueWithCatalog(val, facts, catalogVariables); + Object.assign(variables, result.variables); + interpolated[key] = result.value; + } + return { value: interpolated, 
variables }; + } + + return { value, variables }; + } + + /** + * Interpolate variables in a string using both facts and catalog variables + * + * @param str - String to interpolate + * @param facts - Node facts + * @param catalogVariables - Variables from catalog compilation + * @param variables - Object to track used variables + * @returns Interpolated string + */ + private interpolateStringWithCatalog( + str: string, + facts: Facts, + catalogVariables: Record, + variables: Record + ): string { + const variablePattern = /%\{([^}]+)\}/g; + + return str.replace(variablePattern, (match, variable: string) => { + const trimmedVar = variable.trim(); + const value = this.resolveVariableWithCatalog(trimmedVar, facts, catalogVariables); + + if (value !== undefined) { + variables[trimmedVar] = value; + return String(value); + } + + // Return original if not resolved + return match; + }); + } + + /** + * Resolve a variable reference to its value, checking catalog variables first + * + * @param variable - Variable reference + * @param facts - Node facts + * @param catalogVariables - Variables from catalog compilation + * @returns Resolved value or undefined + */ + private resolveVariableWithCatalog( + variable: string, + facts: Facts, + catalogVariables: Record + ): unknown { + // Handle facts.xxx syntax - always use facts + if (variable.startsWith("facts.")) { + const factPath = variable.slice(6); + return this.getNestedFactValue(facts.facts, factPath); + } + + // Handle ::xxx legacy syntax - always use facts + if (variable.startsWith("::")) { + const factName = variable.slice(2); + return this.getNestedFactValue(facts.facts, factName); + } + + // Handle trusted.xxx syntax + if (variable.startsWith("trusted.")) { + const trustedPath = variable.slice(8); + const trusted = facts.facts["trusted"] as Record | undefined; + if (trusted) { + return this.getNestedFactValue(trusted, trustedPath); + } + return undefined; + } + + // Handle server_facts.xxx syntax + if 
(variable.startsWith("server_facts.")) { + const serverPath = variable.slice(13); + const serverFacts = facts.facts["server_facts"] as Record | undefined; + if (serverFacts) { + return this.getNestedFactValue(serverFacts, serverPath); + } + return undefined; + } + + // For other variables, check catalog variables first (code-defined variables) + // This allows Puppet code variables to override facts + if (Object.hasOwn(catalogVariables, variable)) { + return catalogVariables[variable]; + } + + // Check nested catalog variables (e.g., profile::nginx::port) + const catalogValue = this.getNestedValue(catalogVariables, variable); + if (catalogValue !== undefined) { + return catalogValue; + } + + // Fall back to direct fact lookup + return this.getNestedFactValue(facts.facts, variable); + } + + /** + * Get a nested value from facts using dot notation + * Uses Object.hasOwn() to prevent prototype pollution attacks + * + * @param obj - Object to traverse + * @param path - Dot-separated path + * @returns Value at path or undefined + */ + private getNestedFactValue(obj: Record, path: string): unknown { + const parts = path.split("."); + let current: unknown = obj; + + for (const part of parts) { + if (current === null || current === undefined) { + return undefined; + } + if (typeof current !== "object") { + return undefined; + } + // Use Object.hasOwn to prevent prototype pollution + if (!Object.hasOwn(current as Record, part)) { + return undefined; + } + current = (current as Record)[part]; + } + + return current; + } + + + /** + * Create a not-found result for a key + * + * @param key - The key that was not found + * @param lookupMethod - The lookup method used + * @param defaultValue - Optional default value + * @returns HieraResolution indicating not found + */ + private createNotFoundResult( + key: string, + lookupMethod: LookupMethod, + defaultValue?: unknown + ): HieraResolution { + return { + key, + resolvedValue: defaultValue, + lookupMethod, + sourceFile: "", + 
hierarchyLevel: "", + allValues: [], + found: false, + }; + } + + /** + * Resolve a path relative to the control repository + * + * @param filePath - Path to resolve + * @returns Absolute path + */ + private resolvePath(filePath: string): string { + if (path.isAbsolute(filePath)) { + return filePath; + } + return path.join(this.controlRepoPath, filePath); + } + + /** + * Clear the lookup options cache + */ + clearCache(): void { + this.lookupOptionsCache.clear(); + } + + /** + * Get the control repository path + * + * @returns Control repository path + */ + getControlRepoPath(): string { + return this.controlRepoPath; + } +} diff --git a/backend/src/integrations/hiera/HieraScanner.ts b/backend/src/integrations/hiera/HieraScanner.ts new file mode 100644 index 0000000..e99e202 --- /dev/null +++ b/backend/src/integrations/hiera/HieraScanner.ts @@ -0,0 +1,722 @@ +/** + * HieraScanner + * + * Scans hieradata directories to build an index of all Hiera keys. + * Tracks file paths, hierarchy levels, line numbers, and values for each key. 
+ */ + +import * as fs from "fs"; +import * as path from "path"; +import { parse as parseYaml } from "yaml"; +import type { + HieraKey, + HieraKeyLocation, + HieraKeyIndex, + HieraFileInfo, + LookupOptions, +} from "./types"; + +/** + * Result of scanning a single file + */ +export interface FileScanResult { + success: boolean; + keys: Map; + lookupOptions: Map; + error?: string; +} + +/** + * Callback for file change events + */ +export type FileChangeCallback = (changedFiles: string[]) => void; + +/** + * HieraScanner class for scanning hieradata directories + */ +export class HieraScanner { + private controlRepoPath: string; + private hieradataPath: string; + private keyIndex: HieraKeyIndex; + private fileWatcher: fs.FSWatcher | null = null; + private changeCallbacks: FileChangeCallback[] = []; + private isWatching = false; + + constructor(controlRepoPath: string, hieradataPath: string = "data") { + this.controlRepoPath = controlRepoPath; + this.hieradataPath = hieradataPath; + this.keyIndex = this.createEmptyIndex(); + } + + /** + * Scan the hieradata directory and build the key index + * + * @param hieradataPath - Optional override for hieradata path + * @returns The complete key index + */ + async scan(hieradataPath?: string): Promise { + const dataPath = hieradataPath ?? 
this.hieradataPath; + const fullPath = this.resolvePath(dataPath); + + // Reset the index + this.keyIndex = this.createEmptyIndex(); + + if (!fs.existsSync(fullPath)) { + console.warn(`[HieraScanner] Hieradata path does not exist: ${fullPath}`); + return this.keyIndex; + } + + // Recursively scan all YAML/JSON files + await this.scanDirectory(fullPath, dataPath); + + // Update metadata + this.keyIndex.lastScan = new Date().toISOString(); + this.keyIndex.totalKeys = this.keyIndex.keys.size; + this.keyIndex.totalFiles = this.keyIndex.files.size; + + return this.keyIndex; + } + + /** + * Get the current key index + * + * @returns The current key index + */ + getKeyIndex(): HieraKeyIndex { + return this.keyIndex; + } + + /** + * Get all keys from the index + * + * @returns Array of all HieraKey objects + */ + getAllKeys(): HieraKey[] { + return Array.from(this.keyIndex.keys.values()); + } + + /** + * Get a specific key by name + * + * @param keyName - The key name to look up + * @returns The HieraKey or undefined if not found + */ + getKey(keyName: string): HieraKey | undefined { + return this.keyIndex.keys.get(keyName); + } + + + /** + * Search for keys matching a query string + * + * Supports partial key name matching (case-insensitive). 
+ * + * @param query - Search query string + * @returns Array of matching HieraKey objects + */ + searchKeys(query: string): HieraKey[] { + if (!query || query.trim() === "") { + return this.getAllKeys(); + } + + const lowerQuery = query.toLowerCase(); + const results: HieraKey[] = []; + + for (const [keyName, key] of this.keyIndex.keys) { + if (keyName.toLowerCase().includes(lowerQuery)) { + results.push(key); + } + } + + return results; + } + + /** + * Watch the hieradata directory for changes + * + * @param callback - Callback to invoke when files change + */ + watchForChanges(callback: FileChangeCallback): void { + this.changeCallbacks.push(callback); + + if (this.isWatching) { + return; + } + + const fullPath = this.resolvePath(this.hieradataPath); + + if (!fs.existsSync(fullPath)) { + console.warn(`[HieraScanner] Cannot watch non-existent path: ${fullPath}`); + return; + } + + try { + this.fileWatcher = fs.watch( + fullPath, + { recursive: true }, + (_eventType, filename) => { + if (filename && this.isHieradataFile(filename)) { + this.notifyChange([filename]); + } + } + ); + this.isWatching = true; + } catch (error) { + console.error(`[HieraScanner] Failed to start file watcher: ${this.getErrorMessage(error)}`); + } + } + + /** + * Stop watching for file changes + */ + stopWatching(): void { + if (this.fileWatcher) { + this.fileWatcher.close(); + this.fileWatcher = null; + } + this.isWatching = false; + this.changeCallbacks = []; + } + + /** + * Recursively scan a directory for hieradata files + * + * @param dirPath - Absolute path to directory + * @param relativePath - Path relative to control repo + */ + private async scanDirectory(dirPath: string, relativePath: string): Promise { + let entries: fs.Dirent[]; + + try { + entries = fs.readdirSync(dirPath, { withFileTypes: true }); + } catch (error) { + console.warn(`[HieraScanner] Failed to read directory ${dirPath}: ${this.getErrorMessage(error)}`); + return; + } + + for (const entry of entries) { + const 
entryPath = path.join(dirPath, entry.name); + const entryRelativePath = path.join(relativePath, entry.name); + + if (entry.isDirectory()) { + await this.scanDirectory(entryPath, entryRelativePath); + } else if (entry.isFile() && this.isHieradataFile(entry.name)) { + await this.scanFile(entryPath, entryRelativePath); + } + } + } + + /** + * Scan a single hieradata file + * + * @param filePath - Absolute path to file + * @param relativePath - Path relative to control repo + */ + private async scanFile(filePath: string, relativePath: string): Promise { + const result = this.scanFileContent(filePath, relativePath); + + if (!result.success) { + console.warn(`[HieraScanner] Failed to scan file ${relativePath}: ${result.error}`); + return; + } + + // Get file stats for lastModified + let lastModified: string; + try { + const stats = fs.statSync(filePath); + lastModified = stats.mtime.toISOString(); + } catch { + lastModified = new Date().toISOString(); + } + + // Determine hierarchy level from path + const hierarchyLevel = this.determineHierarchyLevel(relativePath); + + // Add file info + const fileInfo: HieraFileInfo = { + path: relativePath, + hierarchyLevel, + keys: Array.from(result.keys.keys()), + lastModified, + }; + this.keyIndex.files.set(relativePath, fileInfo); + + // Merge keys into the index + for (const [keyName, location] of result.keys) { + this.addKeyLocation(keyName, location, result.lookupOptions.get(keyName)); + } + } + + + /** + * Scan a file and extract all keys with their locations + * + * @param filePath - Absolute path to file + * @param relativePath - Path relative to control repo + * @returns Scan result with keys and lookup options + */ + scanFileContent(filePath: string, relativePath: string): FileScanResult { + let content: string; + + try { + content = fs.readFileSync(filePath, "utf-8"); + } catch (error) { + return { + success: false, + keys: new Map(), + lookupOptions: new Map(), + error: `Failed to read file: 
${this.getErrorMessage(error)}`, + }; + } + + return this.parseFileContent(content, relativePath); + } + + /** + * Parse file content and extract keys + * + * @param content - File content string + * @param relativePath - Path relative to control repo + * @returns Scan result with keys and lookup options + */ + parseFileContent(content: string, relativePath: string): FileScanResult { + const keys = new Map(); + const lookupOptions = new Map(); + + let data: unknown; + try { + data = parseYaml(content, { strict: false }); + } catch (error) { + return { + success: false, + keys, + lookupOptions, + error: `YAML parse error: ${this.getErrorMessage(error)}`, + }; + } + + if (!data || typeof data !== "object") { + // Empty file or non-object content + return { success: true, keys, lookupOptions }; + } + + const hierarchyLevel = this.determineHierarchyLevel(relativePath); + + // Extract keys from the data + this.extractKeys( + data as Record, + "", + relativePath, + hierarchyLevel, + content, + keys + ); + + // Extract lookup_options if present + const dataObj = data as Record; + if (dataObj.lookup_options && typeof dataObj.lookup_options === "object") { + this.extractLookupOptions( + dataObj.lookup_options as Record, + lookupOptions + ); + } + + return { success: true, keys, lookupOptions }; + } + + /** + * Extract keys from a data object recursively + * + * Handles nested objects and builds dot-notation keys. 
+ * + * @param data - Data object to extract keys from + * @param prefix - Current key prefix for nested keys + * @param filePath - File path for location tracking + * @param hierarchyLevel - Hierarchy level name + * @param content - Original file content for line number detection + * @param keys - Map to store extracted keys + */ + private extractKeys( + data: Record, + prefix: string, + filePath: string, + hierarchyLevel: string, + content: string, + keys: Map + ): void { + for (const [key, value] of Object.entries(data)) { + // Skip lookup_options - it's metadata, not data + if (key === "lookup_options") { + continue; + } + + const fullKey = prefix ? `${prefix}.${key}` : key; + const lineNumber = this.findKeyLineNumber(content, key, prefix); + + // Add the key location + const location: HieraKeyLocation = { + file: filePath, + hierarchyLevel, + lineNumber, + value, + }; + keys.set(fullKey, location); + + // If value is an object (but not array), recurse to extract nested keys + // This supports both flat keys and nested structures + if (value !== null && typeof value === "object" && !Array.isArray(value)) { + this.extractKeys( + value as Record, + fullKey, + filePath, + hierarchyLevel, + content, + keys + ); + } + } + } + + /** + * Extract lookup options from lookup_options section + * + * @param lookupOptionsData - Raw lookup_options object + * @param lookupOptions - Map to store extracted options + */ + private extractLookupOptions( + lookupOptionsData: Record, + lookupOptions: Map + ): void { + for (const [key, options] of Object.entries(lookupOptionsData)) { + if (options && typeof options === "object") { + const parsed = this.parseLookupOptions(options as Record); + if (parsed) { + lookupOptions.set(key, parsed); + } + } + } + } + + + /** + * Parse a single lookup options object + * + * @param options - Raw options object + * @returns Parsed LookupOptions or undefined + */ + private parseLookupOptions(options: Record): LookupOptions | undefined { + const 
result: LookupOptions = {}; + let hasValidOption = false; + + // Parse merge strategy + if (typeof options.merge === "string") { + const merge = options.merge.toLowerCase(); + if (this.isValidLookupMethod(merge)) { + result.merge = merge; + hasValidOption = true; + } + } else if (typeof options.merge === "object" && options.merge !== null) { + const mergeObj = options.merge as Record; + if (typeof mergeObj.strategy === "string") { + const strategy = mergeObj.strategy.toLowerCase(); + if (this.isValidLookupMethod(strategy)) { + result.merge = strategy; + hasValidOption = true; + } + } + } + + // Parse convert_to + if (typeof options.convert_to === "string") { + const convertTo = options.convert_to; + if (convertTo === "Array" || convertTo === "Hash") { + result.convert_to = convertTo; + hasValidOption = true; + } + } + + // Parse knockout_prefix + if (typeof options.knockout_prefix === "string") { + result.knockout_prefix = options.knockout_prefix; + hasValidOption = true; + } + + return hasValidOption ? 
result : undefined; + } + + /** + * Check if a string is a valid lookup method + */ + private isValidLookupMethod(method: string): method is "first" | "unique" | "hash" | "deep" { + return ["first", "unique", "hash", "deep"].includes(method); + } + + /** + * Find the line number where a key is defined + * + * @param content - File content + * @param key - Key name to find + * @param _prefix - Parent key prefix (for nested keys) - unused but kept for API consistency + * @returns Line number (1-based) or 0 if not found + */ + private findKeyLineNumber(content: string, key: string, _prefix: string): number { + const lines = content.split("\n"); + + // Escape special regex characters in the key + const escapedKey = key.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); + + // Pattern to match the key at the start of a line (with optional indentation) + const keyPattern = new RegExp(`^\\s*["']?${escapedKey}["']?\\s*:`); + + for (let i = 0; i < lines.length; i++) { + if (keyPattern.test(lines[i])) { + return i + 1; // 1-based line numbers + } + } + + return 0; // Not found + } + + /** + * Determine the hierarchy level from a file path + * + * @param relativePath - Path relative to hieradata directory + * @returns Hierarchy level name + */ + private determineHierarchyLevel(relativePath: string): string { + // Extract meaningful hierarchy level from path + const parts = relativePath.split(path.sep); + + // Remove the data directory prefix if present + if (parts[0] === "data" || parts[0] === "hieradata") { + parts.shift(); + } + + // Common patterns: + // - nodes/hostname.yaml -> "Per-node data" + // - os/family.yaml -> "Per-OS data" + // - environments/env.yaml -> "Per-environment data" + // - common.yaml -> "Common data" + + if (parts.length === 0) { + return "Common data"; + } + + const firstPart = parts[0].toLowerCase(); + const fileName = parts[parts.length - 1]; + + if (fileName === "common.yaml" || fileName === "common.json") { + return "Common data"; + } + + if (firstPart === 
"nodes" || firstPart === "node") { + return "Per-node data"; + } + + if (firstPart === "os" || firstPart === "osfamily") { + return "Per-OS data"; + } + + if (firstPart === "environments" || firstPart === "environment") { + return "Per-environment data"; + } + + if (firstPart === "roles" || firstPart === "role") { + return "Per-role data"; + } + + if (firstPart === "datacenter" || firstPart === "datacenters") { + return "Per-datacenter data"; + } + + // Default: use the directory name + return `${parts[0]} data`; + } + + /** + * Add a key location to the index + * + * @param keyName - Full key name + * @param location - Key location + * @param lookupOptions - Optional lookup options for the key + */ + private addKeyLocation( + keyName: string, + location: HieraKeyLocation, + lookupOptions?: LookupOptions + ): void { + let key = this.keyIndex.keys.get(keyName); + + if (!key) { + key = { + name: keyName, + locations: [], + lookupOptions, + }; + this.keyIndex.keys.set(keyName, key); + } + + // Add the location + key.locations.push(location); + + // Update lookup options if provided and not already set + if (lookupOptions && !key.lookupOptions) { + key.lookupOptions = lookupOptions; + } + } + + + /** + * Check if a filename is a hieradata file + * + * @param filename - File name to check + * @returns True if it's a YAML or JSON file + */ + private isHieradataFile(filename: string): boolean { + const ext = path.extname(filename).toLowerCase(); + return [".yaml", ".yml", ".json", ".eyaml"].includes(ext); + } + + /** + * Notify all callbacks of file changes + * + * @param changedFiles - Array of changed file paths + */ + private notifyChange(changedFiles: string[]): void { + for (const callback of this.changeCallbacks) { + try { + callback(changedFiles); + } catch (error) { + console.error(`[HieraScanner] Error in change callback: ${this.getErrorMessage(error)}`); + } + } + } + + /** + * Create an empty key index + * + * @returns Empty HieraKeyIndex + */ + private 
createEmptyIndex(): HieraKeyIndex { + return { + keys: new Map(), + files: new Map(), + lastScan: "", + totalKeys: 0, + totalFiles: 0, + }; + } + + /** + * Resolve a path relative to the control repository + * + * @param filePath - Path to resolve + * @returns Absolute path + */ + private resolvePath(filePath: string): string { + if (path.isAbsolute(filePath)) { + return filePath; + } + return path.join(this.controlRepoPath, filePath); + } + + /** + * Extract error message from unknown error + * + * @param error - Unknown error + * @returns Error message string + */ + private getErrorMessage(error: unknown): string { + return error instanceof Error ? error.message : String(error); + } + + /** + * Get the control repository path + * + * @returns Control repository path + */ + getControlRepoPath(): string { + return this.controlRepoPath; + } + + /** + * Get the hieradata path + * + * @returns Hieradata path + */ + getHieradataPath(): string { + return this.hieradataPath; + } + + /** + * Update the hieradata path + * + * @param hieradataPath - New hieradata path + */ + setHieradataPath(hieradataPath: string): void { + this.hieradataPath = hieradataPath; + } + + /** + * Check if the scanner is currently watching for changes + * + * @returns True if watching + */ + isWatchingForChanges(): boolean { + return this.isWatching; + } + + /** + * Invalidate the cache for specific files + * + * @param filePaths - Array of file paths to invalidate + */ + invalidateFiles(filePaths: string[]): void { + for (const filePath of filePaths) { + const fileInfo = this.keyIndex.files.get(filePath); + if (fileInfo) { + // Remove keys that were only in this file + for (const keyName of fileInfo.keys) { + const key = this.keyIndex.keys.get(keyName); + if (key) { + // Remove locations from this file + key.locations = key.locations.filter(loc => loc.file !== filePath); + // If no locations left, remove the key + if (key.locations.length === 0) { + this.keyIndex.keys.delete(keyName); + } + } + 
} + // Remove file info + this.keyIndex.files.delete(filePath); + } + } + + // Update counts + this.keyIndex.totalKeys = this.keyIndex.keys.size; + this.keyIndex.totalFiles = this.keyIndex.files.size; + } + + /** + * Rescan specific files and update the index + * + * @param filePaths - Array of file paths to rescan + */ + async rescanFiles(filePaths: string[]): Promise { + // First invalidate the files + this.invalidateFiles(filePaths); + + // Then rescan each file + for (const relativePath of filePaths) { + const fullPath = this.resolvePath(relativePath); + if (fs.existsSync(fullPath)) { + await this.scanFile(fullPath, relativePath); + } + } + + // Update metadata + this.keyIndex.lastScan = new Date().toISOString(); + this.keyIndex.totalKeys = this.keyIndex.keys.size; + this.keyIndex.totalFiles = this.keyIndex.files.size; + } +} diff --git a/backend/src/integrations/hiera/HieraService.ts b/backend/src/integrations/hiera/HieraService.ts new file mode 100644 index 0000000..f2ee83a --- /dev/null +++ b/backend/src/integrations/hiera/HieraService.ts @@ -0,0 +1,960 @@ +/** + * HieraService + * + * Core service orchestrating Hiera operations including parsing, scanning, + * resolution, and fact retrieval. Implements caching for performance optimization. + * Supports optional catalog compilation for code-defined variable resolution. 
+ * + * Requirements: 15.1, 15.5 - Cache parsed hieradata and resolved values + * Requirements: 12.2, 12.3, 12.4 - Catalog compilation mode with fallback + */ + +import type { IntegrationManager } from "../IntegrationManager"; +import { HieraParser } from "./HieraParser"; +import { HieraScanner } from "./HieraScanner"; +import { HieraResolver } from "./HieraResolver"; +import type { CatalogAwareResolveOptions } from "./HieraResolver"; +import { FactService } from "./FactService"; +import { CatalogCompiler } from "./CatalogCompiler"; +import type { + HieraConfig, + HieraKey, + HieraKeyIndex, + HieraResolution, + NodeHieraData, + KeyNodeValues, + ValueGroup, + Facts, + HieraCacheConfig, + FactSourceConfig, + CatalogCompilationConfig, +} from "./types"; + +/** + * Cache entry for resolved values + */ +interface CacheEntry { + value: T; + cachedAt: number; + expiresAt: number; +} + +/** + * Configuration for HieraService + */ +export interface HieraServiceConfig { + controlRepoPath: string; + hieraConfigPath: string; + hieradataPath?: string; + factSources: FactSourceConfig; + cache: HieraCacheConfig; + catalogCompilation?: CatalogCompilationConfig; +} + +/** + * HieraService + * + * Orchestrates HieraParser, HieraScanner, HieraResolver, FactService, and CatalogCompiler + * to provide unified Hiera data access with caching and optional catalog compilation. 
+ */ +export class HieraService { + private parser: HieraParser; + private scanner: HieraScanner; + private resolver: HieraResolver; + private factService: FactService; + private catalogCompiler: CatalogCompiler | null = null; + private integrationManager: IntegrationManager; + + private config: HieraServiceConfig; + private hieraConfig: HieraConfig | null = null; + private initialized = false; + + // Cache storage + private keyIndexCache: CacheEntry | null = null; + private resolutionCache: Map> = new Map(); + private nodeDataCache: Map> = new Map(); + private hieraConfigCache: CacheEntry | null = null; + + // Cache configuration + private cacheEnabled: boolean; + private cacheTTL: number; + private maxCacheEntries: number; + + constructor( + integrationManager: IntegrationManager, + config: HieraServiceConfig + ) { + this.integrationManager = integrationManager; + this.config = config; + + // Initialize components + this.parser = new HieraParser(config.controlRepoPath); + this.scanner = new HieraScanner( + config.controlRepoPath, + config.hieradataPath ?? "data" + ); + this.resolver = new HieraResolver(config.controlRepoPath); + this.factService = new FactService(integrationManager, config.factSources); + + // Initialize catalog compiler if configured + if (config.catalogCompilation) { + this.catalogCompiler = new CatalogCompiler( + integrationManager, + config.catalogCompilation + ); + this.log(`CatalogCompiler initialized (enabled: ${config.catalogCompilation.enabled})`); + } + + // Cache configuration + this.cacheEnabled = config.cache.enabled; + this.cacheTTL = config.cache.ttl; + this.maxCacheEntries = config.cache.maxEntries; + + this.log("HieraService created"); + } + + /** + * Initialize the service + * + * Parses hiera.yaml and performs initial scan of hieradata. 
+ */ + async initialize(): Promise { + this.log("Initializing HieraService..."); + + // Parse hiera.yaml + const parseResult = await this.parser.parse(this.config.hieraConfigPath); + if (!parseResult.success || !parseResult.config) { + throw new Error( + `Failed to parse hiera.yaml: ${parseResult.error?.message ?? "Unknown error"}` + ); + } + + this.hieraConfig = parseResult.config; + + // Cache the parsed config + if (this.cacheEnabled) { + this.hieraConfigCache = this.createCacheEntry(this.hieraConfig); + } + + // Perform initial scan + await this.scanner.scan(); + + // Set up file watching for cache invalidation + this.scanner.watchForChanges((changedFiles) => { + this.handleFileChanges(changedFiles); + }); + + this.initialized = true; + this.log("HieraService initialized successfully"); + } + + /** + * Check if the service is initialized + */ + isInitialized(): boolean { + return this.initialized; + } + + // ============================================================================ + // Key Discovery Methods + // ============================================================================ + + /** + * Get all discovered Hiera keys + * + * @returns Key index with all discovered keys + */ + async getAllKeys(): Promise { + this.ensureInitialized(); + + // Check cache + if (this.cacheEnabled && this.keyIndexCache && !this.isCacheExpired(this.keyIndexCache)) { + return this.keyIndexCache.value; + } + + // Scan for keys + const keyIndex = await this.scanner.scan(); + + // Update cache + if (this.cacheEnabled) { + this.keyIndexCache = this.createCacheEntry(keyIndex); + } + + return keyIndex; + } + + /** + * Search for keys matching a query + * + * @param query - Search query (partial key name, case-insensitive) + * @returns Array of matching keys + */ + async searchKeys(query: string): Promise { + this.ensureInitialized(); + + // Ensure key index is loaded + await this.getAllKeys(); + + return this.scanner.searchKeys(query); + } + + /** + * Get a specific key by name 
+ * + * @param keyName - Full key name + * @returns Key details or undefined if not found + */ + async getKey(keyName: string): Promise { + this.ensureInitialized(); + + // Ensure key index is loaded + await this.getAllKeys(); + + return this.scanner.getKey(keyName); + } + + // ============================================================================ + // Key Resolution Methods + // ============================================================================ + + /** + * Resolve a Hiera key for a specific node + * + * When catalog compilation is enabled, attempts to compile a catalog to extract + * code-defined variables. Falls back to fact-only resolution if compilation fails. + * + * @param nodeId - Node identifier (certname) + * @param key - Hiera key to resolve + * @param environment - Optional Puppet environment (defaults to "production") + * @returns Resolution result with value and metadata + * + * Requirements: 12.2, 12.3, 12.4 + */ + async resolveKey( + nodeId: string, + key: string, + environment: string = "production" + ): Promise { + this.ensureInitialized(); + + // Check cache + const cacheKey = this.buildResolutionCacheKey(nodeId, key); + if (this.cacheEnabled) { + const cached = this.resolutionCache.get(cacheKey); + if (cached && !this.isCacheExpired(cached)) { + return cached.value; + } + } + + // Get facts for the node + const factResult = await this.factService.getFacts(nodeId); + const facts = factResult.facts; + + // Build resolve options with catalog variables if compilation is enabled + const resolveOptions = await this.buildResolveOptions(nodeId, environment, facts); + + // Resolve the key with catalog variables (or empty if compilation disabled/failed) + const resolution = await this.resolver.resolve( + key, + facts, + this.hieraConfig!, + resolveOptions + ); + + // Update cache + if (this.cacheEnabled) { + this.addToResolutionCache(cacheKey, resolution); + } + + return resolution; + } + + /** + * Build resolve options with catalog 
variables if compilation is enabled + * + * Implements fallback behavior: if catalog compilation fails, returns empty + * variables with a warning message. + * + * @param nodeId - Node identifier + * @param environment - Puppet environment + * @param facts - Node facts + * @returns Resolve options with catalog variables and warnings + * + * Requirements: 12.3, 12.4 + */ + private async buildResolveOptions( + nodeId: string, + environment: string, + facts: Facts + ): Promise { + // If catalog compilation is not configured or disabled, return empty options + if (!this.catalogCompiler || !this.catalogCompiler.isEnabled()) { + return {}; + } + + // Attempt catalog compilation + const { variables, warnings } = await this.catalogCompiler.getVariables( + nodeId, + environment, + facts + ); + + // Log warnings if any (fallback occurred) + if (warnings && warnings.length > 0) { + for (const warning of warnings) { + this.log(warning, "warn"); + } + } + + return { + catalogVariables: variables, + catalogWarnings: warnings, + }; + } + + /** + * Resolve all keys for a specific node + * + * @param nodeId - Node identifier + * @param environment - Optional Puppet environment (defaults to "production") + * @returns Map of key names to resolution results + */ + async resolveAllKeys( + nodeId: string, + environment: string = "production" + ): Promise> { + this.ensureInitialized(); + + const results = new Map(); + + // Get all keys + const keyIndex = await this.getAllKeys(); + + // Get facts for the node + const factResult = await this.factService.getFacts(nodeId); + const facts = factResult.facts; + + // Build resolve options once for all keys (catalog compilation is expensive) + const resolveOptions = await this.buildResolveOptions(nodeId, environment, facts); + + // Resolve each key + for (const keyName of keyIndex.keys.keys()) { + const cacheKey = this.buildResolutionCacheKey(nodeId, keyName); + + // Check cache first + if (this.cacheEnabled) { + const cached = 
this.resolutionCache.get(cacheKey); + if (cached && !this.isCacheExpired(cached)) { + results.set(keyName, cached.value); + continue; + } + } + + // Resolve the key with catalog variables + const resolution = await this.resolver.resolve( + keyName, + facts, + this.hieraConfig!, + resolveOptions + ); + + results.set(keyName, resolution); + + // Update cache + if (this.cacheEnabled) { + this.addToResolutionCache(cacheKey, resolution); + } + } + + return results; + } + + // ============================================================================ + // Node-Specific Data Methods + // ============================================================================ + + /** + * Get all Hiera data for a specific node + * + * Includes used/unused key classification based on catalog analysis. + * Keys are classified as "used" if they match patterns associated with + * classes included in the node's catalog. + * + * @param nodeId - Node identifier + * @returns Node Hiera data including all keys and usage classification + * + * Requirements: 6.2, 6.6 + */ + async getNodeHieraData(nodeId: string): Promise { + this.ensureInitialized(); + + // Check cache + if (this.cacheEnabled) { + const cached = this.nodeDataCache.get(nodeId); + if (cached && !this.isCacheExpired(cached)) { + return cached.value; + } + } + + // Get facts + const factResult = await this.factService.getFacts(nodeId); + const facts = factResult.facts; + + // Resolve all keys + const keys = await this.resolveAllKeys(nodeId); + + // Classify keys as used/unused based on catalog analysis + const { usedKeys, unusedKeys } = await this.classifyKeyUsage(nodeId, keys); + + const nodeData: NodeHieraData = { + nodeId, + facts, + keys, + usedKeys, + unusedKeys, + }; + + // Update cache + if (this.cacheEnabled) { + this.addToNodeDataCache(nodeId, nodeData); + } + + return nodeData; + } + + /** + * Classify Hiera keys as used or unused based on catalog analysis + * + * Keys are classified as "used" if: + * 1. 
They match a class name pattern from the catalog (e.g., "profile::nginx::*") + * 2. They are referenced by a class included in the catalog + * + * @param nodeId - Node identifier + * @param keys - Map of resolved keys + * @returns Object with usedKeys and unusedKeys sets + * + * Requirements: 6.6 + */ + private async classifyKeyUsage( + nodeId: string, + keys: Map + ): Promise<{ usedKeys: Set; unusedKeys: Set }> { + const usedKeys = new Set(); + const unusedKeys = new Set(); + + // Try to get included classes from PuppetDB catalog + const includedClasses = await this.getIncludedClasses(nodeId); + + // If no catalog data available, mark all keys as unused + if (includedClasses.length === 0) { + for (const keyName of keys.keys()) { + unusedKeys.add(keyName); + } + return { usedKeys, unusedKeys }; + } + + // Build class prefixes for matching + // e.g., "profile::nginx" -> ["profile::nginx::", "profile::nginx"] + const classPrefixes = this.buildClassPrefixes(includedClasses); + + // Classify each key + for (const keyName of keys.keys()) { + if (this.isKeyUsedByClasses(keyName, classPrefixes)) { + usedKeys.add(keyName); + } else { + unusedKeys.add(keyName); + } + } + + return { usedKeys, unusedKeys }; + } + + /** + * Get list of classes included in a node's catalog + * + * Attempts to retrieve catalog from PuppetDB and extract class names. 
+ * + * @param nodeId - Node identifier + * @returns Array of class names + */ + private async getIncludedClasses(nodeId: string): Promise { + try { + // Try to get PuppetDB service from integration manager + const puppetdb = this.integrationManager.getInformationSource("puppetdb"); + + if (!puppetdb || !puppetdb.isInitialized()) { + this.log("PuppetDB not available for catalog analysis"); + return []; + } + + // Get catalog data + const catalogData = await puppetdb.getNodeData(nodeId, "catalog"); + + if (!catalogData || typeof catalogData !== "object") { + this.log(`No catalog data available for node: ${nodeId}`); + return []; + } + + // Extract class names from catalog resources + const catalog = catalogData as { resources?: Array<{ type: string; title: string }> }; + + if (!catalog.resources || !Array.isArray(catalog.resources)) { + return []; + } + + // Filter for Class resources and extract titles + const classes = catalog.resources + .filter(resource => resource.type === "Class") + .map(resource => resource.title.toLowerCase()); + + this.log(`Found ${classes.length} classes in catalog for node: ${nodeId}`); + return classes; + } catch (error) { + this.log(`Failed to get catalog for key usage analysis: ${error instanceof Error ? error.message : String(error)}`); + return []; + } + } + + /** + * Build class prefixes for key matching + * + * Converts class names to prefixes that can be used to match Hiera keys. 
+ * e.g., "profile::nginx" -> ["profile::nginx::", "profile::nginx"] + * + * @param classes - Array of class names + * @returns Set of prefixes + */ + private buildClassPrefixes(classes: string[]): Set { + const prefixes = new Set(); + + for (const className of classes) { + // Add the class name itself as a prefix + prefixes.add(className.toLowerCase()); + + // Add with trailing :: for nested keys + prefixes.add(`${className.toLowerCase()}::`); + + // Also add parent namespaces + // e.g., "profile::nginx::config" -> "profile::nginx", "profile" + const parts = className.split("::"); + for (let i = 1; i < parts.length; i++) { + const parentPrefix = parts.slice(0, i).join("::").toLowerCase(); + prefixes.add(parentPrefix); + prefixes.add(`${parentPrefix}::`); + } + } + + return prefixes; + } + + /** + * Check if a key is used by any of the included classes + * + * A key is considered "used" if: + * 1. It starts with a class prefix (e.g., "profile::nginx::port" matches "profile::nginx") + * 2. 
It exactly matches a class name + * + * @param keyName - Hiera key name + * @param classPrefixes - Set of class prefixes + * @returns True if key is used + */ + private isKeyUsedByClasses(keyName: string, classPrefixes: Set): boolean { + const lowerKey = keyName.toLowerCase(); + + // Check if key starts with any class prefix + for (const prefix of classPrefixes) { + if (lowerKey.startsWith(prefix)) { + return true; + } + } + + return false; + } + + // ============================================================================ + // Global Query Methods + // ============================================================================ + + /** + * Get key values across all nodes + * + * @param key - Hiera key to look up + * @returns Array of key values for each node + */ + async getKeyValuesAcrossNodes(key: string): Promise { + this.ensureInitialized(); + + const results: KeyNodeValues[] = []; + + // Get all available nodes + const nodes = await this.factService.listAvailableNodes(); + + // Resolve the key for each node + for (const nodeId of nodes) { + const resolution = await this.resolveKey(nodeId, key); + + results.push({ + nodeId, + value: resolution.resolvedValue, + sourceFile: resolution.sourceFile, + hierarchyLevel: resolution.hierarchyLevel, + found: resolution.found, + }); + } + + return results; + } + + /** + * Group nodes by their resolved value for a key + * + * Groups nodes that have the same resolved value together. + * Nodes where the key is not found are grouped separately. + * + * @param keyNodeValues - Array of key values for each node + * @returns Array of value groups + * + * Requirements: 7.5 + */ + groupNodesByValue(keyNodeValues: KeyNodeValues[]): ValueGroup[] { + const valueMap = new Map(); + + for (const result of keyNodeValues) { + // Use JSON.stringify to create a consistent key for the value + // Handle undefined/not found separately + const valueKey = result.found + ? 
JSON.stringify(result.value) + : "__NOT_FOUND__"; + + if (!valueMap.has(valueKey)) { + valueMap.set(valueKey, { + value: result.found ? result.value : undefined, + nodes: [], + }); + } + + valueMap.get(valueKey)!.nodes.push(result.nodeId); + } + + // Convert to array of ValueGroup + const groups: ValueGroup[] = []; + for (const [, group] of valueMap) { + groups.push({ + value: group.value, + nodes: group.nodes, + }); + } + + return groups; + } + + // ============================================================================ + // Cache Management Methods + // ============================================================================ + + /** + * Invalidate all caches + */ + invalidateCache(): void { + this.keyIndexCache = null; + this.resolutionCache.clear(); + this.nodeDataCache.clear(); + this.hieraConfigCache = null; + this.resolver.clearCache(); + this.log("All caches invalidated"); + } + + /** + * Invalidate cache for a specific node + * + * @param nodeId - Node identifier + */ + invalidateNodeCache(nodeId: string): void { + // Remove node data cache + this.nodeDataCache.delete(nodeId); + + // Remove all resolution cache entries for this node + const keysToDelete: string[] = []; + for (const cacheKey of this.resolutionCache.keys()) { + if (cacheKey.startsWith(`${nodeId}:`)) { + keysToDelete.push(cacheKey); + } + } + for (const key of keysToDelete) { + this.resolutionCache.delete(key); + } + + this.log(`Cache invalidated for node: ${nodeId}`); + } + + /** + * Reload the control repository data + * + * Re-parses hiera.yaml and rescans hieradata. + */ + async reloadControlRepo(): Promise { + this.log("Reloading control repository..."); + + // Invalidate all caches + this.invalidateCache(); + + // Re-parse hiera.yaml + const parseResult = await this.parser.parse(this.config.hieraConfigPath); + if (!parseResult.success || !parseResult.config) { + throw new Error( + `Failed to parse hiera.yaml: ${parseResult.error?.message ?? 
"Unknown error"}` + ); + } + + this.hieraConfig = parseResult.config; + + // Cache the parsed config + if (this.cacheEnabled) { + this.hieraConfigCache = this.createCacheEntry(this.hieraConfig); + } + + // Rescan hieradata + await this.scanner.scan(); + + this.log("Control repository reloaded successfully"); + } + + /** + * Get cache statistics + * + * @returns Cache statistics + */ + getCacheStats(): { + enabled: boolean; + ttl: number; + maxEntries: number; + resolutionCacheSize: number; + nodeDataCacheSize: number; + keyIndexCached: boolean; + hieraConfigCached: boolean; + } { + return { + enabled: this.cacheEnabled, + ttl: this.cacheTTL, + maxEntries: this.maxCacheEntries, + resolutionCacheSize: this.resolutionCache.size, + nodeDataCacheSize: this.nodeDataCache.size, + keyIndexCached: this.keyIndexCache !== null, + hieraConfigCached: this.hieraConfigCache !== null, + }; + } + + // ============================================================================ + // Component Accessors + // ============================================================================ + + /** + * Get the HieraParser instance + */ + getParser(): HieraParser { + return this.parser; + } + + /** + * Get the HieraScanner instance + */ + getScanner(): HieraScanner { + return this.scanner; + } + + /** + * Get the HieraResolver instance + */ + getResolver(): HieraResolver { + return this.resolver; + } + + /** + * Get the FactService instance + */ + getFactService(): FactService { + return this.factService; + } + + /** + * Get the parsed Hiera configuration + */ + getHieraConfig(): HieraConfig | null { + return this.hieraConfig; + } + + // ============================================================================ + // Private Helper Methods + // ============================================================================ + + /** + * Ensure the service is initialized + */ + private ensureInitialized(): void { + if (!this.initialized) { + throw new Error("HieraService is not initialized. 
Call initialize() first."); + } + } + + /** + * Handle file changes from the scanner + * + * @param changedFiles - Array of changed file paths + */ + private handleFileChanges(changedFiles: string[]): void { + this.log(`File changes detected: ${changedFiles.join(", ")}`); + + // Invalidate key index cache + this.keyIndexCache = null; + + // Invalidate all resolution caches (values may have changed) + this.resolutionCache.clear(); + + // Invalidate all node data caches + this.nodeDataCache.clear(); + + // Clear resolver's lookup options cache + this.resolver.clearCache(); + + this.log("Caches invalidated due to file changes"); + } + + /** + * Create a cache entry with expiration + * + * @param value - Value to cache + * @returns Cache entry + */ + private createCacheEntry(value: T): CacheEntry { + const now = Date.now(); + return { + value, + cachedAt: now, + expiresAt: now + this.cacheTTL, + }; + } + + /** + * Check if a cache entry is expired + * + * @param entry - Cache entry to check + * @returns True if expired + */ + private isCacheExpired(entry: CacheEntry): boolean { + return Date.now() > entry.expiresAt; + } + + /** + * Build a cache key for resolution results + * + * @param nodeId - Node identifier + * @param key - Hiera key + * @returns Cache key string + */ + private buildResolutionCacheKey(nodeId: string, key: string): string { + return `${nodeId}:${key}`; + } + + /** + * Add a resolution to the cache with LRU eviction + * + * @param cacheKey - Cache key + * @param resolution - Resolution to cache + */ + private addToResolutionCache(cacheKey: string, resolution: HieraResolution): void { + // Evict oldest entries if at capacity + if (this.resolutionCache.size >= this.maxCacheEntries) { + this.evictOldestCacheEntries(this.resolutionCache, Math.floor(this.maxCacheEntries * 0.1)); + } + + this.resolutionCache.set(cacheKey, this.createCacheEntry(resolution)); + } + + /** + * Add node data to the cache with LRU eviction + * + * @param nodeId - Node identifier 
+ * @param nodeData - Node data to cache + */ + private addToNodeDataCache(nodeId: string, nodeData: NodeHieraData): void { + // Evict oldest entries if at capacity (use 10% of max for node data) + const maxNodeEntries = Math.floor(this.maxCacheEntries * 0.1); + if (this.nodeDataCache.size >= maxNodeEntries) { + this.evictOldestCacheEntries(this.nodeDataCache, Math.floor(maxNodeEntries * 0.1)); + } + + this.nodeDataCache.set(nodeId, this.createCacheEntry(nodeData)); + } + + /** + * Evict oldest cache entries + * + * @param cache - Cache map to evict from + * @param count - Number of entries to evict + */ + private evictOldestCacheEntries(cache: Map>, count: number): void { + // Sort entries by cachedAt and remove oldest + const entries = Array.from(cache.entries()) + .sort((a, b) => a[1].cachedAt - b[1].cachedAt); + + for (let i = 0; i < Math.min(count, entries.length); i++) { + cache.delete(entries[i][0]); + } + } + + /** + * Log a message with service context + * + * @param message - Message to log + * @param level - Log level (info, warn, error) + */ + private log(message: string, level: "info" | "warn" | "error" = "info"): void { + const prefix = "[HieraService]"; + switch (level) { + case "warn": + console.warn(prefix, message); + break; + case "error": + console.error(prefix, message); + break; + default: + // eslint-disable-next-line no-console + console.log(prefix, message); + } + } + + /** + * Get the CatalogCompiler instance + */ + getCatalogCompiler(): CatalogCompiler | null { + return this.catalogCompiler; + } + + /** + * Check if catalog compilation is enabled + */ + isCatalogCompilationEnabled(): boolean { + return this.catalogCompiler?.isEnabled() ?? 
false; + } + + /** + * Stop the service and clean up resources + */ + async shutdown(): Promise { + this.log("Shutting down HieraService..."); + + // Stop file watching + this.scanner.stopWatching(); + + // Clear all caches + this.invalidateCache(); + + // Clear catalog compiler cache + if (this.catalogCompiler) { + this.catalogCompiler.clearCache(); + } + + this.initialized = false; + this.log("HieraService shut down"); + } +} diff --git a/backend/src/integrations/hiera/PuppetfileParser.ts b/backend/src/integrations/hiera/PuppetfileParser.ts new file mode 100644 index 0000000..57b58f8 --- /dev/null +++ b/backend/src/integrations/hiera/PuppetfileParser.ts @@ -0,0 +1,464 @@ +/** + * PuppetfileParser + * + * Parses Puppetfile to extract module dependencies with versions and sources. + * Supports both Puppet Forge modules and Git-based modules. + * + * Requirements: 10.1, 10.5 + */ + +import * as fs from "fs"; +import type { ModuleUpdate } from "./types"; + +/** + * Parsed module information from Puppetfile + */ +export interface ParsedModule { + name: string; + version: string; + source: "forge" | "git"; + forgeSlug?: string; + gitUrl?: string; + gitRef?: string; + gitTag?: string; + gitBranch?: string; + gitCommit?: string; + line: number; +} + +/** + * Puppetfile parse result + */ +export interface PuppetfileParseResult { + success: boolean; + modules: ParsedModule[]; + forgeUrl?: string; + moduledir?: string; + errors: PuppetfileParseError[]; + warnings: string[]; +} + +/** + * Puppetfile parse error + */ +export interface PuppetfileParseError { + message: string; + line?: number; + column?: number; + suggestion?: string; +} + +/** + * PuppetfileParser class for parsing Puppetfile module declarations + */ +export class PuppetfileParser { + /** + * Parse a Puppetfile from a file path + * + * @param filePath - Path to the Puppetfile + * @returns Parse result with modules and any errors + */ + parseFile(filePath: string): PuppetfileParseResult { + let content: 
string; + + try { + content = fs.readFileSync(filePath, "utf-8"); + } catch (error) { + return { + success: false, + modules: [], + errors: [ + { + message: `Failed to read Puppetfile: ${this.getErrorMessage(error)}`, + suggestion: "Ensure the Puppetfile exists and is readable", + }, + ], + warnings: [], + }; + } + + return this.parse(content); + } + + /** + * Parse Puppetfile content + * + * @param content - Puppetfile content as string + * @returns Parse result with modules and any errors + */ + parse(content: string): PuppetfileParseResult { + const modules: ParsedModule[] = []; + const errors: PuppetfileParseError[] = []; + const warnings: string[] = []; + let forgeUrl: string | undefined; + let moduledir: string | undefined; + + const lines = content.split("\n"); + let currentModuleLines: string[] = []; + let currentModuleStartLine = 0; + let inMultilineModule = false; + + for (let i = 0; i < lines.length; i++) { + const lineNumber = i + 1; + const line = lines[i]; + const trimmedLine = line.trim(); + + // Skip empty lines and comments + if (trimmedLine === "" || trimmedLine.startsWith("#")) { + continue; + } + + // Parse forge directive + const forgeMatch = trimmedLine.match(/^forge\s+['"]([^'"]+)['"]/); + if (forgeMatch) { + forgeUrl = forgeMatch[1]; + continue; + } + + // Parse moduledir directive + const moduledirMatch = trimmedLine.match(/^moduledir\s+['"]([^'"]+)['"]/); + if (moduledirMatch) { + moduledir = moduledirMatch[1]; + continue; + } + + // Handle multi-line module declarations + if (inMultilineModule) { + currentModuleLines.push(line); + // Check if this line ends the module declaration + if (!this.isLineContinued(line)) { + const moduleResult = this.parseModuleDeclaration( + currentModuleLines.join("\n"), + currentModuleStartLine + ); + if (moduleResult.module) { + modules.push(moduleResult.module); + } + if (moduleResult.error) { + errors.push(moduleResult.error); + } + if (moduleResult.warning) { + warnings.push(moduleResult.warning); + } + 
currentModuleLines = []; + inMultilineModule = false; + } + continue; + } + + // Check for mod declaration start + if (trimmedLine.startsWith("mod ") || trimmedLine.startsWith("mod(")) { + currentModuleStartLine = lineNumber; + currentModuleLines = [line]; + + // Check if this is a multi-line declaration + if (this.isLineContinued(line)) { + inMultilineModule = true; + } else { + const moduleResult = this.parseModuleDeclaration(line, lineNumber); + if (moduleResult.module) { + modules.push(moduleResult.module); + } + if (moduleResult.error) { + errors.push(moduleResult.error); + } + if (moduleResult.warning) { + warnings.push(moduleResult.warning); + } + currentModuleLines = []; + } + continue; + } + + // Unknown directive - add warning + if (trimmedLine.length > 0 && !trimmedLine.startsWith("mod")) { + warnings.push(`Unknown directive at line ${lineNumber}: ${trimmedLine.substring(0, 50)}`); + } + } + + // Handle unclosed multi-line module + if (inMultilineModule && currentModuleLines.length > 0) { + errors.push({ + message: "Unclosed module declaration", + line: currentModuleStartLine, + suggestion: "Ensure all module declarations are properly closed", + }); + } + + return { + success: errors.length === 0, + modules, + forgeUrl, + moduledir, + errors, + warnings, + }; + } + + /** + * Check if a line continues to the next line + */ + private isLineContinued(line: string): boolean { + const trimmed = line.trim(); + // Line continues if it ends with comma, backslash, or has unclosed braces/parens + if (trimmed.endsWith(",") || trimmed.endsWith("\\")) { + return true; + } + // Check for unclosed hash/array + const openBraces = (trimmed.match(/{/g) || []).length; + const closeBraces = (trimmed.match(/}/g) || []).length; + if (openBraces > closeBraces) { + return true; + } + return false; + } + + /** + * Parse a single module declaration + */ + private parseModuleDeclaration( + declaration: string, + lineNumber: number + ): { module?: ParsedModule; error?: 
PuppetfileParseError; warning?: string } { + // Normalize the declaration (remove newlines, extra spaces) + const normalized = declaration.replace(/\s+/g, " ").trim(); + + // Try to parse as simple forge module: mod 'author/name', 'version' + const simpleForgeMatch = normalized.match( + /^mod\s+['"]([^'"]+)['"]\s*,\s*['"]([^'"]+)['"]\s*$/ + ); + if (simpleForgeMatch) { + const moduleName = simpleForgeMatch[1]; + const version = simpleForgeMatch[2]; + return { + module: { + name: this.normalizeModuleName(moduleName), + version, + source: "forge", + forgeSlug: moduleName, + line: lineNumber, + }, + }; + } + + // Try to parse as forge module without version: mod 'author/name' + const forgeNoVersionMatch = normalized.match(/^mod\s+['"]([^'"]+)['"]\s*$/); + if (forgeNoVersionMatch) { + const moduleName = forgeNoVersionMatch[1]; + return { + module: { + name: this.normalizeModuleName(moduleName), + version: "latest", + source: "forge", + forgeSlug: moduleName, + line: lineNumber, + }, + warning: `Module '${moduleName}' at line ${lineNumber} has no version specified`, + }; + } + + // Try to parse as git module: mod 'name', :git => 'url', ... 
+ const gitMatch = normalized.match( + /^mod\s+['"]([^'"]+)['"]\s*,\s*:git\s*=>\s*['"]([^'"]+)['"]/ + ); + if (gitMatch) { + const moduleName = gitMatch[1]; + const gitUrl = gitMatch[2]; + + // Extract git ref options + const tagMatch = normalized.match(/:tag\s*=>\s*['"]([^'"]+)['"]/); + const branchMatch = normalized.match(/:branch\s*=>\s*['"]([^'"]+)['"]/); + const refMatch = normalized.match(/:ref\s*=>\s*['"]([^'"]+)['"]/); + const commitMatch = normalized.match(/:commit\s*=>\s*['"]([^'"]+)['"]/); + + const version = tagMatch?.[1] || branchMatch?.[1] || refMatch?.[1] || commitMatch?.[1] || "HEAD"; + + return { + module: { + name: moduleName, + version, + source: "git", + gitUrl, + gitTag: tagMatch?.[1], + gitBranch: branchMatch?.[1], + gitRef: refMatch?.[1], + gitCommit: commitMatch?.[1], + line: lineNumber, + }, + }; + } + + // Try to parse as local module: mod 'name', :local => true + const localMatch = normalized.match( + /^mod\s+['"]([^'"]+)['"]\s*,\s*:local\s*=>\s*true/ + ); + if (localMatch) { + return { + module: { + name: localMatch[1], + version: "local", + source: "git", // Treat local as git-like (not from forge) + line: lineNumber, + }, + }; + } + + // Could not parse the module declaration + return { + error: { + message: `Failed to parse module declaration: ${normalized.substring(0, 100)}`, + line: lineNumber, + suggestion: "Check the module declaration syntax", + }, + }; + } + + /** + * Normalize module name to consistent format + * Converts 'author-name' to 'author/name' + */ + private normalizeModuleName(name: string): string { + // If already has slash, return as-is + if (name.includes("/")) { + return name; + } + // Convert hyphen to slash for author-module format + const parts = name.split("-"); + if (parts.length >= 2) { + return `${parts[0]}/${parts.slice(1).join("-")}`; + } + return name; + } + + /** + * Convert parsed modules to ModuleUpdate format + */ + toModuleUpdates(modules: ParsedModule[]): ModuleUpdate[] { + return 
modules.map((mod) => ({ + name: mod.name, + currentVersion: mod.version, + latestVersion: mod.version, // Will be updated by update detection + source: mod.source, + hasSecurityAdvisory: false, // Will be updated by security check + })); + } + + /** + * Get a formatted error summary from parse result + * + * @param result - Parse result + * @returns Formatted error message or null if no errors + */ + getErrorSummary(result: PuppetfileParseResult): string | null { + if (result.success && result.errors.length === 0) { + return null; + } + + const errorMessages = result.errors.map((err) => { + let msg = err.message; + if (err.line) { + msg = `Line ${err.line}: ${msg}`; + } + if (err.suggestion) { + msg += ` (${err.suggestion})`; + } + return msg; + }); + + return `Puppetfile parse errors:\n${errorMessages.join("\n")}`; + } + + /** + * Validate a Puppetfile and return detailed validation result + * + * @param filePath - Path to the Puppetfile + * @returns Validation result with detailed error information + */ + validate(filePath: string): PuppetfileValidationResult { + const parseResult = this.parseFile(filePath); + + const issues: PuppetfileValidationIssue[] = []; + + // Convert errors to issues + for (const error of parseResult.errors) { + issues.push({ + severity: "error", + message: error.message, + line: error.line, + column: error.column, + suggestion: error.suggestion, + }); + } + + // Convert warnings to issues + for (const warning of parseResult.warnings) { + // Extract line number from warning if present + const lineMatch = warning.match(/line (\d+)/i); + issues.push({ + severity: "warning", + message: warning, + line: lineMatch ? 
parseInt(lineMatch[1], 10) : undefined, + }); + } + + // Add additional validation checks + for (const mod of parseResult.modules) { + // Check for modules without version pinning + if (mod.version === "latest") { + issues.push({ + severity: "warning", + message: `Module '${mod.name}' has no version pinned`, + line: mod.line, + suggestion: "Pin module versions for reproducible builds", + }); + } + + // Check for git modules without specific ref + if (mod.source === "git" && mod.version === "HEAD") { + issues.push({ + severity: "warning", + message: `Git module '${mod.name}' has no tag, branch, or commit specified`, + line: mod.line, + suggestion: "Specify a tag, branch, or commit for reproducible builds", + }); + } + } + + return { + valid: parseResult.success && issues.filter((i) => i.severity === "error").length === 0, + modules: parseResult.modules, + issues, + forgeUrl: parseResult.forgeUrl, + moduledir: parseResult.moduledir, + }; + } + + /** + * Extract error message from unknown error + */ + private getErrorMessage(error: unknown): string { + return error instanceof Error ? error.message : String(error); + } +} + +/** + * Puppetfile validation issue + */ +export interface PuppetfileValidationIssue { + severity: "error" | "warning" | "info"; + message: string; + line?: number; + column?: number; + suggestion?: string; +} + +/** + * Puppetfile validation result + */ +export interface PuppetfileValidationResult { + valid: boolean; + modules: ParsedModule[]; + issues: PuppetfileValidationIssue[]; + forgeUrl?: string; + moduledir?: string; +} diff --git a/backend/src/integrations/hiera/index.ts b/backend/src/integrations/hiera/index.ts new file mode 100644 index 0000000..33a955b --- /dev/null +++ b/backend/src/integrations/hiera/index.ts @@ -0,0 +1,60 @@ +/** + * Hiera Integration Module + * + * Exports all Hiera integration components for local Puppet control repository + * analysis, Hiera data lookup, and code analysis. 
+ */ + +// Export all types +export * from "./types"; + +// Export HieraParser +export { HieraParser } from "./HieraParser"; +export type { HieraParseResult, ValidationResult, DataBackend, BackendInfo } from "./HieraParser"; + +// Export FactService +export { FactService } from "./FactService"; + +// Export HieraScanner +export { HieraScanner } from "./HieraScanner"; +export type { FileScanResult, FileChangeCallback } from "./HieraScanner"; + +// Export HieraResolver +export { HieraResolver } from "./HieraResolver"; +export type { CatalogAwareResolveOptions } from "./HieraResolver"; + +// Export HieraService +export { HieraService } from "./HieraService"; +export type { HieraServiceConfig } from "./HieraService"; + +// Export CatalogCompiler +export { CatalogCompiler } from "./CatalogCompiler"; +export type { CompiledCatalogResult } from "./CatalogCompiler"; + +// Export CodeAnalyzer +export { CodeAnalyzer } from "./CodeAnalyzer"; +export type { LintFilterOptions, IssueCounts } from "./CodeAnalyzer"; + +// Export PuppetfileParser +export { PuppetfileParser } from "./PuppetfileParser"; +export type { + ParsedModule, + PuppetfileParseResult, + PuppetfileParseError, + PuppetfileValidationIssue, + PuppetfileValidationResult, +} from "./PuppetfileParser"; + +// Export ForgeClient +export { ForgeClient } from "./ForgeClient"; +export type { + ForgeModuleInfo, + ForgeApiError, + ModuleUpdateCheckResult, + ForgeClientConfig, + SecurityAdvisory, + ModuleSecurityStatus, +} from "./ForgeClient"; + +// Export HieraPlugin +export { HieraPlugin } from "./HieraPlugin"; diff --git a/backend/src/integrations/hiera/types.ts b/backend/src/integrations/hiera/types.ts new file mode 100644 index 0000000..444af9e --- /dev/null +++ b/backend/src/integrations/hiera/types.ts @@ -0,0 +1,527 @@ +/** + * Hiera Integration Data Types + * + * Type definitions for Hiera data lookup, resolution, and code analysis. 
+ */ + +// ============================================================================ +// Hiera Configuration Types +// ============================================================================ + +/** + * Hiera 5 configuration structure + */ +export interface HieraConfig { + version: 5; + defaults?: HieraDefaults; + hierarchy: HierarchyLevel[]; +} + +/** + * Default settings for Hiera hierarchy + */ +export interface HieraDefaults { + datadir?: string; + data_hash?: string; + lookup_key?: string; + options?: Record; +} + +/** + * A single level in the Hiera hierarchy + */ +export interface HierarchyLevel { + name: string; + path?: string; + paths?: string[]; + glob?: string; + globs?: string[]; + datadir?: string; + data_hash?: string; + lookup_key?: string; + mapped_paths?: [string, string, string]; + options?: Record; +} + +/** + * Lookup options for Hiera keys + */ +export interface LookupOptions { + merge?: LookupMethod; + convert_to?: "Array" | "Hash"; + knockout_prefix?: string; +} + +/** + * Hiera lookup methods + */ +export type LookupMethod = "first" | "unique" | "hash" | "deep"; + +// ============================================================================ +// Hiera Key Types +// ============================================================================ + +/** + * A Hiera key with all its locations + */ +export interface HieraKey { + name: string; + locations: HieraKeyLocation[]; + lookupOptions?: LookupOptions; +} + +/** + * Location where a Hiera key is defined + */ +export interface HieraKeyLocation { + file: string; + hierarchyLevel: string; + lineNumber: number; + value: unknown; +} + +/** + * Index of all discovered Hiera keys + */ +export interface HieraKeyIndex { + keys: Map; + files: Map; + lastScan: string; + totalKeys: number; + totalFiles: number; +} + +/** + * Information about a scanned hieradata file + */ +export interface HieraFileInfo { + path: string; + hierarchyLevel: string; + keys: string[]; + lastModified: string; +} + +// 
============================================================================ +// Hiera Resolution Types +// ============================================================================ + +/** + * Result of resolving a Hiera key + */ +export interface HieraResolution { + key: string; + resolvedValue: unknown; + lookupMethod: LookupMethod; + sourceFile: string; + hierarchyLevel: string; + allValues: HieraKeyLocation[]; + interpolatedVariables?: Record; + found: boolean; +} + +/** + * Options for resolving Hiera keys + */ +export interface ResolveOptions { + lookupMethod?: LookupMethod; + defaultValue?: unknown; + mergeOptions?: MergeOptions; +} + +/** + * Options for merge operations + */ +export interface MergeOptions { + strategy: LookupMethod; + knockoutPrefix?: string; + sortMergedArrays?: boolean; + mergeHashArrays?: boolean; +} + +/** + * Hiera data for a specific node + */ +export interface NodeHieraData { + nodeId: string; + facts: Facts; + keys: Map; + usedKeys: Set; + unusedKeys: Set; +} + +/** + * Key values across multiple nodes + */ +export interface KeyNodeValues { + nodeId: string; + value: unknown; + sourceFile: string; + hierarchyLevel: string; + found: boolean; +} + +/** + * Map of key usage by node + */ +export type KeyUsageMap = Map; + +// ============================================================================ +// Fact Types +// ============================================================================ + +/** + * Facts for a node + */ +export interface Facts { + nodeId: string; + gatheredAt: string; + facts: Record; +} + +/** + * Result of fetching facts + */ +export interface FactResult { + facts: Facts; + source: "puppetdb" | "local"; + warnings?: string[]; +} + +/** + * Local fact file format (Puppetserver format) + */ +export interface LocalFactFile { + name: string; + values: Record; +} + +// ============================================================================ +// Code Analysis Types +// 
============================================================================ + +/** + * Complete code analysis result + */ +export interface CodeAnalysisResult { + unusedCode: UnusedCodeReport; + lintIssues: LintIssue[]; + moduleUpdates: ModuleUpdate[]; + statistics: UsageStatistics; + analyzedAt: string; +} + +/** + * Report of unused code items + */ +export interface UnusedCodeReport { + unusedClasses: UnusedItem[]; + unusedDefinedTypes: UnusedItem[]; + unusedHieraKeys: UnusedItem[]; +} + +/** + * An unused code item + */ +export interface UnusedItem { + name: string; + file: string; + line: number; + type: "class" | "defined_type" | "hiera_key"; +} + +/** + * A lint issue found in Puppet code + */ +export interface LintIssue { + file: string; + line: number; + column: number; + severity: LintSeverity; + message: string; + rule: string; + fixable: boolean; +} + +/** + * Lint issue severity levels + */ +export type LintSeverity = "error" | "warning" | "info"; + +/** + * Module update information + */ +export interface ModuleUpdate { + name: string; + currentVersion: string; + latestVersion: string; + source: "forge" | "git"; + hasSecurityAdvisory: boolean; + changelog?: string; +} + +/** + * Usage statistics for the codebase + */ +export interface UsageStatistics { + totalManifests: number; + totalClasses: number; + totalDefinedTypes: number; + totalFunctions: number; + linesOfCode: number; + mostUsedClasses: ClassUsage[]; + mostUsedResources: ResourceUsage[]; +} + +/** + * Class usage information + */ +export interface ClassUsage { + name: string; + usageCount: number; + nodes: string[]; +} + +/** + * Resource usage information + */ +export interface ResourceUsage { + type: string; + count: number; +} + +// ============================================================================ +// API Types +// ============================================================================ + +/** + * API response for key list + */ +export interface KeyListResponse { + keys: 
HieraKeyInfo[]; + total: number; + page?: number; + pageSize?: number; +} + +/** + * Simplified key info for API responses + */ +export interface HieraKeyInfo { + name: string; + locationCount: number; + hasLookupOptions: boolean; +} + +/** + * API response for key search + */ +export interface KeySearchResponse { + keys: HieraKeyInfo[]; + query: string; + total: number; +} + +/** + * API response for key details + */ +export interface KeyDetailResponse { + key: HieraKey; +} + +/** + * API response for node Hiera data + */ +export interface NodeHieraDataResponse { + nodeId: string; + keys: HieraResolutionInfo[]; + usedKeys: string[]; + unusedKeys: string[]; + factSource: "puppetdb" | "local"; + warnings?: string[]; +} + +/** + * Simplified resolution info for API responses + */ +export interface HieraResolutionInfo { + key: string; + resolvedValue: unknown; + lookupMethod: LookupMethod; + sourceFile: string; + hierarchyLevel: string; + found: boolean; +} + +/** + * API response for global key lookup + */ +export interface GlobalKeyLookupResponse { + key: string; + nodes: KeyNodeValues[]; + groupedByValue: ValueGroup[]; +} + +/** + * Group of nodes with the same value + */ +export interface ValueGroup { + value: unknown; + nodes: string[]; +} + +/** + * API response for code analysis + */ +export interface CodeAnalysisResponse { + unusedCode: UnusedCodeReport; + lintIssues: LintIssue[]; + moduleUpdates: ModuleUpdate[]; + statistics: UsageStatistics; + analyzedAt: string; +} + +/** + * API response for integration status + */ +export interface HieraStatusResponse { + enabled: boolean; + configured: boolean; + healthy: boolean; + controlRepoPath?: string; + lastScan?: string; + keyCount?: number; + fileCount?: number; + errors?: string[]; + warnings?: string[]; +} + +/** + * Pagination parameters + */ +export interface PaginationParams { + page?: number; + pageSize?: number; +} + +/** + * Paginated response wrapper + */ +export interface PaginatedResponse { + data: 
T[]; + total: number; + page: number; + pageSize: number; + totalPages: number; +} + +// ============================================================================ +// Error Types +// ============================================================================ + +/** + * Hiera error codes + */ +export const HIERA_ERROR_CODES = { + NOT_CONFIGURED: "HIERA_NOT_CONFIGURED", + INVALID_PATH: "HIERA_INVALID_PATH", + PARSE_ERROR: "HIERA_PARSE_ERROR", + RESOLUTION_ERROR: "HIERA_RESOLUTION_ERROR", + FACTS_UNAVAILABLE: "HIERA_FACTS_UNAVAILABLE", + CATALOG_COMPILATION_FAILED: "HIERA_CATALOG_COMPILATION_FAILED", + ANALYSIS_ERROR: "HIERA_ANALYSIS_ERROR", + FORGE_UNAVAILABLE: "HIERA_FORGE_UNAVAILABLE", +} as const; + +export type HieraErrorCode = + (typeof HIERA_ERROR_CODES)[keyof typeof HIERA_ERROR_CODES]; + +/** + * Hiera error structure + */ +export interface HieraError { + code: HieraErrorCode; + message: string; + details?: { + file?: string; + line?: number; + suggestion?: string; + }; +} + +// ============================================================================ +// Configuration Types +// ============================================================================ + +/** + * Fact source configuration + */ +export interface FactSourceConfig { + preferPuppetDB: boolean; + localFactsPath?: string; +} + +/** + * Catalog compilation configuration + */ +export interface CatalogCompilationConfig { + enabled: boolean; + timeout: number; + cacheTTL: number; +} + +/** + * Hiera cache configuration + */ +export interface HieraCacheConfig { + enabled: boolean; + ttl: number; + maxEntries: number; +} + +/** + * Code analysis configuration + */ +export interface CodeAnalysisConfig { + enabled: boolean; + lintEnabled: boolean; + moduleUpdateCheck: boolean; + analysisInterval: number; + exclusionPatterns?: string[]; +} + +/** + * Complete Hiera plugin configuration + */ +export interface HieraPluginConfig { + enabled: boolean; + controlRepoPath: string; + hieraConfigPath: 
string; + environments: string[]; + factSources: FactSourceConfig; + catalogCompilation: CatalogCompilationConfig; + cache: HieraCacheConfig; + codeAnalysis: CodeAnalysisConfig; +} + +// ============================================================================ +// Health Check Types +// ============================================================================ + +/** + * Health status for the Hiera integration + */ +export interface HieraHealthStatus { + healthy: boolean; + status: "connected" | "error" | "not_configured"; + message?: string; + details?: { + controlRepoAccessible: boolean; + hieraConfigValid: boolean; + factSourceAvailable: boolean; + lastScanTime?: string; + keyCount?: number; + fileCount?: number; + }; + errors?: string[]; + warnings?: string[]; +} diff --git a/backend/src/routes/hiera.ts b/backend/src/routes/hiera.ts new file mode 100644 index 0000000..f272c6b --- /dev/null +++ b/backend/src/routes/hiera.ts @@ -0,0 +1,939 @@ +/** + * Hiera API Routes + * + * REST API endpoints for Hiera data lookup, key resolution, and code analysis. 
+ * + * Requirements: 14.1-14.6, 13.2, 15.6 + */ + +import { Router, type Request, type Response } from "express"; +import { z } from "zod"; +import type { IntegrationManager } from "../integrations/IntegrationManager"; +import type { HieraPlugin } from "../integrations/hiera/HieraPlugin"; +import { + HIERA_ERROR_CODES, + type HieraKeyInfo, + type HieraResolutionInfo, + type PaginatedResponse, +} from "../integrations/hiera/types"; +import { asyncHandler } from "./asyncHandler"; + +/** + * Request validation schemas + */ +const KeyNameParamSchema = z.object({ + key: z.string().min(1, "Key name is required"), +}); + +const NodeIdParamSchema = z.object({ + nodeId: z.string().min(1, "Node ID is required"), +}); + +const NodeKeyParamSchema = z.object({ + nodeId: z.string().min(1, "Node ID is required"), + key: z.string().min(1, "Key name is required"), +}); + +const SearchQuerySchema = z.object({ + q: z.string().optional(), + query: z.string().optional(), +}); + +const PaginationQuerySchema = z.object({ + page: z + .string() + .optional() + .transform((val) => (val ? parseInt(val, 10) : 1)), + pageSize: z + .string() + .optional() + .transform((val) => (val ? Math.min(parseInt(val, 10), 100) : 50)), +}); + +const LintFilterQuerySchema = z.object({ + severity: z + .string() + .optional() + .transform((val) => (val ? val.split(",") : undefined)), + types: z + .string() + .optional() + .transform((val) => (val ? 
val.split(",") : undefined)), +}); + +const KeyFilterQuerySchema = z.object({ + filter: z.enum(["used", "unused", "all"]).optional().default("all"), +}); + +/** + * Helper to get HieraPlugin from IntegrationManager + */ +function getHieraPlugin(integrationManager: IntegrationManager): HieraPlugin | null { + const plugins = integrationManager.getAllPlugins(); + const hieraRegistration = plugins.find((p) => p.plugin.name === "hiera"); + + if (!hieraRegistration) { + return null; + } + + return hieraRegistration.plugin as HieraPlugin; +} + +/** + * Helper to check if Hiera integration is configured and initialized + */ +function checkHieraAvailability( + hieraPlugin: HieraPlugin | null, + res: Response +): hieraPlugin is HieraPlugin { + if (!hieraPlugin) { + res.status(503).json({ + error: { + code: HIERA_ERROR_CODES.NOT_CONFIGURED, + message: "Hiera integration is not configured", + details: { + suggestion: "Configure the Hiera integration by setting HIERA_CONTROL_REPO_PATH environment variable", + }, + }, + }); + return false; + } + + if (!hieraPlugin.isInitialized()) { + res.status(503).json({ + error: { + code: HIERA_ERROR_CODES.NOT_CONFIGURED, + message: "Hiera integration is not initialized", + details: { + suggestion: "Check the server logs for initialization errors", + }, + }, + }); + return false; + } + + if (!hieraPlugin.isEnabled()) { + res.status(503).json({ + error: { + code: HIERA_ERROR_CODES.NOT_CONFIGURED, + message: "Hiera integration is disabled", + details: { + suggestion: "Enable the Hiera integration in the configuration", + }, + }, + }); + return false; + } + + return true; +} + +/** + * Apply pagination to an array + */ +function paginate( + items: T[], + page: number, + pageSize: number +): PaginatedResponse { + const total = items.length; + const totalPages = Math.ceil(total / pageSize); + const startIndex = (page - 1) * pageSize; + const endIndex = startIndex + pageSize; + const data = items.slice(startIndex, endIndex); + + return { + data, + 
total, + page, + pageSize, + totalPages, + }; +} + +/** + * Create Hiera API router + * + * @param integrationManager - IntegrationManager instance + * @returns Express router + */ +export function createHieraRouter(integrationManager: IntegrationManager): Router { + const router = Router(); + + + // ============================================================================ + // Status and Reload Endpoints (18.6) + // ============================================================================ + + /** + * GET /api/integrations/hiera/status + * Return status of the Hiera integration + * + * Requirements: 13.2 + */ + router.get( + "/status", + asyncHandler(async (_req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!hieraPlugin) { + res.json({ + enabled: false, + configured: false, + healthy: false, + message: "Hiera integration is not configured", + }); + return; + } + + const healthStatus = await hieraPlugin.healthCheck(); + const hieraConfig = hieraPlugin.getHieraConfig(); + const validationResult = hieraPlugin.getValidationResult(); + + res.json({ + enabled: hieraPlugin.isEnabled(), + configured: true, + healthy: healthStatus.healthy, + controlRepoPath: hieraConfig?.controlRepoPath, + lastScan: healthStatus.details?.lastScanTime as string | undefined, + keyCount: healthStatus.details?.keyCount as number | undefined, + fileCount: healthStatus.details?.fileCount as number | undefined, + message: healthStatus.message, + errors: validationResult?.errors, + warnings: validationResult?.warnings, + structure: validationResult?.structure, + }); + }) + ); + + /** + * POST /api/integrations/hiera/reload + * Reload control repository data + * + * Requirements: 1.6, 13.2 + */ + router.post( + "/reload", + asyncHandler(async (_req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!checkHieraAvailability(hieraPlugin, res)) { + return; + } + + try { + await 
hieraPlugin.reload(); + + const healthStatus = await hieraPlugin.healthCheck(); + + res.json({ + success: true, + message: "Control repository reloaded successfully", + keyCount: healthStatus.details?.keyCount as number | undefined, + fileCount: healthStatus.details?.fileCount as number | undefined, + lastScan: healthStatus.details?.lastScanTime as string | undefined, + }); + } catch (error) { + res.status(500).json({ + error: { + code: HIERA_ERROR_CODES.PARSE_ERROR, + message: `Failed to reload control repository: ${error instanceof Error ? error.message : String(error)}`, + }, + }); + } + }) + ); + + // ============================================================================ + // Key Discovery Endpoints (18.2) + // ============================================================================ + + /** + * GET /api/integrations/hiera/keys + * Return all discovered Hiera keys + * + * Requirements: 14.1, 15.6 + */ + router.get( + "/keys", + asyncHandler(async (req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!checkHieraAvailability(hieraPlugin, res)) { + return; + } + + try { + const paginationParams = PaginationQuerySchema.parse(req.query); + const keyIndex = await hieraPlugin.getAllKeys(); + + // Convert Map to array of HieraKeyInfo + const keysArray: HieraKeyInfo[] = []; + for (const [name, key] of keyIndex.keys) { + keysArray.push({ + name, + locationCount: key.locations.length, + hasLookupOptions: !!key.lookupOptions, + }); + } + + // Sort alphabetically + keysArray.sort((a, b) => a.name.localeCompare(b.name)); + + // Apply pagination + const paginatedResult = paginate( + keysArray, + paginationParams.page, + paginationParams.pageSize + ); + + res.json({ + keys: paginatedResult.data, + total: paginatedResult.total, + page: paginatedResult.page, + pageSize: paginatedResult.pageSize, + totalPages: paginatedResult.totalPages, + }); + } catch (error) { + res.status(500).json({ + error: { + code: 
HIERA_ERROR_CODES.RESOLUTION_ERROR, + message: `Failed to get Hiera keys: ${error instanceof Error ? error.message : String(error)}`, + }, + }); + } + }) + ); + + /** + * GET /api/integrations/hiera/keys/search + * Search for Hiera keys by partial name + * + * Requirements: 14.1, 4.5, 7.4 + */ + router.get( + "/keys/search", + asyncHandler(async (req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!checkHieraAvailability(hieraPlugin, res)) { + return; + } + + try { + const searchParams = SearchQuerySchema.parse(req.query); + const paginationParams = PaginationQuerySchema.parse(req.query); + const query = searchParams.q || searchParams.query || ""; + + const hieraService = hieraPlugin.getHieraService(); + const matchingKeys = await hieraService.searchKeys(query); + + // Convert to HieraKeyInfo array + const keysArray: HieraKeyInfo[] = matchingKeys.map((key) => ({ + name: key.name, + locationCount: key.locations.length, + hasLookupOptions: !!key.lookupOptions, + })); + + // Apply pagination + const paginatedResult = paginate( + keysArray, + paginationParams.page, + paginationParams.pageSize + ); + + res.json({ + keys: paginatedResult.data, + query, + total: paginatedResult.total, + page: paginatedResult.page, + pageSize: paginatedResult.pageSize, + totalPages: paginatedResult.totalPages, + }); + } catch (error) { + res.status(500).json({ + error: { + code: HIERA_ERROR_CODES.RESOLUTION_ERROR, + message: `Failed to search Hiera keys: ${error instanceof Error ? 
error.message : String(error)}`, + }, + }); + } + }) + ); + + /** + * GET /api/integrations/hiera/keys/:key + * Get details for a specific Hiera key + * + * Requirements: 14.1 + */ + router.get( + "/keys/:key", + asyncHandler(async (req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!checkHieraAvailability(hieraPlugin, res)) { + return; + } + + try { + const params = KeyNameParamSchema.parse(req.params); + const hieraService = hieraPlugin.getHieraService(); + const key = await hieraService.getKey(params.key); + + if (!key) { + res.status(404).json({ + error: { + code: HIERA_ERROR_CODES.RESOLUTION_ERROR, + message: `Key '${params.key}' not found`, + }, + }); + return; + } + + res.json({ + key: { + name: key.name, + locations: key.locations, + lookupOptions: key.lookupOptions, + }, + }); + } catch (error) { + if (error instanceof z.ZodError) { + res.status(400).json({ + error: { + code: "INVALID_REQUEST", + message: "Invalid key parameter", + details: error.errors, + }, + }); + return; + } + + res.status(500).json({ + error: { + code: HIERA_ERROR_CODES.RESOLUTION_ERROR, + message: `Failed to get key details: ${error instanceof Error ? 
error.message : String(error)}`, + }, + }); + } + }) + ); + + + // ============================================================================ + // Node-Specific Endpoints (18.3) + // ============================================================================ + + /** + * GET /api/integrations/hiera/nodes/:nodeId/data + * Get all Hiera data for a specific node + * + * Requirements: 14.3, 6.2, 6.6 + */ + router.get( + "/nodes/:nodeId/data", + asyncHandler(async (req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!checkHieraAvailability(hieraPlugin, res)) { + return; + } + + try { + const params = NodeIdParamSchema.parse(req.params); + const filterParams = KeyFilterQuerySchema.parse(req.query); + const hieraService = hieraPlugin.getHieraService(); + + const nodeData = await hieraService.getNodeHieraData(params.nodeId); + + // Convert Map to array of resolution info + let keysArray: HieraResolutionInfo[] = []; + for (const [, resolution] of nodeData.keys) { + keysArray.push({ + key: resolution.key, + resolvedValue: resolution.resolvedValue, + lookupMethod: resolution.lookupMethod, + sourceFile: resolution.sourceFile, + hierarchyLevel: resolution.hierarchyLevel, + found: resolution.found, + }); + } + + // Apply filter + if (filterParams.filter === "used") { + keysArray = keysArray.filter((k) => nodeData.usedKeys.has(k.key)); + } else if (filterParams.filter === "unused") { + keysArray = keysArray.filter((k) => nodeData.unusedKeys.has(k.key)); + } + + // Sort alphabetically + keysArray.sort((a, b) => a.key.localeCompare(b.key)); + + // Get fact source info + const factService = hieraService.getFactService(); + const factSource = await factService.getFactSource(params.nodeId); + + res.json({ + nodeId: nodeData.nodeId, + keys: keysArray, + usedKeys: Array.from(nodeData.usedKeys), + unusedKeys: Array.from(nodeData.unusedKeys), + factSource, + totalKeys: keysArray.length, + }); + } catch (error) { + if (error 
instanceof z.ZodError) { + res.status(400).json({ + error: { + code: "INVALID_REQUEST", + message: "Invalid request parameters", + details: error.errors, + }, + }); + return; + } + + res.status(500).json({ + error: { + code: HIERA_ERROR_CODES.RESOLUTION_ERROR, + message: `Failed to get node Hiera data: ${error instanceof Error ? error.message : String(error)}`, + }, + }); + } + }) + ); + + /** + * GET /api/integrations/hiera/nodes/:nodeId/keys + * Get all Hiera keys for a specific node (with resolved values) + * + * Requirements: 14.2, 15.6 + */ + router.get( + "/nodes/:nodeId/keys", + asyncHandler(async (req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!checkHieraAvailability(hieraPlugin, res)) { + return; + } + + try { + const params = NodeIdParamSchema.parse(req.params); + const paginationParams = PaginationQuerySchema.parse(req.query); + const filterParams = KeyFilterQuerySchema.parse(req.query); + const hieraService = hieraPlugin.getHieraService(); + + const nodeData = await hieraService.getNodeHieraData(params.nodeId); + + // Convert Map to array of resolution info + let keysArray: HieraResolutionInfo[] = []; + for (const [, resolution] of nodeData.keys) { + keysArray.push({ + key: resolution.key, + resolvedValue: resolution.resolvedValue, + lookupMethod: resolution.lookupMethod, + sourceFile: resolution.sourceFile, + hierarchyLevel: resolution.hierarchyLevel, + found: resolution.found, + }); + } + + // Apply filter + if (filterParams.filter === "used") { + keysArray = keysArray.filter((k) => nodeData.usedKeys.has(k.key)); + } else if (filterParams.filter === "unused") { + keysArray = keysArray.filter((k) => nodeData.unusedKeys.has(k.key)); + } + + // Sort alphabetically + keysArray.sort((a, b) => a.key.localeCompare(b.key)); + + // Apply pagination + const paginatedResult = paginate( + keysArray, + paginationParams.page, + paginationParams.pageSize + ); + + res.json({ + nodeId: params.nodeId, + 
keys: paginatedResult.data, + total: paginatedResult.total, + page: paginatedResult.page, + pageSize: paginatedResult.pageSize, + totalPages: paginatedResult.totalPages, + }); + } catch (error) { + if (error instanceof z.ZodError) { + res.status(400).json({ + error: { + code: "INVALID_REQUEST", + message: "Invalid request parameters", + details: error.errors, + }, + }); + return; + } + + res.status(500).json({ + error: { + code: HIERA_ERROR_CODES.RESOLUTION_ERROR, + message: `Failed to get node keys: ${error instanceof Error ? error.message : String(error)}`, + }, + }); + } + }) + ); + + /** + * GET /api/integrations/hiera/nodes/:nodeId/keys/:key + * Resolve a specific Hiera key for a node + * + * Requirements: 14.2 + */ + router.get( + "/nodes/:nodeId/keys/:key", + asyncHandler(async (req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!checkHieraAvailability(hieraPlugin, res)) { + return; + } + + try { + const params = NodeKeyParamSchema.parse(req.params); + const hieraService = hieraPlugin.getHieraService(); + + const resolution = await hieraService.resolveKey(params.nodeId, params.key); + + res.json({ + nodeId: params.nodeId, + key: resolution.key, + resolvedValue: resolution.resolvedValue, + lookupMethod: resolution.lookupMethod, + sourceFile: resolution.sourceFile, + hierarchyLevel: resolution.hierarchyLevel, + allValues: resolution.allValues, + interpolatedVariables: resolution.interpolatedVariables, + found: resolution.found, + }); + } catch (error) { + if (error instanceof z.ZodError) { + res.status(400).json({ + error: { + code: "INVALID_REQUEST", + message: "Invalid request parameters", + details: error.errors, + }, + }); + return; + } + + res.status(500).json({ + error: { + code: HIERA_ERROR_CODES.RESOLUTION_ERROR, + message: `Failed to resolve key: ${error instanceof Error ? 
error.message : String(error)}`, + }, + }); + } + }) + ); + + + // ============================================================================ + // Global Key Lookup Endpoint (18.4) + // ============================================================================ + + /** + * GET /api/integrations/hiera/keys/:key/nodes + * Get key values across all nodes + * + * Requirements: 14.2, 7.2, 7.3, 7.5, 7.6 + */ + router.get( + "/keys/:key/nodes", + asyncHandler(async (req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!checkHieraAvailability(hieraPlugin, res)) { + return; + } + + try { + const params = KeyNameParamSchema.parse(req.params); + const paginationParams = PaginationQuerySchema.parse(req.query); + const hieraService = hieraPlugin.getHieraService(); + + // Get key values across all nodes + const keyNodeValues = await hieraService.getKeyValuesAcrossNodes(params.key); + + // Group nodes by value + const groupedByValue = hieraService.groupNodesByValue(keyNodeValues); + + // Apply pagination to the flat list + const paginatedResult = paginate( + keyNodeValues, + paginationParams.page, + paginationParams.pageSize + ); + + res.json({ + key: params.key, + nodes: paginatedResult.data, + groupedByValue, + total: paginatedResult.total, + page: paginatedResult.page, + pageSize: paginatedResult.pageSize, + totalPages: paginatedResult.totalPages, + }); + } catch (error) { + if (error instanceof z.ZodError) { + res.status(400).json({ + error: { + code: "INVALID_REQUEST", + message: "Invalid key parameter", + details: error.errors, + }, + }); + return; + } + + res.status(500).json({ + error: { + code: HIERA_ERROR_CODES.RESOLUTION_ERROR, + message: `Failed to get key values across nodes: ${error instanceof Error ? 
error.message : String(error)}`, + }, + }); + } + }) + ); + + // ============================================================================ + // Code Analysis Endpoints (18.5) + // ============================================================================ + + /** + * GET /api/integrations/hiera/analysis + * Get complete code analysis results + * + * Requirements: 14.4 + */ + router.get( + "/analysis", + asyncHandler(async (_req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!checkHieraAvailability(hieraPlugin, res)) { + return; + } + + try { + const codeAnalyzer = hieraPlugin.getCodeAnalyzer(); + const analysisResult = await codeAnalyzer.analyze(); + + res.json({ + unusedCode: analysisResult.unusedCode, + lintIssues: analysisResult.lintIssues, + moduleUpdates: analysisResult.moduleUpdates, + statistics: analysisResult.statistics, + analyzedAt: analysisResult.analyzedAt, + }); + } catch (error) { + res.status(500).json({ + error: { + code: HIERA_ERROR_CODES.ANALYSIS_ERROR, + message: `Failed to get code analysis: ${error instanceof Error ? 
error.message : String(error)}`, + }, + }); + } + }) + ); + + /** + * GET /api/integrations/hiera/analysis/unused + * Get unused code report + * + * Requirements: 14.4, 8.1, 8.2, 8.3, 8.4 + */ + router.get( + "/analysis/unused", + asyncHandler(async (_req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!checkHieraAvailability(hieraPlugin, res)) { + return; + } + + try { + const codeAnalyzer = hieraPlugin.getCodeAnalyzer(); + const unusedCode = await codeAnalyzer.getUnusedCode(); + + res.json({ + unusedClasses: unusedCode.unusedClasses, + unusedDefinedTypes: unusedCode.unusedDefinedTypes, + unusedHieraKeys: unusedCode.unusedHieraKeys, + totals: { + classes: unusedCode.unusedClasses.length, + definedTypes: unusedCode.unusedDefinedTypes.length, + hieraKeys: unusedCode.unusedHieraKeys.length, + }, + }); + } catch (error) { + res.status(500).json({ + error: { + code: HIERA_ERROR_CODES.ANALYSIS_ERROR, + message: `Failed to get unused code report: ${error instanceof Error ? 
error.message : String(error)}`, + }, + }); + } + }) + ); + + /** + * GET /api/integrations/hiera/analysis/lint + * Get lint issues with optional filtering and pagination + * + * Requirements: 14.4, 9.1, 9.2, 9.3, 9.4, 9.5 + */ + router.get( + "/analysis/lint", + asyncHandler(async (req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!checkHieraAvailability(hieraPlugin, res)) { + return; + } + + try { + const filterParams = LintFilterQuerySchema.parse(req.query); + const paginationParams = PaginationQuerySchema.parse(req.query); + const codeAnalyzer = hieraPlugin.getCodeAnalyzer(); + + let lintIssues = await codeAnalyzer.getLintIssues(); + + // Apply filters + if (filterParams.severity || filterParams.types) { + lintIssues = codeAnalyzer.filterIssues(lintIssues, { + severity: filterParams.severity as ("error" | "warning" | "info")[] | undefined, + types: filterParams.types, + }); + } + + // Get issue counts + const issueCounts = codeAnalyzer.countIssues(lintIssues); + + // Apply pagination + const paginatedResult = paginate( + lintIssues, + paginationParams.page, + paginationParams.pageSize + ); + + res.json({ + issues: paginatedResult.data, + counts: issueCounts, + total: paginatedResult.total, + page: paginatedResult.page, + pageSize: paginatedResult.pageSize, + totalPages: paginatedResult.totalPages, + }); + } catch (error) { + res.status(500).json({ + error: { + code: HIERA_ERROR_CODES.ANALYSIS_ERROR, + message: `Failed to get lint issues: ${error instanceof Error ? 
error.message : String(error)}`, + }, + }); + } + }) + ); + + /** + * GET /api/integrations/hiera/analysis/modules + * Get module update information + * + * Requirements: 14.5, 10.1, 10.2, 10.3, 10.4 + */ + router.get( + "/analysis/modules", + asyncHandler(async (_req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!checkHieraAvailability(hieraPlugin, res)) { + return; + } + + try { + const codeAnalyzer = hieraPlugin.getCodeAnalyzer(); + const moduleUpdates = await codeAnalyzer.getModuleUpdates(); + + // Separate modules with updates from up-to-date modules + const modulesWithUpdates = moduleUpdates.filter( + (m) => m.currentVersion !== m.latestVersion + ); + const upToDateModules = moduleUpdates.filter( + (m) => m.currentVersion === m.latestVersion + ); + const modulesWithSecurityAdvisories = moduleUpdates.filter( + (m) => m.hasSecurityAdvisory + ); + + res.json({ + modules: moduleUpdates, + summary: { + total: moduleUpdates.length, + withUpdates: modulesWithUpdates.length, + upToDate: upToDateModules.length, + withSecurityAdvisories: modulesWithSecurityAdvisories.length, + }, + modulesWithUpdates, + modulesWithSecurityAdvisories, + }); + } catch (error) { + res.status(500).json({ + error: { + code: HIERA_ERROR_CODES.ANALYSIS_ERROR, + message: `Failed to get module updates: ${error instanceof Error ? 
error.message : String(error)}`, + }, + }); + } + }) + ); + + /** + * GET /api/integrations/hiera/analysis/statistics + * Get usage statistics + * + * Requirements: 14.4, 11.1, 11.2, 11.3, 11.4, 11.5 + */ + router.get( + "/analysis/statistics", + asyncHandler(async (_req: Request, res: Response): Promise => { + const hieraPlugin = getHieraPlugin(integrationManager); + + if (!checkHieraAvailability(hieraPlugin, res)) { + return; + } + + try { + const codeAnalyzer = hieraPlugin.getCodeAnalyzer(); + const statistics = await codeAnalyzer.getUsageStatistics(); + + res.json({ + statistics, + }); + } catch (error) { + res.status(500).json({ + error: { + code: HIERA_ERROR_CODES.ANALYSIS_ERROR, + message: `Failed to get usage statistics: ${error instanceof Error ? error.message : String(error)}`, + }, + }); + } + }) + ); + + return router; +} diff --git a/backend/src/routes/integrations.ts b/backend/src/routes/integrations.ts index 0e4ef57..9b73795 100644 --- a/backend/src/routes/integrations.ts +++ b/backend/src/routes/integrations.ts @@ -176,6 +176,23 @@ export function createIntegrationsRouter( }); } + // Check if Hiera is not configured + if (!configuredNames.has("hiera")) { + integrations.push({ + name: "hiera", + type: "information", + status: "not_configured", + lastCheck: new Date().toISOString(), + message: "Hiera integration is not configured", + details: { + setupRequired: true, + setupUrl: "/integrations/hiera/setup", + }, + workingCapabilities: undefined, + failingCapabilities: undefined, + }); + } + res.json({ integrations, timestamp: new Date().toISOString(), @@ -540,7 +557,7 @@ export function createIntegrationsRouter( const queryParams = ReportsQuerySchema.parse(req.query); const limit = queryParams.limit || 100; // Default to 100 for summary const hoursValue = req.query.hours; - const hours = typeof hoursValue === 'string' + const hours = typeof hoursValue === 'string' ? 
parseInt(hoursValue, 10) : undefined; diff --git a/backend/src/server.ts b/backend/src/server.ts index 6214cf5..ad99fcd 100644 --- a/backend/src/server.ts +++ b/backend/src/server.ts @@ -16,12 +16,14 @@ import { createPuppetRouter } from "./routes/puppet"; import { createPackagesRouter } from "./routes/packages"; import { createStreamingRouter } from "./routes/streaming"; import { createIntegrationsRouter } from "./routes/integrations"; +import { createHieraRouter } from "./routes/hiera"; import { StreamingExecutionManager } from "./services/StreamingExecutionManager"; import { ExecutionQueue } from "./services/ExecutionQueue"; import { errorHandler, requestIdMiddleware } from "./middleware"; import { IntegrationManager } from "./integrations/IntegrationManager"; import { PuppetDBService } from "./integrations/puppetdb/PuppetDBService"; import { PuppetserverService } from "./integrations/puppetserver/PuppetserverService"; +import { HieraPlugin } from "./integrations/hiera/HieraPlugin"; import { BoltPlugin } from "./integrations/bolt"; import type { IntegrationConfig } from "./integrations/types"; @@ -300,6 +302,62 @@ async function startServer(): Promise { } console.warn("=== End Puppetserver Integration Setup ==="); + // Initialize Hiera integration only if configured + let hieraPlugin: HieraPlugin | undefined; + const hieraConfig = config.integrations.hiera; + const hieraConfigured = !!hieraConfig?.controlRepoPath; + + console.warn("=== Hiera Integration Setup ==="); + console.warn(`Hiera configured: ${String(hieraConfigured)}`); + console.warn( + `Hiera config: ${JSON.stringify(hieraConfig, null, 2)}`, + ); + + if (hieraConfigured) { + console.warn("Initializing Hiera integration..."); + try { + hieraPlugin = new HieraPlugin(); + hieraPlugin.setIntegrationManager(integrationManager); + console.warn("HieraPlugin instance created"); + + const integrationConfig: IntegrationConfig = { + enabled: hieraConfig.enabled, + name: "hiera", + type: "information", + config: 
hieraConfig, + priority: 6, // Lower priority than Puppetserver (8), higher than Bolt (5) + }; + + console.warn( + `Registering Hiera plugin with config: ${JSON.stringify(integrationConfig, null, 2)}`, + ); + integrationManager.registerPlugin( + hieraPlugin, + integrationConfig, + ); + + console.warn("Hiera integration registered successfully"); + console.warn(`- Enabled: ${String(hieraConfig.enabled)}`); + console.warn(`- Control Repo Path: ${hieraConfig.controlRepoPath}`); + console.warn(`- Hiera Config Path: ${hieraConfig.hieraConfigPath ?? "hiera.yaml"}`); + console.warn(`- Priority: 6`); + } catch (error) { + console.warn( + `WARNING: Failed to initialize Hiera integration: ${error instanceof Error ? error.message : "Unknown error"}`, + ); + if (error instanceof Error && error.stack) { + console.warn(error.stack); + } + hieraPlugin = undefined; + } + } else { + console.warn( + "Hiera integration not configured - skipping registration", + ); + console.warn("Set HIERA_CONTROL_REPO_PATH to a valid control repository to enable Hiera integration"); + } + console.warn("=== End Hiera Integration Setup ==="); + // Initialize all registered plugins console.warn("=== Initializing All Integration Plugins ==="); console.warn( @@ -494,6 +552,10 @@ async function startServer(): Promise { puppetserverService, ), ); + app.use( + "/api/integrations/hiera", + createHieraRouter(integrationManager), + ); // Serve static frontend files in production const publicPath = path.resolve(__dirname, "..", "public"); diff --git a/backend/test/integration/bolt-plugin-integration.test.ts b/backend/test/integration/bolt-plugin-integration.test.ts index 0347708..db2faa5 100644 --- a/backend/test/integration/bolt-plugin-integration.test.ts +++ b/backend/test/integration/bolt-plugin-integration.test.ts @@ -21,29 +21,29 @@ import type { Node } from "../../src/bolt/types"; async function checkBoltAvailability(): Promise { try { const { spawn } = await import("child_process"); - + return new 
Promise((resolve) => { const boltCheck = spawn("bolt", ["--version"], { stdio: "pipe" }); - + let resolved = false; - + const handleClose = (code: number | null): void => { if (!resolved) { resolved = true; resolve(code === 0); } }; - + const handleError = (): void => { if (!resolved) { resolved = true; resolve(false); } }; - + boltCheck.on("close", handleClose); boltCheck.on("error", handleError); - + // Timeout after 5 seconds setTimeout(() => { if (!resolved) { @@ -453,7 +453,7 @@ describe("Bolt Plugin Integration", () => { tempManager.registerPlugin(tempPlugin, config); expect(tempManager.getPluginCount()).toBe(1); - + // Check if plugin is actually registered const registeredPlugin = tempManager.getExecutionTool("bolt"); expect(registeredPlugin).not.toBeNull(); diff --git a/backend/test/integration/integration-status.test.ts b/backend/test/integration/integration-status.test.ts index 2a3f94a..21ecb14 100644 --- a/backend/test/integration/integration-status.test.ts +++ b/backend/test/integration/integration-status.test.ts @@ -114,8 +114,8 @@ describe("Integration Status API", () => { expect(response.body).toHaveProperty("integrations"); expect(response.body).toHaveProperty("timestamp"); expect(Array.isArray(response.body.integrations)).toBe(true); - // Now includes unconfigured Puppetserver - expect(response.body.integrations).toHaveLength(3); + // Now includes unconfigured Puppetserver and Hiera + expect(response.body.integrations).toHaveLength(4); // Check first integration const puppetdb = response.body.integrations.find( @@ -144,6 +144,15 @@ describe("Integration Status API", () => { expect(puppetserver).toBeDefined(); expect(puppetserver.type).toBe("information"); expect(puppetserver.status).toBe("not_configured"); + + // Check unconfigured Hiera + const hiera = response.body.integrations.find( + (i: { name: string }) => i.name === "hiera", + ); + expect(hiera).toBeDefined(); + expect(hiera.type).toBe("information"); + 
expect(hiera.status).toBe("not_configured"); + expect(hiera.message).toBe("Hiera integration is not configured"); }); it("should return error status for unhealthy integrations", async () => { @@ -201,8 +210,8 @@ describe("Integration Status API", () => { .get("/api/integrations/status") .expect(200); - // Should have unconfigured puppetdb, puppetserver, and bolt entries - expect(response.body.integrations).toHaveLength(3); + // Should have unconfigured puppetdb, puppetserver, bolt, and hiera entries + expect(response.body.integrations).toHaveLength(4); expect(response.body.timestamp).toBeDefined(); const puppetdb = response.body.integrations.find( @@ -224,6 +233,13 @@ describe("Integration Status API", () => { ); expect(bolt).toBeDefined(); expect(bolt.status).toBe("not_configured"); + + const hiera = response.body.integrations.find( + (i: { name: string }) => i.name === "hiera", + ); + expect(hiera).toBeDefined(); + expect(hiera.status).toBe("not_configured"); + expect(hiera.message).toBe("Hiera integration is not configured"); }); it("should use cached results by default", async () => { @@ -232,8 +248,8 @@ describe("Integration Status API", () => { .expect(200); expect(response.body.cached).toBe(true); - // Now includes unconfigured Puppetserver - expect(response.body.integrations).toHaveLength(3); + // Now includes unconfigured Puppetserver and Hiera + expect(response.body.integrations).toHaveLength(4); }); it("should refresh health checks when requested", async () => { @@ -242,8 +258,8 @@ describe("Integration Status API", () => { .expect(200); expect(response.body.cached).toBe(false); - // Now includes unconfigured Puppetserver - expect(response.body.integrations).toHaveLength(3); + // Now includes unconfigured Puppetserver and Hiera + expect(response.body.integrations).toHaveLength(4); }); }); }); diff --git a/backend/test/integrations/CodeAnalyzer.test.ts b/backend/test/integrations/CodeAnalyzer.test.ts new file mode 100644 index 0000000..644c8ce --- 
/dev/null +++ b/backend/test/integrations/CodeAnalyzer.test.ts @@ -0,0 +1,551 @@ +/** + * CodeAnalyzer Unit Tests + * + * Tests for the CodeAnalyzer class that performs static analysis + * of Puppet code in a control repository. + */ + +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import { CodeAnalyzer } from "../../src/integrations/hiera/CodeAnalyzer"; +import { HieraScanner } from "../../src/integrations/hiera/HieraScanner"; +import type { CodeAnalysisConfig } from "../../src/integrations/hiera/types"; + +describe("CodeAnalyzer", () => { + let analyzer: CodeAnalyzer; + let testDir: string; + let config: CodeAnalysisConfig; + + beforeEach(() => { + // Create a temporary test directory + testDir = fs.mkdtempSync(path.join(os.tmpdir(), "code-analyzer-test-")); + + // Create test control repo structure + createTestControlRepo(testDir); + + // Create analyzer config + config = { + enabled: true, + lintEnabled: true, + moduleUpdateCheck: true, + analysisInterval: 300, + exclusionPatterns: [], + }; + + analyzer = new CodeAnalyzer(testDir, config); + }); + + afterEach(() => { + // Clean up test directory + fs.rmSync(testDir, { recursive: true, force: true }); + }); + + describe("initialization", () => { + it("should initialize successfully with valid control repo", async () => { + await analyzer.initialize(); + + expect(analyzer.isInitialized()).toBe(true); + }); + + it("should discover classes from manifests", async () => { + await analyzer.initialize(); + + const classes = analyzer.getClasses(); + expect(classes.size).toBeGreaterThan(0); + expect(classes.has("profile::nginx")).toBe(true); + expect(classes.has("profile::base")).toBe(true); + }); + + it("should discover defined types from manifests", async () => { + await analyzer.initialize(); + + const definedTypes = analyzer.getDefinedTypes(); + expect(definedTypes.has("profile::vhost")).toBe(true); + }); + + 
it("should handle missing directories gracefully", async () => { + // Remove manifests directory + fs.rmSync(path.join(testDir, "manifests"), { recursive: true, force: true }); + + await analyzer.initialize(); + + expect(analyzer.isInitialized()).toBe(true); + expect(analyzer.getClasses().size).toBe(0); + }); + }); + + describe("analyze", () => { + beforeEach(async () => { + await analyzer.initialize(); + }); + + it("should return complete analysis result", async () => { + const result = await analyzer.analyze(); + + expect(result.unusedCode).toBeDefined(); + expect(result.lintIssues).toBeDefined(); + expect(result.moduleUpdates).toBeDefined(); + expect(result.statistics).toBeDefined(); + expect(result.analyzedAt).toBeDefined(); + }); + + it("should cache analysis results", async () => { + const result1 = await analyzer.analyze(); + const result2 = await analyzer.analyze(); + + // Should return same cached result + expect(result1.analyzedAt).toBe(result2.analyzedAt); + }); + }); + + describe("getUnusedCode", () => { + beforeEach(async () => { + await analyzer.initialize(); + }); + + it("should detect unused classes", async () => { + const unusedCode = await analyzer.getUnusedCode(); + + // profile::unused is not included anywhere + const unusedClassNames = unusedCode.unusedClasses.map((c) => c.name); + expect(unusedClassNames).toContain("profile::unused"); + }); + + it("should include file and line info for unused items", async () => { + const unusedCode = await analyzer.getUnusedCode(); + + for (const item of unusedCode.unusedClasses) { + expect(item.file).toBeDefined(); + expect(item.line).toBeGreaterThan(0); + expect(item.type).toBe("class"); + } + }); + + it("should detect unused defined types", async () => { + const unusedCode = await analyzer.getUnusedCode(); + + // profile::unused_type is not instantiated anywhere + const unusedTypeNames = unusedCode.unusedDefinedTypes.map((t) => t.name); + expect(unusedTypeNames).toContain("profile::unused_type"); + }); + + 
it("should detect unused Hiera keys when scanner is set", async () => { + // Create and initialize HieraScanner + const scanner = new HieraScanner(testDir, "data"); + await scanner.scan(); + analyzer.setHieraScanner(scanner); + + const unusedCode = await analyzer.getUnusedCode(); + + // unused_key is not referenced in any manifest + const unusedKeyNames = unusedCode.unusedHieraKeys.map((k) => k.name); + expect(unusedKeyNames).toContain("unused_key"); + }); + }); + + describe("exclusion patterns", () => { + it("should exclude items matching exclusion patterns", async () => { + // Create analyzer with exclusion patterns + const configWithExclusions: CodeAnalysisConfig = { + ...config, + exclusionPatterns: ["profile::unused*"], + }; + const analyzerWithExclusions = new CodeAnalyzer(testDir, configWithExclusions); + await analyzerWithExclusions.initialize(); + + const unusedCode = await analyzerWithExclusions.getUnusedCode(); + + // profile::unused should be excluded + const unusedClassNames = unusedCode.unusedClasses.map((c) => c.name); + expect(unusedClassNames).not.toContain("profile::unused"); + }); + + it("should support wildcard patterns", async () => { + const configWithExclusions: CodeAnalysisConfig = { + ...config, + exclusionPatterns: ["*::unused*"], + }; + const analyzerWithExclusions = new CodeAnalyzer(testDir, configWithExclusions); + await analyzerWithExclusions.initialize(); + + const unusedCode = await analyzerWithExclusions.getUnusedCode(); + + // Both profile::unused and profile::unused_type should be excluded + const unusedClassNames = unusedCode.unusedClasses.map((c) => c.name); + const unusedTypeNames = unusedCode.unusedDefinedTypes.map((t) => t.name); + expect(unusedClassNames).not.toContain("profile::unused"); + expect(unusedTypeNames).not.toContain("profile::unused_type"); + }); + }); + + describe("getLintIssues", () => { + beforeEach(async () => { + await analyzer.initialize(); + }); + + it("should detect lint issues", async () => { + const 
issues = await analyzer.getLintIssues(); + + expect(issues.length).toBeGreaterThan(0); + }); + + it("should include file, line, and severity for each issue", async () => { + const issues = await analyzer.getLintIssues(); + + for (const issue of issues) { + expect(issue.file).toBeDefined(); + expect(issue.line).toBeGreaterThan(0); + expect(["error", "warning", "info"]).toContain(issue.severity); + expect(issue.message).toBeDefined(); + expect(issue.rule).toBeDefined(); + } + }); + + it("should detect trailing whitespace", async () => { + const issues = await analyzer.getLintIssues(); + + const trailingWhitespaceIssues = issues.filter( + (i) => i.rule === "trailing_whitespace" + ); + expect(trailingWhitespaceIssues.length).toBeGreaterThan(0); + }); + }); + + describe("filterIssues", () => { + beforeEach(async () => { + await analyzer.initialize(); + }); + + it("should filter by severity", async () => { + const allIssues = await analyzer.getLintIssues(); + const warningsOnly = analyzer.filterIssues(allIssues, { + severity: ["warning"], + }); + + expect(warningsOnly.every((i) => i.severity === "warning")).toBe(true); + }); + + it("should filter by type", async () => { + const allIssues = await analyzer.getLintIssues(); + const trailingOnly = analyzer.filterIssues(allIssues, { + types: ["trailing_whitespace"], + }); + + expect(trailingOnly.every((i) => i.rule === "trailing_whitespace")).toBe(true); + }); + + it("should combine filters", async () => { + const allIssues = await analyzer.getLintIssues(); + const filtered = analyzer.filterIssues(allIssues, { + severity: ["warning"], + types: ["trailing_whitespace"], + }); + + expect( + filtered.every( + (i) => i.severity === "warning" && i.rule === "trailing_whitespace" + ) + ).toBe(true); + }); + }); + + describe("countIssues", () => { + beforeEach(async () => { + await analyzer.initialize(); + }); + + it("should count issues by severity", async () => { + const issues = await analyzer.getLintIssues(); + const counts = 
analyzer.countIssues(issues); + + expect(counts.bySeverity).toBeDefined(); + expect(typeof counts.bySeverity.error).toBe("number"); + expect(typeof counts.bySeverity.warning).toBe("number"); + expect(typeof counts.bySeverity.info).toBe("number"); + }); + + it("should count issues by rule", async () => { + const issues = await analyzer.getLintIssues(); + const counts = analyzer.countIssues(issues); + + expect(counts.byRule).toBeDefined(); + expect(counts.total).toBe(issues.length); + }); + + it("should have correct total", async () => { + const issues = await analyzer.getLintIssues(); + const counts = analyzer.countIssues(issues); + + const severityTotal = + counts.bySeverity.error + + counts.bySeverity.warning + + counts.bySeverity.info; + expect(severityTotal).toBe(counts.total); + }); + }); + + describe("getUsageStatistics", () => { + beforeEach(async () => { + await analyzer.initialize(); + }); + + it("should return usage statistics", async () => { + const stats = await analyzer.getUsageStatistics(); + + expect(stats.totalManifests).toBeGreaterThan(0); + expect(stats.totalClasses).toBeGreaterThan(0); + expect(stats.linesOfCode).toBeGreaterThan(0); + }); + + it("should count classes correctly", async () => { + const stats = await analyzer.getUsageStatistics(); + + expect(stats.totalClasses).toBe(analyzer.getClasses().size); + }); + + it("should count defined types correctly", async () => { + const stats = await analyzer.getUsageStatistics(); + + expect(stats.totalDefinedTypes).toBe(analyzer.getDefinedTypes().size); + }); + + it("should rank classes by usage frequency", async () => { + const stats = await analyzer.getUsageStatistics(); + + // Verify mostUsedClasses is sorted by usageCount descending + for (let i = 1; i < stats.mostUsedClasses.length; i++) { + expect(stats.mostUsedClasses[i - 1].usageCount).toBeGreaterThanOrEqual( + stats.mostUsedClasses[i].usageCount + ); + } + }); + + it("should rank resources by count", async () => { + const stats = await 
analyzer.getUsageStatistics(); + + // Verify mostUsedResources is sorted by count descending + for (let i = 1; i < stats.mostUsedResources.length; i++) { + expect(stats.mostUsedResources[i - 1].count).toBeGreaterThanOrEqual( + stats.mostUsedResources[i].count + ); + } + }); + + it("should include class usage information", async () => { + const stats = await analyzer.getUsageStatistics(); + + // profile::base is included by profile::nginx + const baseClass = stats.mostUsedClasses.find(c => c.name === "profile::base"); + expect(baseClass).toBeDefined(); + expect(baseClass?.usageCount).toBeGreaterThan(0); + }); + + it("should include resource usage information", async () => { + const stats = await analyzer.getUsageStatistics(); + + // package and service resources are used in the test manifests + const packageResource = stats.mostUsedResources.find(r => r.type === "package"); + expect(packageResource).toBeDefined(); + expect(packageResource?.count).toBeGreaterThan(0); + }); + + it("should count manifests correctly", async () => { + const stats = await analyzer.getUsageStatistics(); + + // We created 5 manifest files in the test setup + expect(stats.totalManifests).toBe(5); + }); + + it("should calculate lines of code", async () => { + const stats = await analyzer.getUsageStatistics(); + + // Lines of code should be positive and reasonable + expect(stats.linesOfCode).toBeGreaterThan(0); + expect(stats.linesOfCode).toBeLessThan(1000); // Sanity check for test data + }); + + it("should count functions when present", async () => { + const stats = await analyzer.getUsageStatistics(); + + // totalFunctions should be a number (may be 0 if no functions in test repo) + expect(typeof stats.totalFunctions).toBe("number"); + expect(stats.totalFunctions).toBeGreaterThanOrEqual(0); + }); + }); + + describe("getModuleUpdates", () => { + beforeEach(async () => { + await analyzer.initialize(); + }); + + it("should parse Puppetfile modules", async () => { + const updates = await 
analyzer.getModuleUpdates(); + + expect(updates.length).toBeGreaterThan(0); + }); + + it("should extract module names and versions", async () => { + const updates = await analyzer.getModuleUpdates(); + + const stdlibModule = updates.find((m) => m.name.includes("stdlib")); + expect(stdlibModule).toBeDefined(); + expect(stdlibModule?.currentVersion).toBe("8.0.0"); + }); + + it("should identify forge vs git modules", async () => { + const updates = await analyzer.getModuleUpdates(); + + const forgeModules = updates.filter((m) => m.source === "forge"); + const gitModules = updates.filter((m) => m.source === "git"); + + expect(forgeModules.length).toBeGreaterThan(0); + expect(gitModules.length).toBeGreaterThan(0); + }); + }); + + describe("cache management", () => { + beforeEach(async () => { + await analyzer.initialize(); + }); + + it("should clear cache", async () => { + // Populate cache + await analyzer.analyze(); + + // Clear cache + analyzer.clearCache(); + + // Next analysis should have different timestamp + const result1 = await analyzer.analyze(); + analyzer.clearCache(); + + // Small delay to ensure different timestamp + await new Promise((resolve) => setTimeout(resolve, 10)); + + const result2 = await analyzer.analyze(); + + expect(result1.analyzedAt).not.toBe(result2.analyzedAt); + }); + + it("should reload analyzer", async () => { + const classesBefore = analyzer.getClasses().size; + + await analyzer.reload(); + + const classesAfter = analyzer.getClasses().size; + expect(classesAfter).toBe(classesBefore); + }); + }); + + describe("error handling", () => { + it("should throw error when not initialized", async () => { + await expect(analyzer.analyze()).rejects.toThrow("not initialized"); + }); + }); +}); + +/** + * Create a test control repository structure + */ +function createTestControlRepo(testDir: string): void { + // Create directories + fs.mkdirSync(path.join(testDir, "manifests", "profile"), { recursive: true }); + fs.mkdirSync(path.join(testDir, 
"data"), { recursive: true }); + + // Create profile::nginx class + const nginxManifest = ` +# @summary Manages nginx configuration +class profile::nginx ( + Integer $port = 80, + Integer $workers = 4, +) { + include profile::base + + package { 'nginx': + ensure => present, + } + + service { 'nginx': + ensure => running, + } +} +`; + fs.writeFileSync(path.join(testDir, "manifests", "profile", "nginx.pp"), nginxManifest); + + // Create profile::base class + const baseManifest = ` +class profile::base { + package { 'vim': + ensure => present, + } +} +`; + fs.writeFileSync(path.join(testDir, "manifests", "profile", "base.pp"), baseManifest); + + // Create profile::unused class (not included anywhere) + const unusedManifest = ` +class profile::unused { + notify { 'unused': } +} +`; + fs.writeFileSync(path.join(testDir, "manifests", "profile", "unused.pp"), unusedManifest); + + // Create profile::vhost defined type + const vhostManifest = ` +define profile::vhost ( + String $docroot, + Integer $port = 80, +) { + file { "/etc/nginx/sites-available/\${title}": + ensure => file, + content => "server { listen \${port}; root \${docroot}; }", + } +} +`; + fs.writeFileSync(path.join(testDir, "manifests", "profile", "vhost.pp"), vhostManifest); + + // Create profile::unused_type defined type (not instantiated anywhere) + const unusedTypeManifest = ` +define profile::unused_type ( + String $param, +) { + notify { "unused_type: \${title}": } +} +`; + fs.writeFileSync(path.join(testDir, "manifests", "profile", "unused_type.pp"), unusedTypeManifest); + + // Create hieradata + const commonData = ` +profile::nginx::port: 8080 +profile::nginx::workers: 4 +unused_key: "this key is not used" +`; + fs.writeFileSync(path.join(testDir, "data", "common.yaml"), commonData); + + // Create hiera.yaml + const hieraConfig = ` +version: 5 +defaults: + datadir: data + data_hash: yaml_data +hierarchy: + - name: "Common data" + path: "common.yaml" +`; + fs.writeFileSync(path.join(testDir, 
"hiera.yaml"), hieraConfig); + + // Create Puppetfile + const puppetfile = ` +forge 'https://forge.puppet.com' + +mod 'puppetlabs/stdlib', '8.0.0' +mod 'puppetlabs/concat', '7.0.0' + +mod 'custom_module', + :git => 'https://github.com/example/custom_module.git', + :tag => 'v1.0.0' +`; + fs.writeFileSync(path.join(testDir, "Puppetfile"), puppetfile); +} diff --git a/backend/test/integrations/FactService.test.ts b/backend/test/integrations/FactService.test.ts new file mode 100644 index 0000000..9bbcf85 --- /dev/null +++ b/backend/test/integrations/FactService.test.ts @@ -0,0 +1,388 @@ +/** + * FactService Unit Tests + */ + +import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"; +import * as fs from "fs"; +import * as path from "path"; +import { FactService } from "../../src/integrations/hiera/FactService"; +import type { IntegrationManager } from "../../src/integrations/IntegrationManager"; +import type { InformationSourcePlugin } from "../../src/integrations/types"; +import type { Facts } from "../../src/bolt/types"; + +// Mock fs module +vi.mock("fs"); + +describe("FactService", () => { + let factService: FactService; + let mockIntegrationManager: IntegrationManager; + let mockPuppetDBSource: InformationSourcePlugin; + + const testNodeId = "node1.example.com"; + const testLocalFactsPath = "/tmp/facts"; + + beforeEach(() => { + vi.clearAllMocks(); + + // Create mock PuppetDB source + mockPuppetDBSource = { + name: "puppetdb", + type: "information", + isInitialized: vi.fn().mockReturnValue(true), + getNodeFacts: vi.fn(), + getInventory: vi.fn().mockResolvedValue([]), + getNodeData: vi.fn(), + initialize: vi.fn(), + healthCheck: vi.fn(), + getConfig: vi.fn(), + } as unknown as InformationSourcePlugin; + + // Create mock integration manager + mockIntegrationManager = { + getInformationSource: vi.fn().mockReturnValue(mockPuppetDBSource), + } as unknown as IntegrationManager; + + factService = new FactService(mockIntegrationManager, { + preferPuppetDB: 
true, + localFactsPath: testLocalFactsPath, + }); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + describe("getFacts", () => { + it("should return facts from PuppetDB when available", async () => { + const puppetDBFacts: Facts = { + nodeId: testNodeId, + gatheredAt: "2024-01-01T00:00:00Z", + facts: { + os: { + family: "RedHat", + name: "CentOS", + release: { full: "7.9", major: "7" }, + }, + processors: { count: 4, models: ["Intel Xeon"] }, + memory: { system: { total: "16 GB", available: "8 GB" } }, + networking: { hostname: "node1", interfaces: {} }, + }, + }; + + (mockPuppetDBSource.getNodeFacts as ReturnType).mockResolvedValue(puppetDBFacts); + + const result = await factService.getFacts(testNodeId); + + expect(result.source).toBe("puppetdb"); + expect(result.facts).toEqual(puppetDBFacts); + expect(result.warnings).toBeUndefined(); + }); + + it("should fall back to local facts when PuppetDB fails", async () => { + (mockPuppetDBSource.getNodeFacts as ReturnType).mockRejectedValue( + new Error("PuppetDB error") + ); + + const localFactContent = JSON.stringify({ + name: testNodeId, + values: { + os: { + family: "Debian", + name: "Ubuntu", + release: { full: "20.04", major: "20" }, + }, + processors: { count: 2, models: ["AMD EPYC"] }, + memory: { system: { total: "8 GB", available: "4 GB" } }, + networking: { hostname: "node1", interfaces: {} }, + }, + }); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(localFactContent); + + const result = await factService.getFacts(testNodeId); + + expect(result.source).toBe("local"); + expect(result.facts.facts.os.family).toBe("Debian"); + expect(result.warnings).toContain("Using local fact files - facts may be outdated"); + }); + + it("should return empty facts with warning when no facts available", async () => { + (mockPuppetDBSource.getNodeFacts as ReturnType).mockRejectedValue( + new Error("Node not found") + ); + vi.mocked(fs.existsSync).mockReturnValue(false); 
+ + const result = await factService.getFacts(testNodeId); + + expect(result.source).toBe("local"); + expect(result.facts.facts.os.family).toBe("Unknown"); + expect(result.warnings).toContain(`No facts available for node '${testNodeId}'`); + }); + + it("should return empty facts when PuppetDB not initialized and no local facts", async () => { + (mockPuppetDBSource.isInitialized as ReturnType).mockReturnValue(false); + vi.mocked(fs.existsSync).mockReturnValue(false); + + const result = await factService.getFacts(testNodeId); + + expect(result.source).toBe("local"); + expect(result.warnings).toContain(`No facts available for node '${testNodeId}'`); + }); + }); + + describe("local fact file parsing", () => { + it("should parse Puppetserver format with name and values", async () => { + (mockPuppetDBSource.isInitialized as ReturnType).mockReturnValue(false); + + const localFactContent = JSON.stringify({ + name: testNodeId, + values: { + os: { + family: "RedHat", + name: "CentOS", + release: { full: "8.5", major: "8" }, + }, + processors: { count: 8, models: ["Intel Core i7"] }, + memory: { system: { total: "32 GB", available: "16 GB" } }, + networking: { + hostname: "node1", + fqdn: "node1.example.com", + interfaces: { eth0: { ip: "192.168.1.100" } }, + }, + custom_fact: "custom_value", + }, + }); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(localFactContent); + + const result = await factService.getFacts(testNodeId); + + expect(result.source).toBe("local"); + expect(result.facts.facts.os.family).toBe("RedHat"); + expect(result.facts.facts.os.name).toBe("CentOS"); + expect(result.facts.facts.processors.count).toBe(8); + expect(result.facts.facts.networking.hostname).toBe("node1"); + expect(result.facts.facts.custom_fact).toBe("custom_value"); + }); + + it("should parse flat fact structure", async () => { + (mockPuppetDBSource.isInitialized as ReturnType).mockReturnValue(false); + + const flatFactContent = 
JSON.stringify({ + os: { + family: "Debian", + name: "Ubuntu", + release: { full: "22.04", major: "22" }, + }, + processors: { count: 4, models: ["ARM Cortex"] }, + memory: { system: { total: "4 GB", available: "2 GB" } }, + networking: { hostname: "node2", interfaces: {} }, + }); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(flatFactContent); + + const result = await factService.getFacts(testNodeId); + + expect(result.source).toBe("local"); + expect(result.facts.facts.os.family).toBe("Debian"); + expect(result.facts.facts.os.name).toBe("Ubuntu"); + }); + + it("should provide default values for missing required fields", async () => { + (mockPuppetDBSource.isInitialized as ReturnType).mockReturnValue(false); + + const minimalFactContent = JSON.stringify({ + name: testNodeId, + values: { + custom_fact: "value", + }, + }); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(minimalFactContent); + + const result = await factService.getFacts(testNodeId); + + expect(result.source).toBe("local"); + expect(result.facts.facts.os.family).toBe("Unknown"); + expect(result.facts.facts.os.name).toBe("Unknown"); + expect(result.facts.facts.processors.count).toBe(0); + expect(result.facts.facts.memory.system.total).toBe("Unknown"); + expect(result.facts.facts.networking.hostname).toBe("Unknown"); + expect(result.facts.facts.custom_fact).toBe("value"); + }); + + it("should handle invalid JSON gracefully", async () => { + (mockPuppetDBSource.isInitialized as ReturnType).mockReturnValue(false); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue("invalid json {"); + + const result = await factService.getFacts(testNodeId); + + // Should return empty facts with warning + expect(result.source).toBe("local"); + expect(result.warnings).toContain(`No facts available for node '${testNodeId}'`); + }); + }); + + describe("getFactSource", () => { + 
it("should return puppetdb when PuppetDB has facts", async () => { + const puppetDBFacts: Facts = { + nodeId: testNodeId, + gatheredAt: "2024-01-01T00:00:00Z", + facts: { + os: { family: "RedHat", name: "CentOS", release: { full: "7", major: "7" } }, + processors: { count: 1, models: [] }, + memory: { system: { total: "1 GB", available: "1 GB" } }, + networking: { hostname: "node1", interfaces: {} }, + }, + }; + + (mockPuppetDBSource.getNodeFacts as ReturnType).mockResolvedValue(puppetDBFacts); + + const source = await factService.getFactSource(testNodeId); + + expect(source).toBe("puppetdb"); + }); + + it("should return local when only local facts available", async () => { + (mockPuppetDBSource.getNodeFacts as ReturnType).mockRejectedValue( + new Error("Not found") + ); + vi.mocked(fs.existsSync).mockReturnValue(true); + + const source = await factService.getFactSource(testNodeId); + + expect(source).toBe("local"); + }); + + it("should return none when no facts available", async () => { + (mockPuppetDBSource.getNodeFacts as ReturnType).mockRejectedValue( + new Error("Not found") + ); + vi.mocked(fs.existsSync).mockReturnValue(false); + + const source = await factService.getFactSource(testNodeId); + + expect(source).toBe("none"); + }); + }); + + describe("listAvailableNodes", () => { + it("should combine nodes from PuppetDB and local files", async () => { + (mockPuppetDBSource.getInventory as ReturnType).mockResolvedValue([ + { id: "node1.example.com" }, + { id: "node2.example.com" }, + ]); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readdirSync).mockReturnValue([ + "node2.example.com.json", + "node3.example.com.json", + ] as unknown as fs.Dirent[]); + + const nodes = await factService.listAvailableNodes(); + + expect(nodes).toContain("node1.example.com"); + expect(nodes).toContain("node2.example.com"); + expect(nodes).toContain("node3.example.com"); + expect(nodes).toHaveLength(3); // Deduplicated + }); + + it("should handle PuppetDB errors 
gracefully", async () => { + (mockPuppetDBSource.getInventory as ReturnType).mockRejectedValue( + new Error("Connection failed") + ); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readdirSync).mockReturnValue([ + "node1.example.com.json", + ] as unknown as fs.Dirent[]); + + const nodes = await factService.listAvailableNodes(); + + expect(nodes).toContain("node1.example.com"); + expect(nodes).toHaveLength(1); + }); + }); + + describe("fact source priority", () => { + it("should prefer PuppetDB when preferPuppetDB is true", async () => { + const puppetDBFacts: Facts = { + nodeId: testNodeId, + gatheredAt: "2024-01-01T00:00:00Z", + facts: { + os: { family: "RedHat", name: "CentOS", release: { full: "7", major: "7" } }, + processors: { count: 1, models: [] }, + memory: { system: { total: "1 GB", available: "1 GB" } }, + networking: { hostname: "node1", interfaces: {} }, + }, + }; + + (mockPuppetDBSource.getNodeFacts as ReturnType).mockResolvedValue(puppetDBFacts); + + // Local facts also available + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify({ + name: testNodeId, + values: { + os: { family: "Debian", name: "Ubuntu", release: { full: "20.04", major: "20" } }, + }, + })); + + const result = await factService.getFacts(testNodeId); + + expect(result.source).toBe("puppetdb"); + expect(result.facts.facts.os.family).toBe("RedHat"); + }); + + it("should prefer local facts when preferPuppetDB is false", async () => { + factService.setPreferPuppetDB(false); + + const localFactContent = JSON.stringify({ + name: testNodeId, + values: { + os: { family: "Debian", name: "Ubuntu", release: { full: "20.04", major: "20" } }, + processors: { count: 2, models: [] }, + memory: { system: { total: "2 GB", available: "1 GB" } }, + networking: { hostname: "node1", interfaces: {} }, + }, + }); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(localFactContent); + 
+ const result = await factService.getFacts(testNodeId); + + expect(result.source).toBe("local"); + expect(result.facts.facts.os.family).toBe("Debian"); + }); + + it("should fall back to PuppetDB when local facts unavailable and preferPuppetDB is false", async () => { + factService.setPreferPuppetDB(false); + + vi.mocked(fs.existsSync).mockReturnValue(false); + + const puppetDBFacts: Facts = { + nodeId: testNodeId, + gatheredAt: "2024-01-01T00:00:00Z", + facts: { + os: { family: "RedHat", name: "CentOS", release: { full: "7", major: "7" } }, + processors: { count: 1, models: [] }, + memory: { system: { total: "1 GB", available: "1 GB" } }, + networking: { hostname: "node1", interfaces: {} }, + }, + }; + + (mockPuppetDBSource.getNodeFacts as ReturnType).mockResolvedValue(puppetDBFacts); + + const result = await factService.getFacts(testNodeId); + + expect(result.source).toBe("puppetdb"); + }); + }); +}); diff --git a/backend/test/integrations/ForgeClient.test.ts b/backend/test/integrations/ForgeClient.test.ts new file mode 100644 index 0000000..96b02fe --- /dev/null +++ b/backend/test/integrations/ForgeClient.test.ts @@ -0,0 +1,259 @@ +/** + * ForgeClient Unit Tests + * + * Tests for the ForgeClient class that queries Puppet Forge API + * for module information and security advisories. 
+ */ + +import { describe, it, expect, beforeEach } from "vitest"; +import { ForgeClient } from "../../src/integrations/hiera/ForgeClient"; +import type { ParsedModule } from "../../src/integrations/hiera/PuppetfileParser"; + +describe("ForgeClient", () => { + let client: ForgeClient; + + beforeEach(() => { + client = new ForgeClient(); + }); + + describe("isNewerVersion", () => { + it("should detect newer major version", () => { + expect(client.isNewerVersion("2.0.0", "1.0.0")).toBe(true); + expect(client.isNewerVersion("1.0.0", "2.0.0")).toBe(false); + }); + + it("should detect newer minor version", () => { + expect(client.isNewerVersion("1.2.0", "1.1.0")).toBe(true); + expect(client.isNewerVersion("1.1.0", "1.2.0")).toBe(false); + }); + + it("should detect newer patch version", () => { + expect(client.isNewerVersion("1.0.2", "1.0.1")).toBe(true); + expect(client.isNewerVersion("1.0.1", "1.0.2")).toBe(false); + }); + + it("should handle equal versions", () => { + expect(client.isNewerVersion("1.0.0", "1.0.0")).toBe(false); + }); + + it("should handle versions with v prefix", () => { + expect(client.isNewerVersion("v2.0.0", "v1.0.0")).toBe(true); + expect(client.isNewerVersion("v1.0.0", "v2.0.0")).toBe(false); + }); + + it("should handle special version strings", () => { + expect(client.isNewerVersion("2.0.0", "latest")).toBe(false); + expect(client.isNewerVersion("2.0.0", "HEAD")).toBe(false); + expect(client.isNewerVersion("2.0.0", "local")).toBe(false); + }); + + it("should handle versions with pre-release tags", () => { + expect(client.isNewerVersion("2.0.0", "1.0.0-rc1")).toBe(true); + expect(client.isNewerVersion("1.0.0-rc2", "1.0.0-rc1")).toBe(false); // Same numeric part + }); + + it("should handle versions with different segment counts", () => { + expect(client.isNewerVersion("1.0.0.1", "1.0.0")).toBe(true); + expect(client.isNewerVersion("1.0.0", "1.0.0.1")).toBe(false); + }); + }); + + describe("addSecurityAdvisory", () => { + it("should add security 
advisory for a module", () => { + client.addSecurityAdvisory("puppetlabs/apache", { + id: "CVE-2023-1234", + title: "Test vulnerability", + severity: "high", + affectedVersions: "< 2.0.0", + fixedVersion: "2.0.0", + description: "Test description", + publishedAt: "2023-01-01", + }); + + const advisories = client.getSecurityAdvisories("puppetlabs/apache", "1.0.0"); + expect(advisories).toHaveLength(1); + expect(advisories[0].id).toBe("CVE-2023-1234"); + }); + + it("should handle multiple advisories for same module", () => { + client.addSecurityAdvisory("puppetlabs/apache", { + id: "CVE-2023-1234", + title: "First vulnerability", + severity: "high", + affectedVersions: "< 2.0.0", + description: "Test", + publishedAt: "2023-01-01", + }); + + client.addSecurityAdvisory("puppetlabs/apache", { + id: "CVE-2023-5678", + title: "Second vulnerability", + severity: "medium", + affectedVersions: "< 3.0.0", + description: "Test", + publishedAt: "2023-06-01", + }); + + const advisories = client.getSecurityAdvisories("puppetlabs/apache"); + expect(advisories).toHaveLength(2); + }); + }); + + describe("getSecurityAdvisories", () => { + beforeEach(() => { + client.addSecurityAdvisory("puppetlabs/apache", { + id: "CVE-2023-1234", + title: "Test vulnerability", + severity: "high", + affectedVersions: "< 2.0.0", + fixedVersion: "2.0.0", + description: "Test description", + publishedAt: "2023-01-01", + }); + }); + + it("should return advisories for affected version", () => { + const advisories = client.getSecurityAdvisories("puppetlabs/apache", "1.5.0"); + expect(advisories).toHaveLength(1); + }); + + it("should not return advisories for fixed version", () => { + const advisories = client.getSecurityAdvisories("puppetlabs/apache", "2.0.0"); + expect(advisories).toHaveLength(0); + }); + + it("should not return advisories for version after fix", () => { + const advisories = client.getSecurityAdvisories("puppetlabs/apache", "3.0.0"); + expect(advisories).toHaveLength(0); + }); + + 
it("should return all advisories when no version specified", () => { + const advisories = client.getSecurityAdvisories("puppetlabs/apache"); + expect(advisories).toHaveLength(1); + }); + + it("should return empty array for unknown module", () => { + const advisories = client.getSecurityAdvisories("unknown/module", "1.0.0"); + expect(advisories).toHaveLength(0); + }); + + it("should normalize module slug format", () => { + const advisories1 = client.getSecurityAdvisories("puppetlabs/apache", "1.0.0"); + const advisories2 = client.getSecurityAdvisories("puppetlabs-apache", "1.0.0"); + expect(advisories1).toHaveLength(1); + expect(advisories2).toHaveLength(1); + }); + }); + + describe("toModuleUpdates", () => { + it("should convert update results to ModuleUpdate format", () => { + const results = [ + { + module: { + name: "puppetlabs/stdlib", + version: "8.0.0", + source: "forge" as const, + line: 1, + }, + currentVersion: "8.0.0", + latestVersion: "9.0.0", + hasUpdate: true, + deprecated: false, + }, + ]; + + const updates = client.toModuleUpdates(results); + + expect(updates).toHaveLength(1); + expect(updates[0].name).toBe("puppetlabs/stdlib"); + expect(updates[0].currentVersion).toBe("8.0.0"); + expect(updates[0].latestVersion).toBe("9.0.0"); + expect(updates[0].hasSecurityAdvisory).toBe(false); + }); + + it("should include deprecation info in changelog", () => { + const results = [ + { + module: { + name: "old/module", + version: "1.0.0", + source: "forge" as const, + line: 1, + }, + currentVersion: "1.0.0", + latestVersion: "1.0.0", + hasUpdate: false, + deprecated: true, + deprecatedFor: "Use new/module instead", + supersededBy: "new/module", + }, + ]; + + const updates = client.toModuleUpdates(results); + + expect(updates[0].changelog).toContain("Deprecated"); + expect(updates[0].changelog).toContain("Use new/module instead"); + expect(updates[0].changelog).toContain("Superseded by new/module"); + }); + + it("should include security advisory info", () => { + 
const results = [ + { + module: { + name: "puppetlabs/apache", + version: "1.0.0", + source: "forge" as const, + line: 1, + }, + currentVersion: "1.0.0", + latestVersion: "2.0.0", + hasUpdate: true, + deprecated: false, + securityStatus: { + moduleSlug: "puppetlabs-apache", + hasAdvisories: true, + advisories: [ + { + id: "CVE-2023-1234", + title: "Critical vulnerability", + severity: "critical" as const, + affectedVersions: "< 2.0.0", + description: "Test", + publishedAt: "2023-01-01", + }, + ], + deprecated: false, + }, + }, + ]; + + const updates = client.toModuleUpdates(results); + + expect(updates[0].hasSecurityAdvisory).toBe(true); + expect(updates[0].changelog).toContain("Security"); + expect(updates[0].changelog).toContain("CRITICAL"); + expect(updates[0].changelog).toContain("Critical vulnerability"); + }); + }); + + describe("checkForUpdates", () => { + it("should handle git modules without forge check", async () => { + const modules: ParsedModule[] = [ + { + name: "custom_module", + version: "v1.0.0", + source: "git", + gitUrl: "https://github.com/example/custom.git", + gitTag: "v1.0.0", + line: 1, + }, + ]; + + const results = await client.checkForUpdates(modules); + + expect(results).toHaveLength(1); + expect(results[0].module.name).toBe("custom_module"); + expect(results[0].hasUpdate).toBe(false); + }); + }); +}); diff --git a/backend/test/integrations/HieraParser.test.ts b/backend/test/integrations/HieraParser.test.ts new file mode 100644 index 0000000..e637c8e --- /dev/null +++ b/backend/test/integrations/HieraParser.test.ts @@ -0,0 +1,499 @@ +/** + * HieraParser Unit Tests + */ + +import { describe, it, expect, beforeEach } from "vitest"; +import { HieraParser } from "../../src/integrations/hiera/HieraParser"; +import type { Facts, HieraConfig } from "../../src/integrations/hiera/types"; + +describe("HieraParser", () => { + let parser: HieraParser; + + beforeEach(() => { + parser = new HieraParser("/tmp/test-control-repo"); + }); + + 
describe("parseContent", () => { + it("should parse a valid Hiera 5 configuration", () => { + const content = ` +version: 5 +defaults: + datadir: data + data_hash: yaml_data +hierarchy: + - name: "Per-node data" + path: "nodes/%{facts.networking.fqdn}.yaml" + - name: "Per-OS defaults" + path: "os/%{facts.os.family}.yaml" + - name: "Common data" + path: "common.yaml" +`; + + const result = parser.parseContent(content); + + expect(result.success).toBe(true); + expect(result.config).toBeDefined(); + expect(result.config?.version).toBe(5); + expect(result.config?.hierarchy).toHaveLength(3); + expect(result.config?.hierarchy[0].name).toBe("Per-node data"); + expect(result.config?.hierarchy[0].path).toBe("nodes/%{facts.networking.fqdn}.yaml"); + expect(result.config?.defaults?.datadir).toBe("data"); + expect(result.config?.defaults?.data_hash).toBe("yaml_data"); + }); + + it("should parse hierarchy with multiple paths", () => { + const content = ` +version: 5 +hierarchy: + - name: "Multiple paths" + paths: + - "nodes/%{facts.networking.fqdn}.yaml" + - "nodes/%{facts.networking.hostname}.yaml" +`; + + const result = parser.parseContent(content); + + expect(result.success).toBe(true); + expect(result.config?.hierarchy[0].paths).toEqual([ + "nodes/%{facts.networking.fqdn}.yaml", + "nodes/%{facts.networking.hostname}.yaml", + ]); + }); + + + it("should parse hierarchy with glob patterns", () => { + const content = ` +version: 5 +hierarchy: + - name: "Glob pattern" + glob: "nodes/*.yaml" + - name: "Multiple globs" + globs: + - "environments/*.yaml" + - "roles/*.yaml" +`; + + const result = parser.parseContent(content); + + expect(result.success).toBe(true); + expect(result.config?.hierarchy[0].glob).toBe("nodes/*.yaml"); + expect(result.config?.hierarchy[1].globs).toEqual([ + "environments/*.yaml", + "roles/*.yaml", + ]); + }); + + it("should parse hierarchy with mapped_paths", () => { + const content = ` +version: 5 +hierarchy: + - name: "Mapped paths" + mapped_paths: + - 
"facts.networking.interfaces" + - "interface" + - "interfaces/%{interface}.yaml" +`; + + const result = parser.parseContent(content); + + expect(result.success).toBe(true); + expect(result.config?.hierarchy[0].mapped_paths).toEqual([ + "facts.networking.interfaces", + "interface", + "interfaces/%{interface}.yaml", + ]); + }); + + it("should detect yaml backend", () => { + const content = ` +version: 5 +defaults: + data_hash: yaml_data +hierarchy: + - name: "Common" + path: "common.yaml" +`; + + const result = parser.parseContent(content); + expect(result.success).toBe(true); + + const backend = parser.detectBackend(result.config!.hierarchy[0], result.config!.defaults); + expect(backend.type).toBe("yaml"); + }); + + it("should detect json backend", () => { + const content = ` +version: 5 +hierarchy: + - name: "JSON data" + path: "common.json" + data_hash: json_data +`; + + const result = parser.parseContent(content); + expect(result.success).toBe(true); + + const backend = parser.detectBackend(result.config!.hierarchy[0]); + expect(backend.type).toBe("json"); + }); + + it("should detect eyaml backend", () => { + const content = ` +version: 5 +hierarchy: + - name: "Encrypted data" + path: "secrets.eyaml" + lookup_key: eyaml_lookup_key +`; + + const result = parser.parseContent(content); + expect(result.success).toBe(true); + + const backend = parser.detectBackend(result.config!.hierarchy[0]); + expect(backend.type).toBe("eyaml"); + }); + }); + + + describe("error handling", () => { + it("should return error for invalid YAML syntax", () => { + const content = ` +version: 5 +hierarchy: + - name: "Bad YAML + path: unclosed quote +`; + + const result = parser.parseContent(content); + + expect(result.success).toBe(false); + expect(result.error).toBeDefined(); + expect(result.error?.code).toBe("HIERA_PARSE_ERROR"); + expect(result.error?.message).toContain("YAML syntax error"); + }); + + it("should return error for unsupported Hiera version", () => { + const content = ` 
+version: 3 +hierarchy: + - name: "Old version" + path: "common.yaml" +`; + + const result = parser.parseContent(content); + + expect(result.success).toBe(false); + expect(result.error?.code).toBe("HIERA_PARSE_ERROR"); + expect(result.error?.message).toContain("Unsupported Hiera version"); + }); + + it("should return error for missing hierarchy", () => { + const content = ` +version: 5 +`; + + const result = parser.parseContent(content); + + expect(result.success).toBe(false); + expect(result.error?.code).toBe("HIERA_PARSE_ERROR"); + expect(result.error?.message).toContain("hierarchy"); + }); + + it("should return error for hierarchy level without name", () => { + const content = ` +version: 5 +hierarchy: + - path: "common.yaml" +`; + + const result = parser.parseContent(content); + + expect(result.success).toBe(false); + expect(result.error?.code).toBe("HIERA_PARSE_ERROR"); + expect(result.error?.message).toContain("name"); + }); + + it("should return error for non-object config", () => { + const content = `just a string`; + + const result = parser.parseContent(content); + + expect(result.success).toBe(false); + expect(result.error?.code).toBe("HIERA_PARSE_ERROR"); + }); + }); + + + describe("interpolatePath", () => { + const facts: Facts = { + nodeId: "node1.example.com", + gatheredAt: new Date().toISOString(), + facts: { + networking: { + fqdn: "node1.example.com", + hostname: "node1", + }, + os: { + family: "RedHat", + name: "CentOS", + }, + hostname: "node1", + environment: "production", + trusted: { + certname: "node1.example.com", + }, + }, + }; + + it("should interpolate facts.xxx syntax", () => { + const template = "nodes/%{facts.networking.fqdn}.yaml"; + const result = parser.interpolatePath(template, facts); + expect(result).toBe("nodes/node1.example.com.yaml"); + }); + + it("should interpolate nested facts", () => { + const template = "os/%{facts.os.family}/%{facts.os.name}.yaml"; + const result = parser.interpolatePath(template, facts); + 
expect(result).toBe("os/RedHat/CentOS.yaml"); + }); + + it("should interpolate legacy ::xxx syntax", () => { + const template = "nodes/%{::hostname}.yaml"; + const result = parser.interpolatePath(template, facts); + expect(result).toBe("nodes/node1.yaml"); + }); + + it("should interpolate trusted.xxx syntax", () => { + const template = "nodes/%{trusted.certname}.yaml"; + const result = parser.interpolatePath(template, facts); + expect(result).toBe("nodes/node1.example.com.yaml"); + }); + + it("should interpolate simple variable syntax", () => { + const template = "environments/%{environment}.yaml"; + const result = parser.interpolatePath(template, facts); + expect(result).toBe("environments/production.yaml"); + }); + + it("should preserve unresolved variables", () => { + const template = "nodes/%{facts.nonexistent}.yaml"; + const result = parser.interpolatePath(template, facts); + expect(result).toBe("nodes/%{facts.nonexistent}.yaml"); + }); + + it("should handle multiple variables in one path", () => { + const template = "%{facts.os.family}/%{facts.networking.hostname}/%{environment}.yaml"; + const result = parser.interpolatePath(template, facts); + expect(result).toBe("RedHat/node1/production.yaml"); + }); + }); + + + describe("parseLookupOptionsFromContent", () => { + it("should parse lookup_options with merge strategies", () => { + const content = ` +lookup_options: + profile::base::packages: + merge: deep + profile::nginx::config: + merge: hash + profile::users::list: + merge: unique +`; + + const result = parser.parseLookupOptionsFromContent(content); + + expect(result.size).toBe(3); + expect(result.get("profile::base::packages")?.merge).toBe("deep"); + expect(result.get("profile::nginx::config")?.merge).toBe("hash"); + expect(result.get("profile::users::list")?.merge).toBe("unique"); + }); + + it("should parse lookup_options with convert_to", () => { + const content = ` +lookup_options: + profile::packages: + convert_to: Array + profile::settings: + 
convert_to: Hash +`; + + const result = parser.parseLookupOptionsFromContent(content); + + expect(result.get("profile::packages")?.convert_to).toBe("Array"); + expect(result.get("profile::settings")?.convert_to).toBe("Hash"); + }); + + it("should parse lookup_options with knockout_prefix", () => { + const content = ` +lookup_options: + profile::base::packages: + merge: deep + knockout_prefix: "--" +`; + + const result = parser.parseLookupOptionsFromContent(content); + + expect(result.get("profile::base::packages")?.merge).toBe("deep"); + expect(result.get("profile::base::packages")?.knockout_prefix).toBe("--"); + }); + + it("should parse merge as object with strategy", () => { + const content = ` +lookup_options: + profile::config: + merge: + strategy: deep +`; + + const result = parser.parseLookupOptionsFromContent(content); + + expect(result.get("profile::config")?.merge).toBe("deep"); + }); + + it("should return empty map for content without lookup_options", () => { + const content = ` +profile::nginx::port: 8080 +profile::nginx::workers: 4 +`; + + const result = parser.parseLookupOptionsFromContent(content); + + expect(result.size).toBe(0); + }); + + it("should return empty map for invalid YAML", () => { + const content = `invalid: yaml: content:`; + + const result = parser.parseLookupOptionsFromContent(content); + + expect(result.size).toBe(0); + }); + }); + + + describe("validateConfig", () => { + it("should validate a correct configuration", () => { + const config: HieraConfig = { + version: 5, + defaults: { + datadir: "data", + data_hash: "yaml_data", + }, + hierarchy: [ + { + name: "Common", + path: "common.yaml", + }, + ], + }; + + const result = parser.validateConfig(config); + + expect(result.valid).toBe(true); + expect(result.errors).toHaveLength(0); + }); + + it("should warn about hierarchy level without path", () => { + const config: HieraConfig = { + version: 5, + hierarchy: [ + { + name: "No path", + }, + ], + }; + + const result = 
parser.validateConfig(config); + + expect(result.warnings.length).toBeGreaterThan(0); + expect(result.warnings.some(w => w.includes("No path"))).toBe(true); + }); + + it("should warn about hierarchy level without data provider", () => { + const config: HieraConfig = { + version: 5, + hierarchy: [ + { + name: "No provider", + path: "common.yaml", + }, + ], + }; + + const result = parser.validateConfig(config); + + expect(result.warnings.length).toBeGreaterThan(0); + expect(result.warnings.some(w => w.includes("No provider"))).toBe(true); + }); + }); + + describe("expandHierarchyPaths", () => { + const facts: Facts = { + nodeId: "web1.example.com", + gatheredAt: new Date().toISOString(), + facts: { + networking: { + fqdn: "web1.example.com", + }, + os: { + family: "Debian", + }, + }, + }; + + it("should expand paths with fact interpolation", () => { + const config: HieraConfig = { + version: 5, + defaults: { + datadir: "data", + }, + hierarchy: [ + { + name: "Per-node", + path: "nodes/%{facts.networking.fqdn}.yaml", + }, + { + name: "Per-OS", + path: "os/%{facts.os.family}.yaml", + }, + { + name: "Common", + path: "common.yaml", + }, + ], + }; + + const paths = parser.expandHierarchyPaths(config, facts); + + expect(paths).toContain("data/nodes/web1.example.com.yaml"); + expect(paths).toContain("data/os/Debian.yaml"); + expect(paths).toContain("data/common.yaml"); + }); + + it("should use level-specific datadir", () => { + const config: HieraConfig = { + version: 5, + defaults: { + datadir: "data", + }, + hierarchy: [ + { + name: "Secrets", + path: "secrets.yaml", + datadir: "secrets", + }, + { + name: "Common", + path: "common.yaml", + }, + ], + }; + + const paths = parser.expandHierarchyPaths(config, facts); + + expect(paths).toContain("secrets/secrets.yaml"); + expect(paths).toContain("data/common.yaml"); + }); + }); +}); diff --git a/backend/test/integrations/HieraPlugin.test.ts b/backend/test/integrations/HieraPlugin.test.ts new file mode 100644 index 
0000000..57c5c88 --- /dev/null +++ b/backend/test/integrations/HieraPlugin.test.ts @@ -0,0 +1,473 @@ +/** + * HieraPlugin Unit Tests + * + * Tests for the HieraPlugin class that provides Hiera data lookup + * and code analysis capabilities. + */ + +import { describe, it, expect, beforeEach, vi, afterEach } from "vitest"; +import * as fs from "fs"; +import { HieraPlugin } from "../../src/integrations/hiera/HieraPlugin"; +import type { IntegrationConfig } from "../../src/integrations/types"; +import type { IntegrationManager } from "../../src/integrations/IntegrationManager"; + +// Mock fs module +vi.mock("fs"); + +// Create mock instances +const mockHieraService = { + initialize: vi.fn().mockResolvedValue(undefined), + isInitialized: vi.fn().mockReturnValue(true), + getAllKeys: vi.fn().mockResolvedValue({ + keys: new Map(), + files: new Map(), + lastScan: new Date().toISOString(), + totalKeys: 10, + totalFiles: 5, + }), + getHieraConfig: vi.fn().mockReturnValue({ version: 5, hierarchy: [] }), + getScanner: vi.fn().mockReturnValue({ + getAllKeys: vi.fn().mockReturnValue([]), + }), + getFactService: vi.fn().mockReturnValue({ + getFacts: vi.fn().mockResolvedValue({ + facts: { nodeId: "test-node", gatheredAt: new Date().toISOString(), facts: {} }, + source: "local", + }), + }), + reloadControlRepo: vi.fn().mockResolvedValue(undefined), + invalidateCache: vi.fn(), + shutdown: vi.fn().mockResolvedValue(undefined), +}; + +const mockCodeAnalyzer = { + initialize: vi.fn().mockResolvedValue(undefined), + isInitialized: vi.fn().mockReturnValue(true), + setIntegrationManager: vi.fn(), + setHieraScanner: vi.fn(), + analyze: vi.fn().mockResolvedValue({ + unusedCode: { unusedClasses: [], unusedDefinedTypes: [], unusedHieraKeys: [] }, + lintIssues: [], + moduleUpdates: [], + statistics: { + totalManifests: 0, + totalClasses: 0, + totalDefinedTypes: 0, + totalFunctions: 0, + linesOfCode: 0, + mostUsedClasses: [], + mostUsedResources: [], + }, + analyzedAt: new Date().toISOString(), 
+ }), + reload: vi.fn().mockResolvedValue(undefined), + clearCache: vi.fn(), +}; + +// Mock HieraService as a class +vi.mock("../../src/integrations/hiera/HieraService", () => { + return { + HieraService: class MockHieraService { + initialize = mockHieraService.initialize; + isInitialized = mockHieraService.isInitialized; + getAllKeys = mockHieraService.getAllKeys; + getHieraConfig = mockHieraService.getHieraConfig; + getScanner = mockHieraService.getScanner; + getFactService = mockHieraService.getFactService; + reloadControlRepo = mockHieraService.reloadControlRepo; + invalidateCache = mockHieraService.invalidateCache; + shutdown = mockHieraService.shutdown; + }, + }; +}); + +// Mock CodeAnalyzer as a class +vi.mock("../../src/integrations/hiera/CodeAnalyzer", () => { + return { + CodeAnalyzer: class MockCodeAnalyzer { + initialize = mockCodeAnalyzer.initialize; + isInitialized = mockCodeAnalyzer.isInitialized; + setIntegrationManager = mockCodeAnalyzer.setIntegrationManager; + setHieraScanner = mockCodeAnalyzer.setHieraScanner; + analyze = mockCodeAnalyzer.analyze; + reload = mockCodeAnalyzer.reload; + clearCache = mockCodeAnalyzer.clearCache; + }, + }; +}); + +describe("HieraPlugin", () => { + let plugin: HieraPlugin; + let mockIntegrationManager: IntegrationManager; + + beforeEach(() => { + vi.clearAllMocks(); + + // Reset mock implementations + mockHieraService.initialize.mockResolvedValue(undefined); + mockHieraService.isInitialized.mockReturnValue(true); + mockCodeAnalyzer.initialize.mockResolvedValue(undefined); + + // Create mock IntegrationManager + mockIntegrationManager = { + getInformationSource: vi.fn().mockReturnValue(null), + } as unknown as IntegrationManager; + + plugin = new HieraPlugin(); + plugin.setIntegrationManager(mockIntegrationManager); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + describe("constructor", () => { + it("should create plugin with correct name and type", () => { + expect(plugin.name).toBe("hiera"); + 
expect(plugin.type).toBe("information"); + }); + }); + + describe("validateControlRepository", () => { + it("should return invalid when path does not exist", () => { + vi.mocked(fs.existsSync).mockReturnValue(false); + + const result = plugin.validateControlRepository("/nonexistent/path"); + + expect(result.valid).toBe(false); + expect(result.errors).toContain("Control repository path does not exist: /nonexistent/path"); + }); + + it("should return invalid when path is not a directory", () => { + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.statSync).mockReturnValue({ + isDirectory: () => false, + } as fs.Stats); + + const result = plugin.validateControlRepository("/some/file"); + + expect(result.valid).toBe(false); + expect(result.errors).toContain("Control repository path is not a directory: /some/file"); + }); + + it("should return invalid when hiera.yaml is missing", () => { + vi.mocked(fs.existsSync).mockImplementation((p) => { + const pathStr = String(p); + if (pathStr === "/valid/repo") return true; + if (pathStr.includes("hiera.yaml")) return false; + return false; + }); + vi.mocked(fs.statSync).mockReturnValue({ + isDirectory: () => true, + } as fs.Stats); + + const result = plugin.validateControlRepository("/valid/repo"); + + expect(result.valid).toBe(false); + expect(result.errors.some(e => e.includes("hiera.yaml not found"))).toBe(true); + }); + + it("should return valid with warnings when optional directories are missing", () => { + vi.mocked(fs.existsSync).mockImplementation((p) => { + const pathStr = String(p); + if (pathStr === "/valid/repo") return true; + if (pathStr.includes("hiera.yaml")) return true; + return false; + }); + vi.mocked(fs.statSync).mockReturnValue({ + isDirectory: () => true, + } as fs.Stats); + + const result = plugin.validateControlRepository("/valid/repo"); + + expect(result.valid).toBe(true); + expect(result.warnings.length).toBeGreaterThan(0); + expect(result.structure.hasHieraYaml).toBe(true); + }); + + 
it("should detect all structure components when present", () => { + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.statSync).mockReturnValue({ + isDirectory: () => true, + } as fs.Stats); + + const result = plugin.validateControlRepository("/valid/repo"); + + expect(result.valid).toBe(true); + expect(result.structure.hasHieraYaml).toBe(true); + expect(result.structure.hasHieradataDir).toBe(true); + expect(result.structure.hasManifestsDir).toBe(true); + expect(result.structure.hasPuppetfile).toBe(true); + }); + }); + + describe("initialize", () => { + it("should not initialize when disabled", async () => { + const config: IntegrationConfig = { + enabled: false, + name: "hiera", + type: "information", + config: { + controlRepoPath: "/some/path", + }, + }; + + await plugin.initialize(config); + + expect(plugin.isInitialized()).toBe(false); + }); + + it("should not fully initialize when controlRepoPath is missing", async () => { + const config: IntegrationConfig = { + enabled: true, + name: "hiera", + type: "information", + config: { + controlRepoPath: "", + }, + }; + + await plugin.initialize(config); + + // Plugin is technically initialized but services are not set up + // The health check will report not configured + const health = await plugin.healthCheck(); + expect(health.healthy).toBe(false); + }); + + it("should throw error when control repo validation fails", async () => { + vi.mocked(fs.existsSync).mockReturnValue(false); + + const config: IntegrationConfig = { + enabled: true, + name: "hiera", + type: "information", + config: { + controlRepoPath: "/nonexistent/path", + }, + }; + + await expect(plugin.initialize(config)).rejects.toThrow( + "Control repository validation failed" + ); + }); + + it("should initialize successfully with valid config", async () => { + // Mock valid control repo + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.statSync).mockReturnValue({ + isDirectory: () => true, + } as fs.Stats); + + const config: 
IntegrationConfig = { + enabled: true, + name: "hiera", + type: "information", + config: { + controlRepoPath: "/valid/repo", + hieraConfigPath: "hiera.yaml", + environments: ["production"], + factSources: { preferPuppetDB: true }, + cache: { enabled: true, ttl: 300000, maxEntries: 10000 }, + codeAnalysis: { + enabled: true, + lintEnabled: true, + moduleUpdateCheck: true, + analysisInterval: 3600000, + exclusionPatterns: [], + }, + }, + }; + + await plugin.initialize(config); + + expect(plugin.isInitialized()).toBe(true); + }); + }); + + describe("healthCheck", () => { + it("should return not initialized when plugin is not initialized", async () => { + const config: IntegrationConfig = { + enabled: false, + name: "hiera", + type: "information", + config: {}, + }; + + await plugin.initialize(config); + const health = await plugin.healthCheck(); + + expect(health.healthy).toBe(false); + // Base class returns "not initialized" when plugin is disabled + expect(health.message).toContain("not initialized"); + }); + + it("should return not initialized when integration is disabled", async () => { + const config: IntegrationConfig = { + enabled: false, + name: "hiera", + type: "information", + config: { + controlRepoPath: "/some/path", + }, + }; + + await plugin.initialize(config); + const health = await plugin.healthCheck(); + + expect(health.healthy).toBe(false); + // Base class returns "not initialized" when plugin is disabled + expect(health.message).toContain("not initialized"); + }); + + it("should return healthy status when properly initialized", async () => { + // Mock valid control repo + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.statSync).mockReturnValue({ + isDirectory: () => true, + } as fs.Stats); + + const config: IntegrationConfig = { + enabled: true, + name: "hiera", + type: "information", + config: { + controlRepoPath: "/valid/repo", + }, + }; + + await plugin.initialize(config); + const health = await plugin.healthCheck(); + + 
expect(health.healthy).toBe(true); + expect(health.message).toContain("healthy"); + }); + }); + + describe("enable/disable", () => { + beforeEach(async () => { + // Mock valid control repo + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.statSync).mockReturnValue({ + isDirectory: () => true, + } as fs.Stats); + + const config: IntegrationConfig = { + enabled: true, + name: "hiera", + type: "information", + config: { + controlRepoPath: "/valid/repo", + }, + }; + + await plugin.initialize(config); + }); + + it("should disable the integration", async () => { + expect(plugin.isEnabled()).toBe(true); + + await plugin.disable(); + + expect(plugin.isEnabled()).toBe(false); + expect(plugin.isInitialized()).toBe(false); + }); + + it("should re-enable the integration", async () => { + await plugin.disable(); + expect(plugin.isEnabled()).toBe(false); + + await plugin.enable(); + + expect(plugin.isEnabled()).toBe(true); + expect(plugin.isInitialized()).toBe(true); + }); + }); + + describe("reload", () => { + beforeEach(async () => { + // Mock valid control repo + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.statSync).mockReturnValue({ + isDirectory: () => true, + } as fs.Stats); + + const config: IntegrationConfig = { + enabled: true, + name: "hiera", + type: "information", + config: { + controlRepoPath: "/valid/repo", + }, + }; + + await plugin.initialize(config); + }); + + it("should reload control repository data", async () => { + await expect(plugin.reload()).resolves.not.toThrow(); + expect(mockHieraService.reloadControlRepo).toHaveBeenCalled(); + }); + + it("should throw error when not initialized", async () => { + await plugin.disable(); + + await expect(plugin.reload()).rejects.toThrow("not initialized"); + }); + }); + + describe("getInventory", () => { + it("should return empty array when PuppetDB is not available", async () => { + // Mock valid control repo + vi.mocked(fs.existsSync).mockReturnValue(true); + 
vi.mocked(fs.statSync).mockReturnValue({ + isDirectory: () => true, + } as fs.Stats); + + const config: IntegrationConfig = { + enabled: true, + name: "hiera", + type: "information", + config: { + controlRepoPath: "/valid/repo", + }, + }; + + await plugin.initialize(config); + const inventory = await plugin.getInventory(); + + expect(inventory).toEqual([]); + }); + + it("should delegate to PuppetDB when available", async () => { + const mockNodes = [{ id: "node1", certname: "node1.example.com" }]; + const mockPuppetDB = { + isInitialized: vi.fn().mockReturnValue(true), + getInventory: vi.fn().mockResolvedValue(mockNodes), + }; + + mockIntegrationManager.getInformationSource = vi.fn().mockReturnValue(mockPuppetDB); + + // Mock valid control repo + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.statSync).mockReturnValue({ + isDirectory: () => true, + } as fs.Stats); + + const config: IntegrationConfig = { + enabled: true, + name: "hiera", + type: "information", + config: { + controlRepoPath: "/valid/repo", + }, + }; + + await plugin.initialize(config); + const inventory = await plugin.getInventory(); + + expect(mockPuppetDB.getInventory).toHaveBeenCalled(); + expect(inventory).toEqual(mockNodes); + }); + }); +}); diff --git a/backend/test/integrations/HieraScanner.test.ts b/backend/test/integrations/HieraScanner.test.ts new file mode 100644 index 0000000..7383745 --- /dev/null +++ b/backend/test/integrations/HieraScanner.test.ts @@ -0,0 +1,421 @@ +/** + * HieraScanner Unit Tests + */ + +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import { HieraScanner } from "../../src/integrations/hiera/HieraScanner"; + +describe("HieraScanner", () => { + let scanner: HieraScanner; + let testDir: string; + + beforeEach(() => { + // Create a temporary test directory + testDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-scanner-test-")); + scanner = new 
HieraScanner(testDir, "data");
  });

  afterEach(() => {
    // Tear down: stop any active watcher, then remove the per-test temp dir.
    scanner.stopWatching();
    fs.rmSync(testDir, { recursive: true, force: true });
  });

  /**
   * Write a fixture file under the temp directory, creating any
   * intermediate directories on the way.
   */
  function createTestFile(relativePath: string, content: string): void {
    const fullPath = path.join(testDir, relativePath);
    fs.mkdirSync(path.dirname(fullPath), { recursive: true });
    fs.writeFileSync(fullPath, content, "utf-8");
  }

  describe("scan", () => {
    it("should scan an empty directory", async () => {
      fs.mkdirSync(path.join(testDir, "data"), { recursive: true });

      const keyIndex = await scanner.scan();

      expect(keyIndex.totalKeys).toBe(0);
      expect(keyIndex.totalFiles).toBe(0);
      expect(keyIndex.lastScan).toBeTruthy();
    });

    it("should scan a single YAML file", async () => {
      createTestFile("data/common.yaml", `
profile::nginx::port: 8080
profile::nginx::workers: 4
`);

      const keyIndex = await scanner.scan();

      expect(keyIndex.totalKeys).toBe(2);
      expect(keyIndex.totalFiles).toBe(1);
      expect(keyIndex.keys.has("profile::nginx::port")).toBe(true);
      expect(keyIndex.keys.has("profile::nginx::workers")).toBe(true);
    });

    it("should scan multiple YAML files", async () => {
      createTestFile("data/common.yaml", `
common_key: common_value
`);
      createTestFile("data/nodes/node1.yaml", `
node_key: node_value
`);

      const keyIndex = await scanner.scan();

      expect(keyIndex.totalKeys).toBe(2);
      expect(keyIndex.totalFiles).toBe(2);
      expect(keyIndex.keys.has("common_key")).toBe(true);
      expect(keyIndex.keys.has("node_key")).toBe(true);
    });

    it("should scan JSON files", async () => {
      createTestFile(
        "data/common.json",
        JSON.stringify({ json_key: "json_value", another_key: 123 })
      );

      const keyIndex = await scanner.scan();

      expect(keyIndex.totalKeys).toBe(2);
      expect(keyIndex.keys.has("json_key")).toBe(true);
      expect(keyIndex.keys.has("another_key")).toBe(true);
    });

    it("should handle non-existent directory gracefully", async () => {
      // Point the scanner at a datadir that does not exist on disk.
      scanner = new HieraScanner(testDir, "nonexistent");

      const keyIndex = await scanner.scan();

      expect(keyIndex.totalKeys).toBe(0);
      expect(keyIndex.totalFiles).toBe(0);
    });
  });

  describe("nested key support", () => {
    it("should extract nested keys with dot notation", async () => {
      createTestFile("data/common.yaml", `
profile:
  nginx:
    port: 8080
    workers: 4
`);

      const keyIndex = await scanner.scan();

      // Every intermediate node is indexed, not just the leaves.
      expect(keyIndex.keys.has("profile")).toBe(true);
      expect(keyIndex.keys.has("profile.nginx")).toBe(true);
      expect(keyIndex.keys.has("profile.nginx.port")).toBe(true);
      expect(keyIndex.keys.has("profile.nginx.workers")).toBe(true);
    });

    it("should handle deeply nested structures", async () => {
      createTestFile("data/common.yaml", `
level1:
  level2:
    level3:
      level4:
        value: deep
`);

      const keyIndex = await scanner.scan();

      expect(keyIndex.keys.has("level1.level2.level3.level4.value")).toBe(true);
      const entry = keyIndex.keys.get("level1.level2.level3.level4.value");
      expect(entry?.locations[0].value).toBe("deep");
    });

    it("should handle Puppet-style double-colon keys", async () => {
      createTestFile("data/common.yaml", `
"profile::nginx::port": 8080
"profile::nginx::workers": 4
`);

      const keyIndex = await scanner.scan();

      expect(keyIndex.keys.has("profile::nginx::port")).toBe(true);
      expect(keyIndex.keys.has("profile::nginx::workers")).toBe(true);
    });
  });

  describe("multi-occurrence tracking", () => {
    it("should track key in multiple files", async () => {
      createTestFile("data/common.yaml", `
shared_key: common_value
`);
      createTestFile("data/nodes/node1.yaml", `
shared_key: node_value
`);

      const keyIndex = await scanner.scan();

      const entry = keyIndex.keys.get("shared_key");
      expect(entry).toBeDefined();
      expect(entry?.locations.length).toBe(2);

      const values = entry?.locations.map(loc => loc.value);
      expect(values).toContain("common_value");
      expect(values).toContain("node_value");
    });

    it("should track file path for each occurrence", async () => {
      createTestFile("data/common.yaml", `
shared_key: common_value
`);
      createTestFile("data/os/RedHat.yaml", `
shared_key: redhat_value
`);

      const keyIndex = await scanner.scan();

      const entry = keyIndex.keys.get("shared_key");
      const files = entry?.locations.map(loc => loc.file);

      expect(files).toContain("data/common.yaml");
      expect(files).toContain("data/os/RedHat.yaml");
    });

    it("should track hierarchy level for each occurrence", async () => {
      createTestFile("data/common.yaml", `
shared_key: common_value
`);
      createTestFile("data/nodes/node1.yaml", `
shared_key: node_value
`);

      const keyIndex = await scanner.scan();

      const entry = keyIndex.keys.get("shared_key");
      const levels = entry?.locations.map(loc => loc.hierarchyLevel);

      expect(levels).toContain("Common data");
      expect(levels).toContain("Per-node data");
    });
  });

  describe("searchKeys", () => {
    beforeEach(async () => {
      createTestFile("data/common.yaml", `
profile::nginx::port: 8080
profile::nginx::workers: 4
profile::apache::port: 80
database::mysql::port: 3306
`);
      await scanner.scan();
    });

    it("should find keys by partial match", () => {
      const matches = scanner.searchKeys("nginx");

      expect(matches.length).toBe(2);
      const names = matches.map(k => k.name);
      expect(names).toContain("profile::nginx::port");
      expect(names).toContain("profile::nginx::workers");
    });

    it("should be case-insensitive", () => {
      const matches = scanner.searchKeys("NGINX");

      expect(matches.length).toBe(2);
    });

    it("should return all keys for empty query", () => {
      const matches = scanner.searchKeys("");

      expect(matches.length).toBe(4);
    });

    it("should return empty array for no matches", () => {
      const matches = scanner.searchKeys("nonexistent");

      expect(matches.length).toBe(0);
    });

    it("should find keys by suffix", () => {
      const matches = scanner.searchKeys("port");

      expect(matches.length).toBe(3);
    });
  });

  describe("parseFileContent", () => {
    it("should parse valid YAML content", () => {
      const content = `
key1: value1
key2: 123
key3: true
`;
      const parsed = scanner.parseFileContent(content, "test.yaml");

      expect(parsed.success).toBe(true);
      expect(parsed.keys.size).toBe(3);
    });

    it("should handle invalid YAML gracefully", () => {
      const content = `invalid: yaml: content:`;
      const parsed = scanner.parseFileContent(content, "test.yaml");

      expect(parsed.success).toBe(false);
      expect(parsed.error).toContain("YAML parse error");
    });

    it("should handle empty content", () => {
      const parsed = scanner.parseFileContent("", "test.yaml");

      expect(parsed.success).toBe(true);
      expect(parsed.keys.size).toBe(0);
    });

    it("should extract lookup_options", () => {
      const content = `
profile::packages:
  - vim
  - git
lookup_options:
  profile::packages:
    merge: unique
`;
      const parsed = scanner.parseFileContent(content, "test.yaml");

      expect(parsed.success).toBe(true);
      expect(parsed.lookupOptions.has("profile::packages")).toBe(true);
      expect(parsed.lookupOptions.get("profile::packages")?.merge).toBe("unique");
    });

    it("should not include lookup_options as a key", () => {
      const content = `
real_key: value
lookup_options:
  real_key:
    merge: deep
`;
      const parsed = scanner.parseFileContent(content, "test.yaml");

      expect(parsed.success).toBe(true);
      expect(parsed.keys.has("real_key")).toBe(true);
      expect(parsed.keys.has("lookup_options")).toBe(false);
    });
  });

  describe("hierarchy level detection", () => {
    it("should detect common data level", async () => {
      createTestFile("data/common.yaml", `key: value`);
      await scanner.scan();

      const fileInfo = scanner.getKeyIndex().files.get("data/common.yaml");
      expect(fileInfo?.hierarchyLevel).toBe("Common data");
    });

    it("should detect per-node data level", async () => {
      createTestFile("data/nodes/node1.yaml", `key: value`);
      await scanner.scan();

      const fileInfo = scanner.getKeyIndex().files.get("data/nodes/node1.yaml");
      expect(fileInfo?.hierarchyLevel).toBe("Per-node data");
    });

    it("should detect per-OS data level", async () => {
      createTestFile("data/os/RedHat.yaml", `key: value`);
      await scanner.scan();

      const fileInfo = scanner.getKeyIndex().files.get("data/os/RedHat.yaml");
      expect(fileInfo?.hierarchyLevel).toBe("Per-OS data");
    });

    it("should detect per-environment data level", async () => {
      createTestFile("data/environments/production.yaml", `key: value`);
      await scanner.scan();

      const fileInfo = scanner.getKeyIndex().files.get("data/environments/production.yaml");
      expect(fileInfo?.hierarchyLevel).toBe("Per-environment data");
    });
  });

  describe("file watching", () => {
    it("should start watching for changes", () => {
      fs.mkdirSync(path.join(testDir, "data"), { recursive: true });

      scanner.watchForChanges(() => {});

      expect(scanner.isWatchingForChanges()).toBe(true);
    });

    it("should stop watching", () => {
      fs.mkdirSync(path.join(testDir, "data"), { recursive: true });

      scanner.watchForChanges(() => {});
      scanner.stopWatching();

      expect(scanner.isWatchingForChanges()).toBe(false);
    });
  });

  describe("cache invalidation", () => {
    it("should invalidate specific files", async () => {
      createTestFile("data/common.yaml", `key1: value1`);
      createTestFile("data/other.yaml", `key2: value2`);
      await scanner.scan();

      expect(scanner.getKeyIndex().keys.has("key1")).toBe(true);
      expect(scanner.getKeyIndex().keys.has("key2")).toBe(true);

      scanner.invalidateFiles(["data/common.yaml"]);

      // Only keys from the invalidated file disappear from the index.
      expect(scanner.getKeyIndex().keys.has("key1")).toBe(false);
      expect(scanner.getKeyIndex().keys.has("key2")).toBe(true);
    });

    it("should rescan files after invalidation", async () => {
      createTestFile("data/common.yaml", `key1: value1`);
      await scanner.scan();

      // Modify the file
      createTestFile("data/common.yaml", `key1: updated_value`);

      await scanner.rescanFiles(["data/common.yaml"]);

      const entry = scanner.getKey("key1");
      expect(entry?.locations[0].value).toBe("updated_value");
    });
  });

  describe("getKey and getAllKeys", () => {
    beforeEach(async () => {
      createTestFile("data/common.yaml", `
key1: value1
key2: value2
`);
      await scanner.scan();
    });

    it("should get a specific key", () => {
      const entry = scanner.getKey("key1");

      expect(entry).toBeDefined();
      expect(entry?.name).toBe("key1");
      expect(entry?.locations[0].value).toBe("value1");
    });

    it("should return undefined for non-existent key", () => {
      const entry = scanner.getKey("nonexistent");

      expect(entry).toBeUndefined();
    });

    it("should get all keys", () => {
      const keys = scanner.getAllKeys();

      expect(keys.length).toBe(2);
      const names = keys.map(k => k.name);
      expect(names).toContain("key1");
      expect(names).toContain("key2");
    });
  });
});
diff --git a/backend/test/integrations/HieraService.test.ts b/backend/test/integrations/HieraService.test.ts
new file mode 100644
index 0000000..c1ae4b6
--- /dev/null
+++ b/backend/test/integrations/HieraService.test.ts
@@ -0,0 +1,524 @@
/**
 * HieraService Unit Tests
 *
 * Tests for the HieraService class that orchestrates Hiera operations
 * with caching support.
+ */ + +import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import { HieraService, type HieraServiceConfig } from "../../src/integrations/hiera/HieraService"; +import { IntegrationManager } from "../../src/integrations/IntegrationManager"; + +describe("HieraService", () => { + let service: HieraService; + let integrationManager: IntegrationManager; + let testDir: string; + let config: HieraServiceConfig; + + beforeEach(() => { + // Create a temporary test directory + testDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-service-test-")); + + // Create test control repo structure + createTestControlRepo(testDir); + + // Create integration manager + integrationManager = new IntegrationManager(); + + // Create service config + config = { + controlRepoPath: testDir, + hieraConfigPath: "hiera.yaml", + hieradataPath: "data", + factSources: { + preferPuppetDB: false, + localFactsPath: path.join(testDir, "facts"), + }, + cache: { + enabled: true, + ttl: 300000, // 5 minutes + maxEntries: 1000, + }, + }; + + service = new HieraService(integrationManager, config); + }); + + afterEach(async () => { + // Shutdown service + if (service.isInitialized()) { + await service.shutdown(); + } + + // Clean up test directory + fs.rmSync(testDir, { recursive: true, force: true }); + }); + + describe("initialization", () => { + it("should initialize successfully with valid config", async () => { + await service.initialize(); + + expect(service.isInitialized()).toBe(true); + expect(service.getHieraConfig()).not.toBeNull(); + expect(service.getHieraConfig()?.version).toBe(5); + }); + + it("should throw error if hiera.yaml is invalid", async () => { + // Write invalid hiera.yaml + fs.writeFileSync( + path.join(testDir, "hiera.yaml"), + "version: 3\nhierarchy: []" + ); + + await expect(service.initialize()).rejects.toThrow("Unsupported Hiera version"); + }); + + it("should throw error 
if hiera.yaml is missing", async () => { + // Remove hiera.yaml + fs.unlinkSync(path.join(testDir, "hiera.yaml")); + + await expect(service.initialize()).rejects.toThrow(); + }); + }); + + describe("getAllKeys", () => { + beforeEach(async () => { + await service.initialize(); + }); + + it("should return all discovered keys", async () => { + const keyIndex = await service.getAllKeys(); + + expect(keyIndex.totalKeys).toBeGreaterThan(0); + expect(keyIndex.keys.has("profile::nginx::port")).toBe(true); + expect(keyIndex.keys.has("profile::nginx::workers")).toBe(true); + }); + + it("should cache key index", async () => { + // First call + const keyIndex1 = await service.getAllKeys(); + + // Second call should return cached result + const keyIndex2 = await service.getAllKeys(); + + expect(keyIndex1).toBe(keyIndex2); + }); + }); + + describe("searchKeys", () => { + beforeEach(async () => { + await service.initialize(); + }); + + it("should find keys matching query", async () => { + const results = await service.searchKeys("nginx"); + + expect(results.length).toBeGreaterThan(0); + expect(results.every(k => k.name.includes("nginx"))).toBe(true); + }); + + it("should be case-insensitive", async () => { + const results = await service.searchKeys("NGINX"); + + expect(results.length).toBeGreaterThan(0); + }); + + it("should return all keys for empty query", async () => { + const allKeys = await service.getAllKeys(); + const results = await service.searchKeys(""); + + expect(results.length).toBe(allKeys.totalKeys); + }); + }); + + describe("getKey", () => { + beforeEach(async () => { + await service.initialize(); + }); + + it("should return key details for existing key", async () => { + const key = await service.getKey("profile::nginx::port"); + + expect(key).toBeDefined(); + expect(key?.name).toBe("profile::nginx::port"); + expect(key?.locations.length).toBeGreaterThan(0); + }); + + it("should return undefined for non-existent key", async () => { + const key = await 
service.getKey("nonexistent::key"); + + expect(key).toBeUndefined(); + }); + }); + + describe("resolveKey", () => { + beforeEach(async () => { + await service.initialize(); + }); + + it("should resolve key for a node", async () => { + const resolution = await service.resolveKey("node1.example.com", "profile::nginx::port"); + + expect(resolution.key).toBe("profile::nginx::port"); + expect(resolution.found).toBe(true); + // Node-specific value (9090) should override common value (8080) + expect(resolution.resolvedValue).toBe(9090); + }); + + it("should return not found for missing key", async () => { + const resolution = await service.resolveKey("node1.example.com", "nonexistent::key"); + + expect(resolution.found).toBe(false); + expect(resolution.resolvedValue).toBeUndefined(); + }); + + it("should cache resolution results", async () => { + // First call + await service.resolveKey("node1.example.com", "profile::nginx::port"); + + // Check cache stats + const stats = service.getCacheStats(); + expect(stats.resolutionCacheSize).toBeGreaterThan(0); + }); + }); + + describe("resolveAllKeys", () => { + beforeEach(async () => { + await service.initialize(); + }); + + it("should resolve all keys for a node", async () => { + const resolutions = await service.resolveAllKeys("node1.example.com"); + + expect(resolutions.size).toBeGreaterThan(0); + expect(resolutions.has("profile::nginx::port")).toBe(true); + }); + }); + + describe("getNodeHieraData", () => { + beforeEach(async () => { + await service.initialize(); + }); + + it("should return complete node data", async () => { + const nodeData = await service.getNodeHieraData("node1.example.com"); + + expect(nodeData.nodeId).toBe("node1.example.com"); + expect(nodeData.facts).toBeDefined(); + expect(nodeData.keys.size).toBeGreaterThan(0); + }); + + it("should cache node data", async () => { + // First call + await service.getNodeHieraData("node1.example.com"); + + // Check cache stats + const stats = service.getCacheStats(); + 
expect(stats.nodeDataCacheSize).toBeGreaterThan(0); + }); + + it("should include usedKeys and unusedKeys sets", async () => { + const nodeData = await service.getNodeHieraData("node1.example.com"); + + // Without PuppetDB, all keys should be marked as unused + expect(nodeData.usedKeys).toBeInstanceOf(Set); + expect(nodeData.unusedKeys).toBeInstanceOf(Set); + + // Total of used + unused should equal total keys + const totalClassified = nodeData.usedKeys.size + nodeData.unusedKeys.size; + expect(totalClassified).toBe(nodeData.keys.size); + }); + + it("should mark all keys as unused when PuppetDB is not available", async () => { + const nodeData = await service.getNodeHieraData("node1.example.com"); + + // Without PuppetDB integration, all keys should be unused + expect(nodeData.unusedKeys.size).toBe(nodeData.keys.size); + expect(nodeData.usedKeys.size).toBe(0); + }); + }); + + describe("getKeyValuesAcrossNodes", () => { + beforeEach(async () => { + await service.initialize(); + }); + + it("should return key values for all available nodes", async () => { + const results = await service.getKeyValuesAcrossNodes("profile::nginx::port"); + + expect(results.length).toBeGreaterThan(0); + + // Each result should have required fields + for (const result of results) { + expect(result.nodeId).toBeDefined(); + expect(typeof result.found).toBe("boolean"); + if (result.found) { + expect(result.sourceFile).toBeDefined(); + expect(result.hierarchyLevel).toBeDefined(); + } + } + }); + + it("should include source file info for each node", async () => { + const results = await service.getKeyValuesAcrossNodes("profile::nginx::port"); + + // Find a result where the key was found + const foundResult = results.find(r => r.found); + expect(foundResult).toBeDefined(); + expect(foundResult?.sourceFile).toBeTruthy(); + expect(foundResult?.hierarchyLevel).toBeTruthy(); + }); + + it("should return different values for different nodes", async () => { + const results = await 
service.getKeyValuesAcrossNodes("profile::nginx::port"); + + // node1 has port 9090, common has 8080 + const node1Result = results.find(r => r.nodeId === "node1.example.com"); + const node2Result = results.find(r => r.nodeId === "node2.example.com"); + + expect(node1Result?.value).toBe(9090); + expect(node2Result?.value).toBe(8080); // Falls back to common + }); + + it("should indicate when key is not found for a node", async () => { + const results = await service.getKeyValuesAcrossNodes("nonexistent::key"); + + // All results should have found=false + for (const result of results) { + expect(result.found).toBe(false); + } + }); + }); + + describe("cache management", () => { + beforeEach(async () => { + await service.initialize(); + }); + + it("should invalidate all caches", async () => { + // Populate caches + await service.getAllKeys(); + await service.resolveKey("node1.example.com", "profile::nginx::port"); + await service.getNodeHieraData("node1.example.com"); + + // Verify caches are populated + let stats = service.getCacheStats(); + expect(stats.keyIndexCached).toBe(true); + expect(stats.resolutionCacheSize).toBeGreaterThan(0); + expect(stats.nodeDataCacheSize).toBeGreaterThan(0); + + // Invalidate + service.invalidateCache(); + + // Verify caches are cleared + stats = service.getCacheStats(); + expect(stats.keyIndexCached).toBe(false); + expect(stats.resolutionCacheSize).toBe(0); + expect(stats.nodeDataCacheSize).toBe(0); + }); + + it("should invalidate cache for specific node", async () => { + // Populate caches for two nodes + await service.resolveKey("node1.example.com", "profile::nginx::port"); + await service.resolveKey("node2.example.com", "profile::nginx::port"); + await service.getNodeHieraData("node1.example.com"); + await service.getNodeHieraData("node2.example.com"); + + // Invalidate node1 cache + service.invalidateNodeCache("node1.example.com"); + + // Verify node1 cache is cleared but node2 remains + const stats = service.getCacheStats(); + 
expect(stats.nodeDataCacheSize).toBe(1); + }); + + it("should return correct cache statistics", async () => { + const stats = service.getCacheStats(); + + expect(stats.enabled).toBe(true); + expect(stats.ttl).toBe(300000); + expect(stats.maxEntries).toBe(1000); + }); + + it("should cache parsed hieradata", async () => { + // First call should populate cache + await service.getAllKeys(); + + let stats = service.getCacheStats(); + expect(stats.keyIndexCached).toBe(true); + + // Second call should use cache (same reference) + const keys1 = await service.getAllKeys(); + const keys2 = await service.getAllKeys(); + expect(keys1).toBe(keys2); + }); + + it("should cache resolved values per node", async () => { + // First resolution + await service.resolveKey("node1.example.com", "profile::nginx::port"); + + let stats = service.getCacheStats(); + expect(stats.resolutionCacheSize).toBe(1); + + // Second resolution for same key should use cache + await service.resolveKey("node1.example.com", "profile::nginx::port"); + + stats = service.getCacheStats(); + expect(stats.resolutionCacheSize).toBe(1); // Still 1, not 2 + + // Different key should add to cache + await service.resolveKey("node1.example.com", "profile::nginx::workers"); + + stats = service.getCacheStats(); + expect(stats.resolutionCacheSize).toBe(2); + }); + }); + + describe("reloadControlRepo", () => { + beforeEach(async () => { + await service.initialize(); + }); + + it("should reload and invalidate caches", async () => { + // Populate caches + await service.getAllKeys(); + await service.resolveKey("node1.example.com", "profile::nginx::port"); + + // Reload + await service.reloadControlRepo(); + + // Verify caches are cleared + const stats = service.getCacheStats(); + expect(stats.resolutionCacheSize).toBe(0); + expect(stats.nodeDataCacheSize).toBe(0); + }); + }); + + describe("component accessors", () => { + beforeEach(async () => { + await service.initialize(); + }); + + it("should provide access to parser", () 
=> { + expect(service.getParser()).toBeDefined(); + }); + + it("should provide access to scanner", () => { + expect(service.getScanner()).toBeDefined(); + }); + + it("should provide access to resolver", () => { + expect(service.getResolver()).toBeDefined(); + }); + + it("should provide access to fact service", () => { + expect(service.getFactService()).toBeDefined(); + }); + }); + + describe("error handling", () => { + it("should throw error when not initialized", async () => { + await expect(service.getAllKeys()).rejects.toThrow("not initialized"); + }); + }); + + describe("shutdown", () => { + it("should clean up resources on shutdown", async () => { + await service.initialize(); + + // Populate caches + await service.getAllKeys(); + + // Shutdown + await service.shutdown(); + + expect(service.isInitialized()).toBe(false); + }); + }); +}); + +/** + * Create a test control repository structure + */ +function createTestControlRepo(testDir: string): void { + // Create directories + fs.mkdirSync(path.join(testDir, "data", "nodes"), { recursive: true }); + fs.mkdirSync(path.join(testDir, "facts"), { recursive: true }); + + // Create hiera.yaml + const hieraConfig = ` +version: 5 +defaults: + datadir: data + data_hash: yaml_data +hierarchy: + - name: "Per-node data" + path: "nodes/%{facts.networking.hostname}.yaml" + - name: "Common data" + path: "common.yaml" +`; + fs.writeFileSync(path.join(testDir, "hiera.yaml"), hieraConfig); + + // Create common.yaml + const commonData = ` +profile::nginx::port: 8080 +profile::nginx::workers: 4 +profile::base::packages: + - vim + - curl + - wget +`; + fs.writeFileSync(path.join(testDir, "data", "common.yaml"), commonData); + + // Create node-specific data + const node1Data = ` +profile::nginx::port: 9090 +profile::nginx::ssl_enabled: true +`; + fs.writeFileSync(path.join(testDir, "data", "nodes", "node1.yaml"), node1Data); + + const node2Data = ` +profile::nginx::workers: 8 +`; + fs.writeFileSync(path.join(testDir, "data", 
"nodes", "node2.yaml"), node2Data); + + // Create local fact files + const node1Facts = { + name: "node1.example.com", + values: { + networking: { + hostname: "node1", + fqdn: "node1.example.com", + }, + os: { + family: "RedHat", + name: "CentOS", + }, + }, + }; + fs.writeFileSync( + path.join(testDir, "facts", "node1.example.com.json"), + JSON.stringify(node1Facts, null, 2) + ); + + const node2Facts = { + name: "node2.example.com", + values: { + networking: { + hostname: "node2", + fqdn: "node2.example.com", + }, + os: { + family: "Debian", + name: "Ubuntu", + }, + }, + }; + fs.writeFileSync( + path.join(testDir, "facts", "node2.example.com.json"), + JSON.stringify(node2Facts, null, 2) + ); +} diff --git a/backend/test/integrations/PuppetfileParser.test.ts b/backend/test/integrations/PuppetfileParser.test.ts new file mode 100644 index 0000000..6467cc8 --- /dev/null +++ b/backend/test/integrations/PuppetfileParser.test.ts @@ -0,0 +1,305 @@ +/** + * PuppetfileParser Unit Tests + * + * Tests for the PuppetfileParser class that parses Puppetfile + * to extract module dependencies. 
+ */ + +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import { PuppetfileParser } from "../../src/integrations/hiera/PuppetfileParser"; + +describe("PuppetfileParser", () => { + let parser: PuppetfileParser; + let testDir: string; + + beforeEach(() => { + parser = new PuppetfileParser(); + testDir = fs.mkdtempSync(path.join(os.tmpdir(), "puppetfile-test-")); + }); + + afterEach(() => { + fs.rmSync(testDir, { recursive: true, force: true }); + }); + + describe("parse", () => { + it("should parse simple forge modules", () => { + const content = ` +mod 'puppetlabs/stdlib', '8.0.0' +mod 'puppetlabs/concat', '7.0.0' +`; + const result = parser.parse(content); + + expect(result.success).toBe(true); + expect(result.modules).toHaveLength(2); + expect(result.modules[0].name).toBe("puppetlabs/stdlib"); + expect(result.modules[0].version).toBe("8.0.0"); + expect(result.modules[0].source).toBe("forge"); + expect(result.modules[1].name).toBe("puppetlabs/concat"); + expect(result.modules[1].version).toBe("7.0.0"); + }); + + it("should parse forge modules with hyphen format", () => { + const content = `mod 'puppetlabs-stdlib', '8.0.0'`; + const result = parser.parse(content); + + expect(result.success).toBe(true); + expect(result.modules[0].name).toBe("puppetlabs/stdlib"); + expect(result.modules[0].forgeSlug).toBe("puppetlabs-stdlib"); + }); + + it("should parse forge modules without version", () => { + const content = `mod 'puppetlabs/stdlib'`; + const result = parser.parse(content); + + expect(result.success).toBe(true); + expect(result.modules[0].version).toBe("latest"); + expect(result.warnings).toHaveLength(1); + expect(result.warnings[0]).toContain("no version specified"); + }); + + it("should parse git modules with tag", () => { + const content = ` +mod 'custom_module', + :git => 'https://github.com/example/custom_module.git', + :tag => 'v1.0.0' +`; + const result 
= parser.parse(content); + + expect(result.success).toBe(true); + expect(result.modules[0].name).toBe("custom_module"); + expect(result.modules[0].version).toBe("v1.0.0"); + expect(result.modules[0].source).toBe("git"); + expect(result.modules[0].gitUrl).toBe("https://github.com/example/custom_module.git"); + expect(result.modules[0].gitTag).toBe("v1.0.0"); + }); + + it("should parse git modules with branch", () => { + const content = ` +mod 'custom_module', + :git => 'https://github.com/example/custom_module.git', + :branch => 'main' +`; + const result = parser.parse(content); + + expect(result.success).toBe(true); + expect(result.modules[0].version).toBe("main"); + expect(result.modules[0].gitBranch).toBe("main"); + }); + + it("should parse git modules with commit", () => { + const content = ` +mod 'custom_module', + :git => 'https://github.com/example/custom_module.git', + :commit => 'abc123' +`; + const result = parser.parse(content); + + expect(result.success).toBe(true); + expect(result.modules[0].version).toBe("abc123"); + expect(result.modules[0].gitCommit).toBe("abc123"); + }); + + it("should parse git modules without ref (defaults to HEAD)", () => { + const content = ` +mod 'custom_module', + :git => 'https://github.com/example/custom_module.git' +`; + const result = parser.parse(content); + + expect(result.success).toBe(true); + expect(result.modules[0].version).toBe("HEAD"); + }); + + it("should parse local modules", () => { + const content = `mod 'local_module', :local => true`; + const result = parser.parse(content); + + expect(result.success).toBe(true); + expect(result.modules[0].name).toBe("local_module"); + expect(result.modules[0].version).toBe("local"); + }); + + it("should parse forge directive", () => { + const content = ` +forge 'https://forge.puppet.com' +mod 'puppetlabs/stdlib', '8.0.0' +`; + const result = parser.parse(content); + + expect(result.success).toBe(true); + expect(result.forgeUrl).toBe("https://forge.puppet.com"); + }); + + 
it("should parse moduledir directive", () => { + const content = ` +moduledir '.modules' +mod 'puppetlabs/stdlib', '8.0.0' +`; + const result = parser.parse(content); + + expect(result.success).toBe(true); + expect(result.moduledir).toBe(".modules"); + }); + + it("should skip comments", () => { + const content = ` +# This is a comment +mod 'puppetlabs/stdlib', '8.0.0' +# Another comment +`; + const result = parser.parse(content); + + expect(result.success).toBe(true); + expect(result.modules).toHaveLength(1); + }); + + it("should track line numbers", () => { + const content = ` +mod 'puppetlabs/stdlib', '8.0.0' + +mod 'puppetlabs/concat', '7.0.0' +`; + const result = parser.parse(content); + + expect(result.modules[0].line).toBe(2); + expect(result.modules[1].line).toBe(4); + }); + }); + + describe("error handling", () => { + it("should report error for invalid module declaration", () => { + const content = `mod invalid syntax here`; + const result = parser.parse(content); + + expect(result.success).toBe(false); + expect(result.errors).toHaveLength(1); + expect(result.errors[0].message).toContain("Failed to parse"); + expect(result.errors[0].line).toBe(1); + }); + + it("should report error for unclosed multi-line module", () => { + const content = ` +mod 'custom_module', + :git => 'https://github.com/example/custom_module.git', +`; + const result = parser.parse(content); + + expect(result.success).toBe(false); + expect(result.errors.some((e) => e.message.includes("Unclosed"))).toBe(true); + }); + + it("should warn about unknown directives", () => { + const content = ` +unknown_directive 'value' +mod 'puppetlabs/stdlib', '8.0.0' +`; + const result = parser.parse(content); + + expect(result.success).toBe(true); + expect(result.warnings.some((w) => w.includes("Unknown directive"))).toBe(true); + }); + + it("should handle file read errors", () => { + const result = parser.parseFile("/nonexistent/path/Puppetfile"); + + expect(result.success).toBe(false); + 
expect(result.errors).toHaveLength(1); + expect(result.errors[0].message).toContain("Failed to read"); + }); + }); + + describe("parseFile", () => { + it("should parse a Puppetfile from disk", () => { + const puppetfilePath = path.join(testDir, "Puppetfile"); + fs.writeFileSync( + puppetfilePath, + ` +forge 'https://forge.puppet.com' +mod 'puppetlabs/stdlib', '8.0.0' +` + ); + + const result = parser.parseFile(puppetfilePath); + + expect(result.success).toBe(true); + expect(result.modules).toHaveLength(1); + expect(result.forgeUrl).toBe("https://forge.puppet.com"); + }); + }); + + describe("toModuleUpdates", () => { + it("should convert parsed modules to ModuleUpdate format", () => { + const content = ` +mod 'puppetlabs/stdlib', '8.0.0' +mod 'custom_module', :git => 'https://github.com/example/custom_module.git', :tag => 'v1.0.0' +`; + const result = parser.parse(content); + const updates = parser.toModuleUpdates(result.modules); + + expect(updates).toHaveLength(2); + expect(updates[0].name).toBe("puppetlabs/stdlib"); + expect(updates[0].currentVersion).toBe("8.0.0"); + expect(updates[0].source).toBe("forge"); + expect(updates[1].name).toBe("custom_module"); + expect(updates[1].source).toBe("git"); + }); + }); + + describe("getErrorSummary", () => { + it("should return null for successful parse", () => { + const content = `mod 'puppetlabs/stdlib', '8.0.0'`; + const result = parser.parse(content); + const summary = parser.getErrorSummary(result); + + expect(summary).toBeNull(); + }); + + it("should return formatted error summary", () => { + const content = `mod invalid syntax`; + const result = parser.parse(content); + const summary = parser.getErrorSummary(result); + + expect(summary).not.toBeNull(); + expect(summary).toContain("Puppetfile parse errors"); + expect(summary).toContain("Line 1"); + }); + }); + + describe("validate", () => { + it("should validate a valid Puppetfile", () => { + const puppetfilePath = path.join(testDir, "Puppetfile"); + 
fs.writeFileSync(puppetfilePath, `mod 'puppetlabs/stdlib', '8.0.0'`); + + const result = parser.validate(puppetfilePath); + + expect(result.valid).toBe(true); + expect(result.modules).toHaveLength(1); + }); + + it("should report validation issues for unpinned versions", () => { + const puppetfilePath = path.join(testDir, "Puppetfile"); + fs.writeFileSync(puppetfilePath, `mod 'puppetlabs/stdlib'`); + + const result = parser.validate(puppetfilePath); + + expect(result.valid).toBe(true); // Still valid, just has warnings + expect(result.issues.some((i) => i.message.includes("no version pinned"))).toBe(true); + }); + + it("should report validation issues for git modules without ref", () => { + const puppetfilePath = path.join(testDir, "Puppetfile"); + fs.writeFileSync( + puppetfilePath, + `mod 'custom', :git => 'https://github.com/example/custom.git'` + ); + + const result = parser.validate(puppetfilePath); + + expect(result.valid).toBe(true); + expect(result.issues.some((i) => i.message.includes("no tag, branch, or commit"))).toBe(true); + }); + }); +}); diff --git a/backend/test/properties/hiera/property-10.test.ts b/backend/test/properties/hiera/property-10.test.ts new file mode 100644 index 0000000..aa628f6 --- /dev/null +++ b/backend/test/properties/hiera/property-10.test.ts @@ -0,0 +1,341 @@ +/** + * Feature: hiera-codebase-integration, Property 10: Hiera Resolution Correctness + * Validates: Requirements 5.1, 5.2, 5.3, 5.4 + * + * This property test verifies that: + * For any Hiera key, fact set, and hierarchy configuration, the Hiera_Resolver SHALL: + * - Apply the correct lookup method (first, unique, hash, deep) based on lookup_options + * - Return the value from the first matching hierarchy level (for 'first' lookup) + * - Merge values according to the specified merge strategy (for merge lookups) + * - Track which hierarchy level provided the final/winning value + */ + +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import fc from 
"fast-check"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import * as yaml from "yaml"; +import { HieraResolver } from "../../../src/integrations/hiera/HieraResolver"; +import type { + HieraConfig, + Facts, +} from "../../../src/integrations/hiera/types"; + +describe("Property 10: Hiera Resolution Correctness", () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + // Generator for valid key names + const keyNameArb = fc.string({ minLength: 1, maxLength: 20 }) + .filter((s) => /^[a-z][a-z_]*$/.test(s)); + + // Generator for simple values (strings, numbers, booleans) + const simpleValueArb = fc.oneof( + fc.string({ minLength: 1, maxLength: 20 }).filter((s) => !s.includes("%{") && !s.includes(":")), + fc.integer({ min: -1000, max: 1000 }), + fc.boolean() + ); + + // Generator for array values + const arrayValueArb = fc.array(simpleValueArb, { minLength: 1, maxLength: 5 }); + + // Generator for hash values with simple string keys + const hashKeyArb = fc.string({ minLength: 1, maxLength: 10 }) + .filter((s) => /^[a-z][a-z_]*$/.test(s)); + + const hashValueArb = fc.dictionary( + hashKeyArb, + simpleValueArb, + { minKeys: 1, maxKeys: 5 } + ); + + // Generator for facts + const factsArb: fc.Arbitrary = fc.record({ + nodeId: fc.constant("test-node"), + gatheredAt: fc.constant(new Date().toISOString()), + facts: fc.record({ + hostname: fc.constant("test-host"), + os: fc.record({ + family: fc.constantFrom("RedHat", "Debian", "Windows"), + name: fc.constantFrom("CentOS", "Ubuntu", "Windows"), + }), + }), + }); + + // Helper to create a temp directory and resolver + function createTestEnvironment(): { tempDir: string; resolver: HieraResolver } { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-resolver-test-")); + const resolver = new HieraResolver(tempDir); + return { tempDir, resolver }; + } + + // Helper to cleanup temp directory + function cleanupTestEnvironment(tempDir: string): void { + 
try { + fs.rmSync(tempDir, { recursive: true, force: true }); + } catch { + // Ignore cleanup errors + } + } + + // Helper to create a hieradata file + function createHieradataFile( + tempDir: string, + filePath: string, + data: Record + ): void { + const fullPath = path.join(tempDir, filePath); + fs.mkdirSync(path.dirname(fullPath), { recursive: true }); + fs.writeFileSync(fullPath, yaml.stringify(data)); + } + + // Helper to create a basic hierarchy config + function createBasicConfig(levels: string[]): HieraConfig { + return { + version: 5, + defaults: { + datadir: "data", + data_hash: "yaml_data", + }, + hierarchy: levels.map((name, index) => ({ + name, + path: `level${index}/data.yaml`, + })), + }; + } + + it("should return the first matching value for 'first' lookup method", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, simpleValueArb, simpleValueArb, factsArb, async (key, value1, value2, facts) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create two hierarchy levels with different values + createHieradataFile(tempDir, "data/level0/data.yaml", { [key]: value1 }); + createHieradataFile(tempDir, "data/level1/data.yaml", { [key]: value2 }); + + const config = createBasicConfig(["Level 0", "Level 1"]); + + const result = await resolver.resolve(key, facts, config, { + lookupMethod: "first", + }); + + // Should find the key + expect(result.found).toBe(true); + // Should return the first value (from level 0) + expect(result.resolvedValue).toEqual(value1); + // Should track the source + expect(result.hierarchyLevel).toBe("Level 0"); + expect(result.sourceFile).toContain("level0"); + // Should have all values recorded + expect(result.allValues.length).toBe(2); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + it("should merge arrays with unique values for 'unique' lookup method", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, arrayValueArb, arrayValueArb, 
factsArb, async (key, arr1, arr2, facts) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create two hierarchy levels with array values + createHieradataFile(tempDir, "data/level0/data.yaml", { [key]: arr1 }); + createHieradataFile(tempDir, "data/level1/data.yaml", { [key]: arr2 }); + + const config = createBasicConfig(["Level 0", "Level 1"]); + + const result = await resolver.resolve(key, facts, config, { + lookupMethod: "unique", + }); + + expect(result.found).toBe(true); + expect(Array.isArray(result.resolvedValue)).toBe(true); + + const resolvedArray = result.resolvedValue as unknown[]; + + // All items from arr1 should be present + for (const item of arr1) { + expect(resolvedArray.some((r) => JSON.stringify(r) === JSON.stringify(item))).toBe(true); + } + + // Items from arr2 should be present (if not duplicates) + for (const item of arr2) { + const isDuplicate = arr1.some((a) => JSON.stringify(a) === JSON.stringify(item)); + if (!isDuplicate) { + expect(resolvedArray.some((r) => JSON.stringify(r) === JSON.stringify(item))).toBe(true); + } + } + + // No duplicates in result + const uniqueItems = new Set(resolvedArray.map((r) => JSON.stringify(r))); + expect(uniqueItems.size).toBe(resolvedArray.length); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + it("should merge hashes for 'hash' lookup method with higher priority winning", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, hashValueArb, hashValueArb, factsArb, async (key, hash1, hash2, facts) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create two hierarchy levels with hash values + createHieradataFile(tempDir, "data/level0/data.yaml", { [key]: hash1 }); + createHieradataFile(tempDir, "data/level1/data.yaml", { [key]: hash2 }); + + const config = createBasicConfig(["Level 0", "Level 1"]); + + const result = await resolver.resolve(key, facts, config, { + lookupMethod: "hash", + }); + + 
expect(result.found).toBe(true); + expect(typeof result.resolvedValue).toBe("object"); + expect(Array.isArray(result.resolvedValue)).toBe(false); + + const resolvedHash = result.resolvedValue as Record; + + // Keys from hash1 (higher priority) should have their values + for (const [k, v] of Object.entries(hash1)) { + expect(resolvedHash[k]).toEqual(v); + } + + // Keys only in hash2 should also be present + for (const [k, v] of Object.entries(hash2)) { + if (!(k in hash1)) { + expect(resolvedHash[k]).toEqual(v); + } + } + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + it("should track all values from all hierarchy levels", async () => { + await fc.assert( + fc.asyncProperty( + keyNameArb, + fc.array(simpleValueArb, { minLength: 2, maxLength: 4 }), + factsArb, + async (key, values, facts) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create hierarchy levels with different values + const levelNames: string[] = []; + for (let i = 0; i < values.length; i++) { + createHieradataFile(tempDir, `data/level${i}/data.yaml`, { [key]: values[i] }); + levelNames.push(`Level ${i}`); + } + + const config = createBasicConfig(levelNames); + + const result = await resolver.resolve(key, facts, config); + + expect(result.found).toBe(true); + // Should have recorded all values + expect(result.allValues.length).toBe(values.length); + + // Each value should be tracked with its source + for (let i = 0; i < values.length; i++) { + const location = result.allValues[i]; + expect(location.value).toEqual(values[i]); + expect(location.hierarchyLevel).toBe(`Level ${i}`); + expect(location.file).toContain(`level${i}`); + } + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should apply lookup_options from hieradata files", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, arrayValueArb, arrayValueArb, factsArb, async (key, arr1, arr2, facts) => { + const { 
tempDir, resolver } = createTestEnvironment(); + try { + // Create hieradata with lookup_options specifying 'unique' merge + createHieradataFile(tempDir, "data/level0/data.yaml", { + lookup_options: { + [key]: { merge: "unique" }, + }, + [key]: arr1, + }); + createHieradataFile(tempDir, "data/level1/data.yaml", { [key]: arr2 }); + + const config = createBasicConfig(["Level 0", "Level 1"]); + + // Don't specify lookup method - should use lookup_options + const result = await resolver.resolve(key, facts, config); + + expect(result.found).toBe(true); + expect(result.lookupMethod).toBe("unique"); + expect(Array.isArray(result.resolvedValue)).toBe(true); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + it("should support knockout_prefix for deep merges", async () => { + await fc.assert( + fc.asyncProperty(factsArb, async (facts) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + const key = "test_hash"; + const knockoutPrefix = "--"; + + // Create hieradata with knockout_options + createHieradataFile(tempDir, "data/level0/data.yaml", { + lookup_options: { + [key]: { merge: "deep", knockout_prefix: knockoutPrefix }, + }, + [key]: { + keep_this: "value1", + [`${knockoutPrefix}remove_this`]: null, + }, + }); + createHieradataFile(tempDir, "data/level1/data.yaml", { + [key]: { + keep_this: "value2", + remove_this: "should_be_removed", + another_key: "value3", + }, + }); + + const config = createBasicConfig(["Level 0", "Level 1"]); + + const result = await resolver.resolve(key, facts, config); + + expect(result.found).toBe(true); + const resolvedHash = result.resolvedValue as Record; + + // The knocked-out key should not be present + expect("remove_this" in resolvedHash).toBe(false); + // Other keys should be present + expect(resolvedHash.keep_this).toBe("value1"); + expect(resolvedHash.another_key).toBe("value3"); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); +}); 
diff --git a/backend/test/properties/hiera/property-11.test.ts b/backend/test/properties/hiera/property-11.test.ts new file mode 100644 index 0000000..73a47aa --- /dev/null +++ b/backend/test/properties/hiera/property-11.test.ts @@ -0,0 +1,307 @@ +/** + * Feature: hiera-codebase-integration, Property 11: Value Interpolation + * Validates: Requirements 5.5 + * + * This property test verifies that: + * For any Hiera value containing %{facts.xxx} or %{::xxx} variables, + * the HieraResolver SHALL replace them with the corresponding fact values + * and handle nested interpolation in arrays and objects. + */ + +import { describe, it, expect } from "vitest"; +import fc from "fast-check"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import * as yaml from "yaml"; +import { HieraResolver } from "../../../src/integrations/hiera/HieraResolver"; +import type { + HieraConfig, + Facts, +} from "../../../src/integrations/hiera/types"; + +describe("Property 11: Value Interpolation", () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + // Generator for valid fact names (alphanumeric with underscores) + const factNameArb = fc.string({ minLength: 1, maxLength: 15 }) + .filter((s) => /^[a-z][a-z0-9_]*$/.test(s)); + + // Generator for simple fact values (strings and numbers) + const factValueArb = fc.oneof( + fc.string({ minLength: 1, maxLength: 20 }).filter((s) => !s.includes("%{") && !s.includes(":")), + fc.integer({ min: 0, max: 1000 }) + ); + + // Generator for key names + const keyNameArb = fc.string({ minLength: 1, maxLength: 20 }) + .filter((s) => /^[a-z][a-z_]*$/.test(s)); + + // Helper to create a temp directory and resolver + function createTestEnvironment(): { tempDir: string; resolver: HieraResolver } { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-interp-test-")); + const resolver = new HieraResolver(tempDir); + return { tempDir, resolver }; + } + + // Helper to cleanup temp directory + 
function cleanupTestEnvironment(tempDir: string): void { + try { + fs.rmSync(tempDir, { recursive: true, force: true }); + } catch { + // Ignore cleanup errors + } + } + + // Helper to create a hieradata file + function createHieradataFile( + tempDir: string, + filePath: string, + data: Record + ): void { + const fullPath = path.join(tempDir, filePath); + fs.mkdirSync(path.dirname(fullPath), { recursive: true }); + fs.writeFileSync(fullPath, yaml.stringify(data)); + } + + // Helper to create a basic hierarchy config + function createBasicConfig(): HieraConfig { + return { + version: 5, + defaults: { + datadir: "data", + data_hash: "yaml_data", + }, + hierarchy: [ + { + name: "Common", + path: "common.yaml", + }, + ], + }; + } + + it("should interpolate %{facts.xxx} variables with fact values", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, factNameArb, factValueArb, async (key, factName, factValue) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create hieradata with interpolation variable + const valueWithInterpolation = `prefix-%{facts.${factName}}-suffix`; + createHieradataFile(tempDir, "data/common.yaml", { + [key]: valueWithInterpolation, + }); + + const config = createBasicConfig(); + const facts: Facts = { + nodeId: "test-node", + gatheredAt: new Date().toISOString(), + facts: { + [factName]: factValue, + }, + }; + + const result = await resolver.resolve(key, facts, config); + + expect(result.found).toBe(true); + // The value should have the fact interpolated + const expectedValue = `prefix-${factValue}-suffix`; + expect(result.resolvedValue).toBe(expectedValue); + // Should track the interpolated variable + expect(result.interpolatedVariables).toBeDefined(); + expect(result.interpolatedVariables?.[`facts.${factName}`]).toBe(factValue); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + it("should interpolate %{::xxx} legacy syntax with fact values", async () => { + await 
fc.assert( + fc.asyncProperty(keyNameArb, factNameArb, factValueArb, async (key, factName, factValue) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create hieradata with legacy interpolation variable + const valueWithInterpolation = `value-%{::${factName}}`; + createHieradataFile(tempDir, "data/common.yaml", { + [key]: valueWithInterpolation, + }); + + const config = createBasicConfig(); + const facts: Facts = { + nodeId: "test-node", + gatheredAt: new Date().toISOString(), + facts: { + [factName]: factValue, + }, + }; + + const result = await resolver.resolve(key, facts, config); + + expect(result.found).toBe(true); + // The value should have the fact interpolated + const expectedValue = `value-${factValue}`; + expect(result.resolvedValue).toBe(expectedValue); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + it("should interpolate variables in array values", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, factNameArb, factValueArb, async (key, factName, factValue) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create hieradata with array containing interpolation + createHieradataFile(tempDir, "data/common.yaml", { + [key]: [ + `item1-%{facts.${factName}}`, + "static-item", + `item2-%{facts.${factName}}`, + ], + }); + + const config = createBasicConfig(); + const facts: Facts = { + nodeId: "test-node", + gatheredAt: new Date().toISOString(), + facts: { + [factName]: factValue, + }, + }; + + const result = await resolver.resolve(key, facts, config); + + expect(result.found).toBe(true); + expect(Array.isArray(result.resolvedValue)).toBe(true); + + const resolvedArray = result.resolvedValue as string[]; + expect(resolvedArray[0]).toBe(`item1-${factValue}`); + expect(resolvedArray[1]).toBe("static-item"); + expect(resolvedArray[2]).toBe(`item2-${factValue}`); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + 
it("should interpolate variables in nested object values", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, factNameArb, factValueArb, async (key, factName, factValue) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create hieradata with nested object containing interpolation + createHieradataFile(tempDir, "data/common.yaml", { + [key]: { + nested: { + value: `nested-%{facts.${factName}}`, + }, + direct: `direct-%{facts.${factName}}`, + }, + }); + + const config = createBasicConfig(); + const facts: Facts = { + nodeId: "test-node", + gatheredAt: new Date().toISOString(), + facts: { + [factName]: factValue, + }, + }; + + const result = await resolver.resolve(key, facts, config); + + expect(result.found).toBe(true); + expect(typeof result.resolvedValue).toBe("object"); + + const resolvedObj = result.resolvedValue as Record; + const nestedObj = resolvedObj.nested as Record; + + expect(nestedObj.value).toBe(`nested-${factValue}`); + expect(resolvedObj.direct).toBe(`direct-${factValue}`); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + it("should preserve unresolved variables when fact is missing", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, factNameArb, async (key, factName) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create hieradata with interpolation variable + const valueWithInterpolation = `value-%{facts.${factName}}`; + createHieradataFile(tempDir, "data/common.yaml", { + [key]: valueWithInterpolation, + }); + + const config = createBasicConfig(); + // Facts without the required fact + const facts: Facts = { + nodeId: "test-node", + gatheredAt: new Date().toISOString(), + facts: { + other_fact: "other_value", + }, + }; + + const result = await resolver.resolve(key, facts, config); + + expect(result.found).toBe(true); + // The unresolved variable should be preserved + expect(result.resolvedValue).toBe(valueWithInterpolation); 
+ } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + it("should handle nested fact paths like facts.os.family", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, async (key) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create hieradata with nested fact path + createHieradataFile(tempDir, "data/common.yaml", { + [key]: "os-family-%{facts.os.family}", + }); + + const config = createBasicConfig(); + const facts: Facts = { + nodeId: "test-node", + gatheredAt: new Date().toISOString(), + facts: { + os: { + family: "RedHat", + name: "CentOS", + }, + }, + }; + + const result = await resolver.resolve(key, facts, config); + + expect(result.found).toBe(true); + expect(result.resolvedValue).toBe("os-family-RedHat"); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); +}); diff --git a/backend/test/properties/hiera/property-12.test.ts b/backend/test/properties/hiera/property-12.test.ts new file mode 100644 index 0000000..1377ca6 --- /dev/null +++ b/backend/test/properties/hiera/property-12.test.ts @@ -0,0 +1,268 @@ +/** + * Feature: hiera-codebase-integration, Property 12: Missing Key Handling + * Validates: Requirements 5.6, 3.6 + * + * This property test verifies that: + * For any Hiera key that does not exist in any hierarchy level, + * the HieraResolver SHALL return an appropriate indicator (found: false) + * and SHALL NOT throw errors for missing keys. 
+ */ + +import { describe, it, expect } from "vitest"; +import fc from "fast-check"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import * as yaml from "yaml"; +import { HieraResolver } from "../../../src/integrations/hiera/HieraResolver"; +import type { + HieraConfig, + Facts, +} from "../../../src/integrations/hiera/types"; + +describe("Property 12: Missing Key Handling", () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + // Generator for valid key names + const keyNameArb = fc.string({ minLength: 1, maxLength: 20 }) + .filter((s) => /^[a-z][a-z_]*$/.test(s)); + + // Generator for simple values + const simpleValueArb = fc.oneof( + fc.string({ minLength: 1, maxLength: 20 }).filter((s) => !s.includes("%{") && !s.includes(":")), + fc.integer({ min: -1000, max: 1000 }), + fc.boolean() + ); + + // Generator for facts + const factsArb: fc.Arbitrary = fc.record({ + nodeId: fc.constant("test-node"), + gatheredAt: fc.constant(new Date().toISOString()), + facts: fc.record({ + hostname: fc.constant("test-host"), + os: fc.record({ + family: fc.constantFrom("RedHat", "Debian", "Windows"), + name: fc.constantFrom("CentOS", "Ubuntu", "Windows"), + }), + }), + }); + + // Helper to create a temp directory and resolver + function createTestEnvironment(): { tempDir: string; resolver: HieraResolver } { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-missing-test-")); + const resolver = new HieraResolver(tempDir); + return { tempDir, resolver }; + } + + // Helper to cleanup temp directory + function cleanupTestEnvironment(tempDir: string): void { + try { + fs.rmSync(tempDir, { recursive: true, force: true }); + } catch { + // Ignore cleanup errors + } + } + + // Helper to create a hieradata file + function createHieradataFile( + tempDir: string, + filePath: string, + data: Record + ): void { + const fullPath = path.join(tempDir, filePath); + fs.mkdirSync(path.dirname(fullPath), { recursive: true 
}); + fs.writeFileSync(fullPath, yaml.stringify(data)); + } + + // Helper to create a basic hierarchy config + function createBasicConfig(): HieraConfig { + return { + version: 5, + defaults: { + datadir: "data", + data_hash: "yaml_data", + }, + hierarchy: [ + { + name: "Common", + path: "common.yaml", + }, + ], + }; + } + + it("should return found: false for keys that do not exist", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, keyNameArb, simpleValueArb, factsArb, async (existingKey, missingKey, value, facts) => { + // Ensure the keys are different + if (existingKey === missingKey) { + return; // Skip this case + } + + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create hieradata with only the existing key + createHieradataFile(tempDir, "data/common.yaml", { + [existingKey]: value, + }); + + const config = createBasicConfig(); + + // Try to resolve the missing key + const result = await resolver.resolve(missingKey, facts, config); + + // Should NOT throw an error + // Should return found: false + expect(result.found).toBe(false); + expect(result.key).toBe(missingKey); + expect(result.resolvedValue).toBeUndefined(); + expect(result.allValues).toEqual([]); + expect(result.sourceFile).toBe(""); + expect(result.hierarchyLevel).toBe(""); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + it("should not throw errors when resolving missing keys", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, factsArb, async (key, facts) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create empty hieradata file + createHieradataFile(tempDir, "data/common.yaml", {}); + + const config = createBasicConfig(); + + // Should not throw + let error: Error | null = null; + let result; + try { + result = await resolver.resolve(key, facts, config); + } catch (e) { + error = e as Error; + } + + expect(error).toBeNull(); + expect(result).toBeDefined(); + 
expect(result?.found).toBe(false); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + it("should return default value when provided for missing keys", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, simpleValueArb, factsArb, async (key, defaultValue, facts) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create empty hieradata file + createHieradataFile(tempDir, "data/common.yaml", {}); + + const config = createBasicConfig(); + + const result = await resolver.resolve(key, facts, config, { + defaultValue, + }); + + expect(result.found).toBe(false); + expect(result.resolvedValue).toEqual(defaultValue); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + it("should handle missing hieradata files gracefully", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, factsArb, async (key, facts) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Don't create any hieradata files + const config = createBasicConfig(); + + // Should not throw + let error: Error | null = null; + let result; + try { + result = await resolver.resolve(key, facts, config); + } catch (e) { + error = e as Error; + } + + expect(error).toBeNull(); + expect(result).toBeDefined(); + expect(result?.found).toBe(false); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + it("should return found: false when key exists in no hierarchy levels", async () => { + await fc.assert( + fc.asyncProperty( + keyNameArb, + fc.array(keyNameArb, { minLength: 1, maxLength: 3 }), + factsArb, + async (missingKey, existingKeys, facts) => { + // Ensure missing key is not in existing keys + if (existingKeys.includes(missingKey)) { + return; // Skip this case + } + + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create multiple hierarchy levels with different keys + const config: HieraConfig = { + version: 
5, + defaults: { + datadir: "data", + data_hash: "yaml_data", + }, + hierarchy: [ + { name: "Level 0", path: "level0.yaml" }, + { name: "Level 1", path: "level1.yaml" }, + ], + }; + + // Create hieradata files with existing keys but not the missing key + const data0: Record = {}; + const data1: Record = {}; + existingKeys.forEach((k, i) => { + if (i % 2 === 0) { + data0[k] = `value-${i}`; + } else { + data1[k] = `value-${i}`; + } + }); + + createHieradataFile(tempDir, "data/level0.yaml", data0); + createHieradataFile(tempDir, "data/level1.yaml", data1); + + const result = await resolver.resolve(missingKey, facts, config); + + expect(result.found).toBe(false); + expect(result.allValues).toEqual([]); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); +}); diff --git a/backend/test/properties/hiera/property-13.test.ts b/backend/test/properties/hiera/property-13.test.ts new file mode 100644 index 0000000..6470c7d --- /dev/null +++ b/backend/test/properties/hiera/property-13.test.ts @@ -0,0 +1,320 @@ +/** + * Feature: hiera-codebase-integration, Property 13: Key Usage Filtering + * Validates: Requirements 6.6 + * + * This property test verifies that: + * For any node with a set of included classes and a set of Hiera keys, + * filtering by "used" SHALL return only keys that are referenced by the + * included classes, and filtering by "unused" SHALL return the complement. 
+ */ + +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import fc from "fast-check"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import { HieraService, type HieraServiceConfig } from "../../../src/integrations/hiera/HieraService"; +import { IntegrationManager } from "../../../src/integrations/IntegrationManager"; + +describe("Property 13: Key Usage Filtering", () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + // Generator for valid class names (Puppet class naming convention) + const classNamePartArb = fc.string({ minLength: 1, maxLength: 10 }) + .filter((s) => /^[a-z][a-z0-9_]*$/.test(s)); + + const classNameArb = fc.array(classNamePartArb, { minLength: 1, maxLength: 3 }) + .map((parts) => parts.join("::")); + + // Generator for Hiera key names (typically match class patterns) + const hieraKeyArb = fc.array(classNamePartArb, { minLength: 1, maxLength: 4 }) + .map((parts) => parts.join("::")); + + // Generator for simple values + const simpleValueArb = fc.oneof( + fc.string({ minLength: 1, maxLength: 20 }).filter((s) => !s.includes("%{")), + fc.integer({ min: -1000, max: 1000 }), + fc.boolean() + ); + + // Helper to create a temp directory with test structure + function createTestEnvironment( + keys: string[], + keyValues: Map + ): { tempDir: string; service: HieraService; integrationManager: IntegrationManager } { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-key-usage-test-")); + + // Create directories + fs.mkdirSync(path.join(tempDir, "data"), { recursive: true }); + fs.mkdirSync(path.join(tempDir, "facts"), { recursive: true }); + + // Create hiera.yaml + const hieraConfig = ` +version: 5 +defaults: + datadir: data + data_hash: yaml_data +hierarchy: + - name: "Common data" + path: "common.yaml" +`; + fs.writeFileSync(path.join(tempDir, "hiera.yaml"), hieraConfig); + + // Create common.yaml with all keys + const commonData: Record = {}; + for (const 
key of keys) { + commonData[key] = keyValues.get(key) ?? "default_value"; + } + + // Use yaml library for proper YAML formatting + const yaml = require("yaml"); + fs.writeFileSync(path.join(tempDir, "data", "common.yaml"), yaml.stringify(commonData)); + + // Create local fact file + const factData = { + name: "test-node.example.com", + values: { + networking: { + hostname: "test-node", + fqdn: "test-node.example.com", + }, + }, + }; + fs.writeFileSync( + path.join(tempDir, "facts", "test-node.example.com.json"), + JSON.stringify(factData, null, 2) + ); + + // Create integration manager and service + const integrationManager = new IntegrationManager(); + + const config: HieraServiceConfig = { + controlRepoPath: tempDir, + hieraConfigPath: "hiera.yaml", + hieradataPath: "data", + factSources: { + preferPuppetDB: false, + localFactsPath: path.join(tempDir, "facts"), + }, + cache: { + enabled: false, // Disable caching for tests + ttl: 0, + maxEntries: 0, + }, + }; + + const service = new HieraService(integrationManager, config); + + return { tempDir, service, integrationManager }; + } + + // Helper to cleanup temp directory + function cleanupTestEnvironment(tempDir: string): void { + try { + fs.rmSync(tempDir, { recursive: true, force: true }); + } catch { + // Ignore cleanup errors + } + } + + it("should partition keys into used and unused sets that are disjoint", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(hieraKeyArb, { minLength: 1, maxLength: 10 }), + async (keys) => { + // Ensure unique keys + const uniqueKeys = [...new Set(keys)]; + if (uniqueKeys.length === 0) return; + + const keyValues = new Map(); + for (const key of uniqueKeys) { + keyValues.set(key, `value_for_${key}`); + } + + const { tempDir, service } = createTestEnvironment(uniqueKeys, keyValues); + try { + await service.initialize(); + + const nodeData = await service.getNodeHieraData("test-node.example.com"); + + // Used and unused sets should be disjoint + const intersection = 
new Set( + [...nodeData.usedKeys].filter((k) => nodeData.unusedKeys.has(k)) + ); + expect(intersection.size).toBe(0); + + // Union should equal all keys + const union = new Set([...nodeData.usedKeys, ...nodeData.unusedKeys]); + expect(union.size).toBe(nodeData.keys.size); + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should classify all resolved keys into either used or unused", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(hieraKeyArb, { minLength: 1, maxLength: 10 }), + async (keys) => { + // Ensure unique keys + const uniqueKeys = [...new Set(keys)]; + if (uniqueKeys.length === 0) return; + + const keyValues = new Map(); + for (const key of uniqueKeys) { + keyValues.set(key, `value_for_${key}`); + } + + const { tempDir, service } = createTestEnvironment(uniqueKeys, keyValues); + try { + await service.initialize(); + + const nodeData = await service.getNodeHieraData("test-node.example.com"); + + // Every key in the keys map should be in either usedKeys or unusedKeys + for (const keyName of nodeData.keys.keys()) { + const isUsed = nodeData.usedKeys.has(keyName); + const isUnused = nodeData.unusedKeys.has(keyName); + + // Key must be in exactly one set + expect(isUsed || isUnused).toBe(true); + expect(isUsed && isUnused).toBe(false); + } + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should mark all keys as unused when no catalog data is available", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(hieraKeyArb, { minLength: 1, maxLength: 10 }), + async (keys) => { + // Ensure unique keys + const uniqueKeys = [...new Set(keys)]; + if (uniqueKeys.length === 0) return; + + const keyValues = new Map(); + for (const key of uniqueKeys) { + keyValues.set(key, `value_for_${key}`); + } + + const { tempDir, service } = createTestEnvironment(uniqueKeys, keyValues); + try { + 
await service.initialize(); + + // Without PuppetDB, no catalog data is available + const nodeData = await service.getNodeHieraData("test-node.example.com"); + + // All keys should be marked as unused + expect(nodeData.usedKeys.size).toBe(0); + expect(nodeData.unusedKeys.size).toBe(nodeData.keys.size); + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should maintain consistency between usedKeys/unusedKeys and keys map size", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(hieraKeyArb, { minLength: 1, maxLength: 15 }), + fc.array(simpleValueArb, { minLength: 1, maxLength: 15 }), + async (keys, values) => { + // Ensure unique keys and match with values + const uniqueKeys = [...new Set(keys)]; + if (uniqueKeys.length === 0) return; + + const keyValues = new Map(); + for (let i = 0; i < uniqueKeys.length; i++) { + keyValues.set(uniqueKeys[i], values[i % values.length]); + } + + const { tempDir, service } = createTestEnvironment(uniqueKeys, keyValues); + try { + await service.initialize(); + + const nodeData = await service.getNodeHieraData("test-node.example.com"); + + // Total classified keys should equal total keys + const totalClassified = nodeData.usedKeys.size + nodeData.unusedKeys.size; + expect(totalClassified).toBe(nodeData.keys.size); + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should return consistent results for the same node", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(hieraKeyArb, { minLength: 1, maxLength: 8 }), + async (keys) => { + // Ensure unique keys + const uniqueKeys = [...new Set(keys)]; + if (uniqueKeys.length === 0) return; + + const keyValues = new Map(); + for (const key of uniqueKeys) { + keyValues.set(key, `value_for_${key}`); + } + + const { tempDir, service } = createTestEnvironment(uniqueKeys, keyValues); + try { + await 
service.initialize(); + + // Get node data twice + const nodeData1 = await service.getNodeHieraData("test-node.example.com"); + + // Invalidate cache to force re-computation + service.invalidateCache(); + + const nodeData2 = await service.getNodeHieraData("test-node.example.com"); + + // Results should be consistent + expect(nodeData1.usedKeys.size).toBe(nodeData2.usedKeys.size); + expect(nodeData1.unusedKeys.size).toBe(nodeData2.unusedKeys.size); + + // Same keys should be in same sets + for (const key of nodeData1.usedKeys) { + expect(nodeData2.usedKeys.has(key)).toBe(true); + } + for (const key of nodeData1.unusedKeys) { + expect(nodeData2.unusedKeys.has(key)).toBe(true); + } + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); +}); diff --git a/backend/test/properties/hiera/property-14.test.ts b/backend/test/properties/hiera/property-14.test.ts new file mode 100644 index 0000000..e6ac694 --- /dev/null +++ b/backend/test/properties/hiera/property-14.test.ts @@ -0,0 +1,378 @@ +/** + * Feature: hiera-codebase-integration, Property 14: Global Key Resolution Across Nodes + * Validates: Requirements 7.2, 7.3, 7.6 + * + * This property test verifies that: + * For any Hiera key and set of nodes, querying the key across all nodes SHALL return + * for each node: the resolved value (or indication of not found), the source file, + * and the hierarchy level. 
+ */ + +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import fc from "fast-check"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import * as yaml from "yaml"; +import { HieraService, type HieraServiceConfig } from "../../../src/integrations/hiera/HieraService"; +import { IntegrationManager } from "../../../src/integrations/IntegrationManager"; + +describe("Property 14: Global Key Resolution Across Nodes", () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + // Generator for valid key name parts + const keyPartArb = fc.string({ minLength: 1, maxLength: 10 }) + .filter((s) => /^[a-z][a-z0-9_]*$/.test(s)); + + // Generator for Hiera key names + const hieraKeyArb = fc.array(keyPartArb, { minLength: 1, maxLength: 3 }) + .map((parts) => parts.join("::")); + + // Generator for node names + const nodeNameArb = fc.string({ minLength: 1, maxLength: 10 }) + .filter((s) => /^[a-z][a-z0-9-]*$/.test(s)) + .map((name) => `${name}.example.com`); + + // Generator for simple values + const simpleValueArb = fc.oneof( + fc.string({ minLength: 1, maxLength: 20 }).filter((s) => !s.includes("%{")), + fc.integer({ min: -1000, max: 1000 }), + fc.boolean() + ); + + // Helper to create a temp directory with test structure + function createTestEnvironment( + nodes: string[], + keys: string[], + nodeKeyValues: Map>, + commonKeyValues: Map + ): { tempDir: string; service: HieraService; integrationManager: IntegrationManager } { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-global-key-test-")); + + // Create directories + fs.mkdirSync(path.join(tempDir, "data", "nodes"), { recursive: true }); + fs.mkdirSync(path.join(tempDir, "facts"), { recursive: true }); + + // Create hiera.yaml + const hieraConfig = ` +version: 5 +defaults: + datadir: data + data_hash: yaml_data +hierarchy: + - name: "Per-node data" + path: "nodes/%{facts.networking.hostname}.yaml" + - name: "Common data" + path: 
"common.yaml" +`; + fs.writeFileSync(path.join(tempDir, "hiera.yaml"), hieraConfig); + + // Create common.yaml with common key values + const commonData: Record = {}; + for (const [key, value] of commonKeyValues) { + commonData[key] = value; + } + fs.writeFileSync(path.join(tempDir, "data", "common.yaml"), yaml.stringify(commonData)); + + // Create node-specific data and fact files + for (const nodeId of nodes) { + const hostname = nodeId.split(".")[0]; + + // Create node-specific hieradata + const nodeData: Record = {}; + const nodeValues = nodeKeyValues.get(nodeId); + if (nodeValues) { + for (const [key, value] of nodeValues) { + nodeData[key] = value; + } + } + if (Object.keys(nodeData).length > 0) { + fs.writeFileSync( + path.join(tempDir, "data", "nodes", `${hostname}.yaml`), + yaml.stringify(nodeData) + ); + } + + // Create fact file + const factData = { + name: nodeId, + values: { + networking: { + hostname, + fqdn: nodeId, + }, + }, + }; + fs.writeFileSync( + path.join(tempDir, "facts", `${nodeId}.json`), + JSON.stringify(factData, null, 2) + ); + } + + // Create integration manager and service + const integrationManager = new IntegrationManager(); + + const config: HieraServiceConfig = { + controlRepoPath: tempDir, + hieraConfigPath: "hiera.yaml", + hieradataPath: "data", + factSources: { + preferPuppetDB: false, + localFactsPath: path.join(tempDir, "facts"), + }, + cache: { + enabled: false, + ttl: 0, + maxEntries: 0, + }, + }; + + const service = new HieraService(integrationManager, config); + + return { tempDir, service, integrationManager }; + } + + // Helper to cleanup temp directory + function cleanupTestEnvironment(tempDir: string): void { + try { + fs.rmSync(tempDir, { recursive: true, force: true }); + } catch { + // Ignore cleanup errors + } + } + + it("should return results for all available nodes", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(nodeNameArb, { minLength: 1, maxLength: 5 }), + hieraKeyArb, + simpleValueArb, + 
async (nodes, key, commonValue) => { + // Ensure unique nodes + const uniqueNodes = [...new Set(nodes)]; + if (uniqueNodes.length === 0) return; + + const nodeKeyValues = new Map>(); + const commonKeyValues = new Map([[key, commonValue]]); + + const { tempDir, service } = createTestEnvironment( + uniqueNodes, + [key], + nodeKeyValues, + commonKeyValues + ); + + try { + await service.initialize(); + + const results = await service.getKeyValuesAcrossNodes(key); + + // Should have results for all nodes + expect(results.length).toBe(uniqueNodes.length); + + // Each node should be represented + const resultNodeIds = new Set(results.map((r) => r.nodeId)); + for (const nodeId of uniqueNodes) { + expect(resultNodeIds.has(nodeId)).toBe(true); + } + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should include source file and hierarchy level for found keys", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(nodeNameArb, { minLength: 1, maxLength: 3 }), + hieraKeyArb, + simpleValueArb, + async (nodes, key, value) => { + const uniqueNodes = [...new Set(nodes)]; + if (uniqueNodes.length === 0) return; + + const nodeKeyValues = new Map>(); + const commonKeyValues = new Map([[key, value]]); + + const { tempDir, service } = createTestEnvironment( + uniqueNodes, + [key], + nodeKeyValues, + commonKeyValues + ); + + try { + await service.initialize(); + + const results = await service.getKeyValuesAcrossNodes(key); + + for (const result of results) { + if (result.found) { + // Source file should be defined and non-empty + expect(result.sourceFile).toBeTruthy(); + // Hierarchy level should be defined and non-empty + expect(result.hierarchyLevel).toBeTruthy(); + } + } + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should indicate when key is not defined for a node", async () => { + await fc.assert( + 
fc.asyncProperty( + fc.array(nodeNameArb, { minLength: 1, maxLength: 3 }), + hieraKeyArb, + async (nodes, key) => { + const uniqueNodes = [...new Set(nodes)]; + if (uniqueNodes.length === 0) return; + + // Don't define the key anywhere + const nodeKeyValues = new Map>(); + const commonKeyValues = new Map(); + + const { tempDir, service } = createTestEnvironment( + uniqueNodes, + [], + nodeKeyValues, + commonKeyValues + ); + + try { + await service.initialize(); + + const results = await service.getKeyValuesAcrossNodes(key); + + // All results should indicate key not found + for (const result of results) { + expect(result.found).toBe(false); + } + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should return node-specific values when they override common values", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(nodeNameArb, { minLength: 2, maxLength: 4 }), + hieraKeyArb, + simpleValueArb, + simpleValueArb, + async (nodes, key, commonValue, nodeSpecificValue) => { + const uniqueNodes = [...new Set(nodes)]; + if (uniqueNodes.length < 2) return; + // Ensure values are different + if (JSON.stringify(commonValue) === JSON.stringify(nodeSpecificValue)) return; + + // First node gets a specific value, others use common + const firstNode = uniqueNodes[0]; + const nodeKeyValues = new Map>(); + nodeKeyValues.set(firstNode, new Map([[key, nodeSpecificValue]])); + + const commonKeyValues = new Map([[key, commonValue]]); + + const { tempDir, service } = createTestEnvironment( + uniqueNodes, + [key], + nodeKeyValues, + commonKeyValues + ); + + try { + await service.initialize(); + + const results = await service.getKeyValuesAcrossNodes(key); + + // Find results for first node and others + const firstNodeResult = results.find((r) => r.nodeId === firstNode); + const otherResults = results.filter((r) => r.nodeId !== firstNode); + + // First node should have node-specific value + 
expect(firstNodeResult?.found).toBe(true); + expect(firstNodeResult?.value).toEqual(nodeSpecificValue); + + // Other nodes should have common value + for (const result of otherResults) { + expect(result.found).toBe(true); + expect(result.value).toEqual(commonValue); + } + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should return consistent results across multiple calls", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(nodeNameArb, { minLength: 1, maxLength: 3 }), + hieraKeyArb, + simpleValueArb, + async (nodes, key, value) => { + const uniqueNodes = [...new Set(nodes)]; + if (uniqueNodes.length === 0) return; + + const nodeKeyValues = new Map>(); + const commonKeyValues = new Map([[key, value]]); + + const { tempDir, service } = createTestEnvironment( + uniqueNodes, + [key], + nodeKeyValues, + commonKeyValues + ); + + try { + await service.initialize(); + + // Call twice + const results1 = await service.getKeyValuesAcrossNodes(key); + const results2 = await service.getKeyValuesAcrossNodes(key); + + // Results should be consistent + expect(results1.length).toBe(results2.length); + + for (let i = 0; i < results1.length; i++) { + const r1 = results1.find((r) => r.nodeId === results2[i].nodeId); + expect(r1).toBeDefined(); + expect(r1?.value).toEqual(results2[i].value); + expect(r1?.found).toBe(results2[i].found); + } + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); +}); diff --git a/backend/test/properties/hiera/property-15.test.ts b/backend/test/properties/hiera/property-15.test.ts new file mode 100644 index 0000000..d11ce60 --- /dev/null +++ b/backend/test/properties/hiera/property-15.test.ts @@ -0,0 +1,434 @@ +/** + * Feature: hiera-codebase-integration, Property 15: Node Grouping by Value + * Validates: Requirements 7.5 + * + * This property test verifies that: + * For any set of 
key-node-value tuples, grouping by resolved value SHALL produce + * groups where all nodes in each group have the same resolved value for the key. + */ + +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import fc from "fast-check"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import * as yaml from "yaml"; +import { HieraService, type HieraServiceConfig } from "../../../src/integrations/hiera/HieraService"; +import { IntegrationManager } from "../../../src/integrations/IntegrationManager"; +import type { KeyNodeValues } from "../../../src/integrations/hiera/types"; + +describe("Property 15: Node Grouping by Value", () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + // Generator for valid key name parts + const keyPartArb = fc.string({ minLength: 1, maxLength: 10 }) + .filter((s) => /^[a-z][a-z0-9_]*$/.test(s)); + + // Generator for Hiera key names + const hieraKeyArb = fc.array(keyPartArb, { minLength: 1, maxLength: 3 }) + .map((parts) => parts.join("::")); + + // Generator for node names + const nodeNameArb = fc.string({ minLength: 1, maxLength: 10 }) + .filter((s) => /^[a-z][a-z0-9-]*$/.test(s)) + .map((name) => `${name}.example.com`); + + // Generator for simple values + const simpleValueArb = fc.oneof( + fc.string({ minLength: 1, maxLength: 20 }).filter((s) => !s.includes("%{")), + fc.integer({ min: -1000, max: 1000 }), + fc.boolean() + ); + + // Generator for KeyNodeValues + const keyNodeValuesArb = fc.record({ + nodeId: nodeNameArb, + value: fc.option(simpleValueArb, { nil: undefined }), + sourceFile: fc.string({ minLength: 1, maxLength: 30 }), + hierarchyLevel: fc.string({ minLength: 1, maxLength: 20 }), + found: fc.boolean(), + }).map((r) => ({ + ...r, + // If found is false, value should be undefined + value: r.found ? 
r.value : undefined, + })); + + // Helper to create a temp directory with test structure + function createTestEnvironment( + nodes: string[], + nodeKeyValues: Map, + commonValue?: unknown + ): { tempDir: string; service: HieraService; integrationManager: IntegrationManager } { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-grouping-test-")); + + // Create directories + fs.mkdirSync(path.join(tempDir, "data", "nodes"), { recursive: true }); + fs.mkdirSync(path.join(tempDir, "facts"), { recursive: true }); + + // Create hiera.yaml + const hieraConfig = ` +version: 5 +defaults: + datadir: data + data_hash: yaml_data +hierarchy: + - name: "Per-node data" + path: "nodes/%{facts.networking.hostname}.yaml" + - name: "Common data" + path: "common.yaml" +`; + fs.writeFileSync(path.join(tempDir, "hiera.yaml"), hieraConfig); + + // Create common.yaml + const commonData: Record = {}; + if (commonValue !== undefined) { + commonData["test_key"] = commonValue; + } + fs.writeFileSync(path.join(tempDir, "data", "common.yaml"), yaml.stringify(commonData)); + + // Create node-specific data and fact files + for (const nodeId of nodes) { + const hostname = nodeId.split(".")[0]; + + // Create node-specific hieradata if value is set + const nodeValue = nodeKeyValues.get(nodeId); + if (nodeValue !== undefined) { + const nodeData = { test_key: nodeValue }; + fs.writeFileSync( + path.join(tempDir, "data", "nodes", `${hostname}.yaml`), + yaml.stringify(nodeData) + ); + } + + // Create fact file + const factData = { + name: nodeId, + values: { + networking: { + hostname, + fqdn: nodeId, + }, + }, + }; + fs.writeFileSync( + path.join(tempDir, "facts", `${nodeId}.json`), + JSON.stringify(factData, null, 2) + ); + } + + // Create integration manager and service + const integrationManager = new IntegrationManager(); + + const config: HieraServiceConfig = { + controlRepoPath: tempDir, + hieraConfigPath: "hiera.yaml", + hieradataPath: "data", + factSources: { + preferPuppetDB: 
false, + localFactsPath: path.join(tempDir, "facts"), + }, + cache: { + enabled: false, + ttl: 0, + maxEntries: 0, + }, + }; + + const service = new HieraService(integrationManager, config); + + return { tempDir, service, integrationManager }; + } + + // Helper to cleanup temp directory + function cleanupTestEnvironment(tempDir: string): void { + try { + fs.rmSync(tempDir, { recursive: true, force: true }); + } catch { + // Ignore cleanup errors + } + } + + it("should group all nodes with the same value together", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(keyNodeValuesArb, { minLength: 1, maxLength: 10 }), + async (keyNodeValues) => { + // Ensure unique node IDs + const seenNodes = new Set(); + const uniqueKeyNodeValues = keyNodeValues.filter((knv) => { + if (seenNodes.has(knv.nodeId)) return false; + seenNodes.add(knv.nodeId); + return true; + }); + + if (uniqueKeyNodeValues.length === 0) return; + + // Create a minimal service just to use the groupNodesByValue method + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-grouping-test-")); + fs.mkdirSync(path.join(tempDir, "data"), { recursive: true }); + fs.writeFileSync(path.join(tempDir, "hiera.yaml"), "version: 5\nhierarchy: []"); + fs.writeFileSync(path.join(tempDir, "data", "common.yaml"), ""); + + const integrationManager = new IntegrationManager(); + const config: HieraServiceConfig = { + controlRepoPath: tempDir, + hieraConfigPath: "hiera.yaml", + hieradataPath: "data", + factSources: { preferPuppetDB: false }, + cache: { enabled: false, ttl: 0, maxEntries: 0 }, + }; + const service = new HieraService(integrationManager, config); + + try { + const groups = service.groupNodesByValue(uniqueKeyNodeValues); + + // All nodes in each group should have the same value + for (const group of groups) { + const nodesInGroup = uniqueKeyNodeValues.filter((knv) => + group.nodes.includes(knv.nodeId) + ); + + for (const node of nodesInGroup) { + // For not found nodes, group.value should be 
undefined + if (!node.found) { + expect(group.value).toBeUndefined(); + } else { + // For found nodes, values should match + expect(JSON.stringify(node.value)).toBe(JSON.stringify(group.value)); + } + } + } + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should include every node in exactly one group", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(keyNodeValuesArb, { minLength: 1, maxLength: 10 }), + async (keyNodeValues) => { + // Ensure unique node IDs + const seenNodes = new Set(); + const uniqueKeyNodeValues = keyNodeValues.filter((knv) => { + if (seenNodes.has(knv.nodeId)) return false; + seenNodes.add(knv.nodeId); + return true; + }); + + if (uniqueKeyNodeValues.length === 0) return; + + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-grouping-test-")); + fs.mkdirSync(path.join(tempDir, "data"), { recursive: true }); + fs.writeFileSync(path.join(tempDir, "hiera.yaml"), "version: 5\nhierarchy: []"); + fs.writeFileSync(path.join(tempDir, "data", "common.yaml"), ""); + + const integrationManager = new IntegrationManager(); + const config: HieraServiceConfig = { + controlRepoPath: tempDir, + hieraConfigPath: "hiera.yaml", + hieradataPath: "data", + factSources: { preferPuppetDB: false }, + cache: { enabled: false, ttl: 0, maxEntries: 0 }, + }; + const service = new HieraService(integrationManager, config); + + try { + const groups = service.groupNodesByValue(uniqueKeyNodeValues); + + // Collect all nodes from all groups + const allGroupedNodes: string[] = []; + for (const group of groups) { + allGroupedNodes.push(...group.nodes); + } + + // Every input node should appear exactly once + const inputNodeIds = uniqueKeyNodeValues.map((knv) => knv.nodeId); + expect(allGroupedNodes.sort()).toEqual(inputNodeIds.sort()); + + // No duplicates + const uniqueGroupedNodes = new Set(allGroupedNodes); + expect(uniqueGroupedNodes.size).toBe(allGroupedNodes.length); + } finally { + 
cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should create separate groups for different values", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(nodeNameArb, { minLength: 2, maxLength: 5 }), + fc.array(simpleValueArb, { minLength: 2, maxLength: 3 }), + async (nodes, values) => { + const uniqueNodes = [...new Set(nodes)]; + const uniqueValues = [...new Set(values.map((v) => JSON.stringify(v)))].map( + (s) => JSON.parse(s) as unknown + ); + + if (uniqueNodes.length < 2 || uniqueValues.length < 2) return; + + // Assign different values to different nodes + const keyNodeValues: KeyNodeValues[] = uniqueNodes.map((nodeId, i) => ({ + nodeId, + value: uniqueValues[i % uniqueValues.length], + sourceFile: "test.yaml", + hierarchyLevel: "common", + found: true, + })); + + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-grouping-test-")); + fs.mkdirSync(path.join(tempDir, "data"), { recursive: true }); + fs.writeFileSync(path.join(tempDir, "hiera.yaml"), "version: 5\nhierarchy: []"); + fs.writeFileSync(path.join(tempDir, "data", "common.yaml"), ""); + + const integrationManager = new IntegrationManager(); + const config: HieraServiceConfig = { + controlRepoPath: tempDir, + hieraConfigPath: "hiera.yaml", + hieradataPath: "data", + factSources: { preferPuppetDB: false }, + cache: { enabled: false, ttl: 0, maxEntries: 0 }, + }; + const service = new HieraService(integrationManager, config); + + try { + const groups = service.groupNodesByValue(keyNodeValues); + + // Number of groups should be at most the number of unique values + const actualUniqueValues = new Set( + keyNodeValues.map((knv) => JSON.stringify(knv.value)) + ); + expect(groups.length).toBeLessThanOrEqual(actualUniqueValues.size); + + // Each group should have a distinct value + const groupValues = groups.map((g) => JSON.stringify(g.value)); + const uniqueGroupValues = new Set(groupValues); + expect(uniqueGroupValues.size).toBe(groups.length); + } 
finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should handle nodes where key is not found separately", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(nodeNameArb, { minLength: 2, maxLength: 5 }), + simpleValueArb, + async (nodes, value) => { + const uniqueNodes = [...new Set(nodes)]; + if (uniqueNodes.length < 2) return; + + // Half nodes have the value, half don't + const midpoint = Math.floor(uniqueNodes.length / 2); + const keyNodeValues: KeyNodeValues[] = uniqueNodes.map((nodeId, i) => ({ + nodeId, + value: i < midpoint ? value : undefined, + sourceFile: i < midpoint ? "test.yaml" : "", + hierarchyLevel: i < midpoint ? "common" : "", + found: i < midpoint, + })); + + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-grouping-test-")); + fs.mkdirSync(path.join(tempDir, "data"), { recursive: true }); + fs.writeFileSync(path.join(tempDir, "hiera.yaml"), "version: 5\nhierarchy: []"); + fs.writeFileSync(path.join(tempDir, "data", "common.yaml"), ""); + + const integrationManager = new IntegrationManager(); + const config: HieraServiceConfig = { + controlRepoPath: tempDir, + hieraConfigPath: "hiera.yaml", + hieradataPath: "data", + factSources: { preferPuppetDB: false }, + cache: { enabled: false, ttl: 0, maxEntries: 0 }, + }; + const service = new HieraService(integrationManager, config); + + try { + const groups = service.groupNodesByValue(keyNodeValues); + + // Should have at least 2 groups (found and not found) + expect(groups.length).toBeGreaterThanOrEqual(2); + + // Find the "not found" group + const notFoundGroup = groups.find((g) => g.value === undefined); + expect(notFoundGroup).toBeDefined(); + + // All nodes in not found group should have found=false + const notFoundNodes = keyNodeValues.filter((knv) => !knv.found); + expect(notFoundGroup?.nodes.sort()).toEqual( + notFoundNodes.map((n) => n.nodeId).sort() + ); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + 
propertyTestConfig + ); + }); + + it("should work with real HieraService resolution", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(nodeNameArb, { minLength: 2, maxLength: 4 }), + simpleValueArb, + simpleValueArb, + async (nodes, commonValue, nodeSpecificValue) => { + const uniqueNodes = [...new Set(nodes)]; + if (uniqueNodes.length < 2) return; + if (JSON.stringify(commonValue) === JSON.stringify(nodeSpecificValue)) return; + + // First node gets specific value, others get common + const nodeKeyValues = new Map(); + nodeKeyValues.set(uniqueNodes[0], nodeSpecificValue); + + const { tempDir, service } = createTestEnvironment( + uniqueNodes, + nodeKeyValues, + commonValue + ); + + try { + await service.initialize(); + + const keyValues = await service.getKeyValuesAcrossNodes("test_key"); + const groups = service.groupNodesByValue(keyValues); + + // Should have 2 groups (one for node-specific, one for common) + expect(groups.length).toBe(2); + + // Verify grouping is correct + for (const group of groups) { + const nodesInGroup = keyValues.filter((kv) => + group.nodes.includes(kv.nodeId) + ); + for (const node of nodesInGroup) { + expect(JSON.stringify(node.value)).toBe(JSON.stringify(group.value)); + } + } + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); +}); diff --git a/backend/test/properties/hiera/property-24.test.ts b/backend/test/properties/hiera/property-24.test.ts new file mode 100644 index 0000000..4ef8668 --- /dev/null +++ b/backend/test/properties/hiera/property-24.test.ts @@ -0,0 +1,483 @@ +/** + * Feature: hiera-codebase-integration, Property 24: Catalog Compilation Mode Behavior + * Validates: Requirements 12.2, 12.3, 12.4 + * + * This property test verifies that: + * For any Hiera key resolution request: + * - When catalog compilation is disabled, only facts SHALL be used for variable interpolation + * - When catalog compilation is enabled and succeeds, 
code-defined variables SHALL also be available + * - When catalog compilation is enabled but fails, the resolver SHALL fall back to fact-only resolution + */ + +import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"; +import fc from "fast-check"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import * as yaml from "yaml"; +import { HieraResolver } from "../../../src/integrations/hiera/HieraResolver"; +import { CatalogCompiler } from "../../../src/integrations/hiera/CatalogCompiler"; +import type { IntegrationManager } from "../../../src/integrations/IntegrationManager"; +import type { + HieraConfig, + Facts, + CatalogCompilationConfig, +} from "../../../src/integrations/hiera/types"; + +describe("Property 24: Catalog Compilation Mode Behavior", () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + // Generator for valid key names + const keyNameArb = fc + .string({ minLength: 1, maxLength: 20 }) + .filter((s) => /^[a-z][a-z_]*$/.test(s)); + + // Generator for simple values + const simpleValueArb = fc.oneof( + fc + .string({ minLength: 1, maxLength: 20 }) + .filter((s) => !s.includes("%{") && !s.includes(":")), + fc.integer({ min: -1000, max: 1000 }), + fc.boolean() + ); + + // Generator for facts + const factsArb: fc.Arbitrary = fc.record({ + nodeId: fc.constant("test-node"), + gatheredAt: fc.constant(new Date().toISOString()), + facts: fc.record({ + hostname: fc.constant("test-host"), + os: fc.record({ + family: fc.constantFrom("RedHat", "Debian", "Windows"), + name: fc.constantFrom("CentOS", "Ubuntu", "Windows"), + }), + environment: fc.constant("production"), + }), + }); + + // Generator for catalog variables + const catalogVariablesArb = fc.dictionary( + keyNameArb, + simpleValueArb, + { minKeys: 1, maxKeys: 5 } + ); + + // Helper to create a temp directory and resolver + function createTestEnvironment(): { + tempDir: string; + resolver: HieraResolver; + } { + const tempDir = 
fs.mkdtempSync( + path.join(os.tmpdir(), "hiera-catalog-test-") + ); + const resolver = new HieraResolver(tempDir); + return { tempDir, resolver }; + } + + // Helper to cleanup temp directory + function cleanupTestEnvironment(tempDir: string): void { + try { + fs.rmSync(tempDir, { recursive: true, force: true }); + } catch { + // Ignore cleanup errors + } + } + + // Helper to create a hieradata file + function createHieradataFile( + tempDir: string, + filePath: string, + data: Record + ): void { + const fullPath = path.join(tempDir, filePath); + fs.mkdirSync(path.dirname(fullPath), { recursive: true }); + fs.writeFileSync(fullPath, yaml.stringify(data)); + } + + // Helper to create a basic hierarchy config + function createBasicConfig(): HieraConfig { + return { + version: 5, + defaults: { + datadir: "data", + data_hash: "yaml_data", + }, + hierarchy: [ + { + name: "Common", + path: "common.yaml", + }, + ], + }; + } + + // Mock integration manager + function createMockIntegrationManager( + puppetserverAvailable: boolean = false, + compilationResult: unknown = null + ): IntegrationManager { + const mockPuppetserver = { + isInitialized: () => puppetserverAvailable, + compileCatalog: vi.fn().mockResolvedValue(compilationResult), + getNodeData: vi.fn().mockResolvedValue(compilationResult), + }; + + return { + getInformationSource: (name: string) => { + if (name === "puppetserver" && puppetserverAvailable) { + return mockPuppetserver as unknown as ReturnType< + IntegrationManager["getInformationSource"] + >; + } + return null; + }, + } as unknown as IntegrationManager; + } + + describe("When catalog compilation is disabled", () => { + it("should only use facts for variable interpolation", async () => { + await fc.assert( + fc.asyncProperty( + keyNameArb, + simpleValueArb, + factsArb, + catalogVariablesArb, + async (key, value, facts, catalogVars) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create hieradata with a value that uses variable 
interpolation + const valueWithVar = `prefix_%{facts.hostname}_suffix`; + createHieradataFile(tempDir, "data/common.yaml", { + [key]: valueWithVar, + }); + + const config = createBasicConfig(); + + // Resolve WITHOUT catalog variables (simulating disabled compilation) + const result = await resolver.resolve(key, facts, config, { + catalogVariables: {}, // Empty - compilation disabled + }); + + expect(result.found).toBe(true); + // The value should be interpolated using facts only + expect(result.resolvedValue).toBe( + `prefix_${facts.facts.hostname}_suffix` + ); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + }); + + describe("When catalog compilation is enabled and succeeds", () => { + it("should use catalog variables for interpolation", async () => { + await fc.assert( + fc.asyncProperty( + keyNameArb, + factsArb, + async (key, facts) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create hieradata with a value that uses a catalog variable + const valueWithVar = `value_is_%{custom_var}`; + createHieradataFile(tempDir, "data/common.yaml", { + [key]: valueWithVar, + }); + + const config = createBasicConfig(); + + // Resolve WITH catalog variables + const catalogVariables = { + custom_var: "from_catalog", + }; + + const result = await resolver.resolve(key, facts, config, { + catalogVariables, + }); + + expect(result.found).toBe(true); + // The value should be interpolated using catalog variables + expect(result.resolvedValue).toBe("value_is_from_catalog"); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should prefer catalog variables over facts for non-prefixed variables", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, factsArb, async (key, facts) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create hieradata with a value that uses a variable that exists in both + const valueWithVar = 
`value_is_%{hostname}`; + createHieradataFile(tempDir, "data/common.yaml", { + [key]: valueWithVar, + }); + + const config = createBasicConfig(); + + // Catalog variable should override fact + const catalogVariables = { + hostname: "catalog_hostname", + }; + + const result = await resolver.resolve(key, facts, config, { + catalogVariables, + }); + + expect(result.found).toBe(true); + // Catalog variable should win for non-prefixed variables + expect(result.resolvedValue).toBe("value_is_catalog_hostname"); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + + it("should still use facts for facts.xxx prefixed variables", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, factsArb, async (key, facts) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create hieradata with a value that explicitly uses facts.xxx syntax + const valueWithVar = `value_is_%{facts.hostname}`; + createHieradataFile(tempDir, "data/common.yaml", { + [key]: valueWithVar, + }); + + const config = createBasicConfig(); + + // Even with catalog variables, facts.xxx should use facts + const catalogVariables = { + hostname: "catalog_hostname", + "facts.hostname": "should_not_be_used", + }; + + const result = await resolver.resolve(key, facts, config, { + catalogVariables, + }); + + expect(result.found).toBe(true); + // facts.xxx syntax should always use facts + expect(result.resolvedValue).toBe( + `value_is_${facts.facts.hostname}` + ); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + }); + + describe("When catalog compilation fails", () => { + it("should fall back to fact-only resolution", async () => { + await fc.assert( + fc.asyncProperty(keyNameArb, factsArb, async (key, facts) => { + const { tempDir, resolver } = createTestEnvironment(); + try { + // Create hieradata with a value using facts + const valueWithVar = `value_is_%{facts.hostname}`; + createHieradataFile(tempDir, 
"data/common.yaml", { + [key]: valueWithVar, + }); + + const config = createBasicConfig(); + + // Simulate failed compilation by passing empty variables with warning + const result = await resolver.resolve(key, facts, config, { + catalogVariables: {}, // Empty due to failure + catalogWarnings: [ + "Catalog compilation failed - using fact-only resolution", + ], + }); + + expect(result.found).toBe(true); + // Should still resolve using facts + expect(result.resolvedValue).toBe( + `value_is_${facts.facts.hostname}` + ); + // Warnings should be tracked + expect(result.interpolatedVariables?.__catalogWarnings).toContain( + "Catalog compilation failed - using fact-only resolution" + ); + } finally { + cleanupTestEnvironment(tempDir); + } + }), + propertyTestConfig + ); + }); + }); + + describe("CatalogCompiler behavior", () => { + it("should return disabled result when compilation is disabled", async () => { + await fc.assert( + fc.asyncProperty(factsArb, async (facts) => { + const mockManager = createMockIntegrationManager(false); + const config: CatalogCompilationConfig = { + enabled: false, + timeout: 60000, + cacheTTL: 300000, + }; + + const compiler = new CatalogCompiler(mockManager, config); + + expect(compiler.isEnabled()).toBe(false); + + const result = await compiler.compileCatalog( + "test-node", + "production", + facts + ); + + expect(result.success).toBe(false); + expect(result.error).toBe("Catalog compilation is disabled"); + expect(result.variables).toEqual({}); + }), + propertyTestConfig + ); + }); + + it("should return failed result when Puppetserver is unavailable", async () => { + await fc.assert( + fc.asyncProperty(factsArb, async (facts) => { + const mockManager = createMockIntegrationManager(false); + const config: CatalogCompilationConfig = { + enabled: true, + timeout: 60000, + cacheTTL: 300000, + }; + + const compiler = new CatalogCompiler(mockManager, config); + + expect(compiler.isEnabled()).toBe(true); + + const result = await 
compiler.compileCatalog( + "test-node", + "production", + facts + ); + + expect(result.success).toBe(false); + expect(result.error).toContain("Puppetserver integration not available"); + expect(result.variables).toEqual({}); + }), + propertyTestConfig + ); + }); + + it("should extract variables from compiled catalog", async () => { + await fc.assert( + fc.asyncProperty(factsArb, async (facts) => { + // Mock a successful catalog compilation + const mockCatalog = { + resources: [ + { + type: "Class", + title: "profile::nginx", + parameters: { + port: 8080, + enabled: true, + }, + }, + { + type: "Class", + title: "profile::base", + parameters: { + timezone: "UTC", + }, + }, + ], + environment: "production", + }; + + const mockManager = createMockIntegrationManager(true, mockCatalog); + const config: CatalogCompilationConfig = { + enabled: true, + timeout: 60000, + cacheTTL: 300000, + }; + + const compiler = new CatalogCompiler(mockManager, config); + const result = await compiler.compileCatalog( + "test-node", + "production", + facts + ); + + expect(result.success).toBe(true); + expect(result.variables).toHaveProperty("profile::nginx::port", 8080); + expect(result.variables).toHaveProperty("profile::nginx::enabled", true); + expect(result.variables).toHaveProperty("profile::base::timezone", "UTC"); + expect(result.variables).toHaveProperty("environment", "production"); + expect(result.classes).toContain("profile::nginx"); + expect(result.classes).toContain("profile::base"); + }), + propertyTestConfig + ); + }); + + it("should cache compiled catalogs", async () => { + await fc.assert( + fc.asyncProperty(factsArb, async (facts) => { + const mockCatalog = { + resources: [ + { + type: "Class", + title: "test::class", + parameters: { value: "cached" }, + }, + ], + environment: "production", + }; + + const mockManager = createMockIntegrationManager(true, mockCatalog); + const config: CatalogCompilationConfig = { + enabled: true, + timeout: 60000, + cacheTTL: 300000, + }; + 
+ const compiler = new CatalogCompiler(mockManager, config); + + // First call + const result1 = await compiler.compileCatalog( + "test-node", + "production", + facts + ); + expect(result1.success).toBe(true); + + // Second call should use cache + const result2 = await compiler.compileCatalog( + "test-node", + "production", + facts + ); + expect(result2.success).toBe(true); + expect(result2.variables).toEqual(result1.variables); + + // Verify cache stats + const stats = compiler.getCacheStats(); + expect(stats.size).toBe(1); + }), + propertyTestConfig + ); + }); + }); +}); diff --git a/backend/test/properties/hiera/property-28.test.ts b/backend/test/properties/hiera/property-28.test.ts new file mode 100644 index 0000000..cb99ece --- /dev/null +++ b/backend/test/properties/hiera/property-28.test.ts @@ -0,0 +1,376 @@ +/** + * Feature: hiera-codebase-integration, Property 28: Cache Correctness + * Validates: Requirements 15.1, 15.5 + * + * This property test verifies that: + * For any sequence of Hiera operations, cached results SHALL be equivalent + * to freshly computed results until the underlying data changes. 
+ */ + +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import fc from "fast-check"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import * as yaml from "yaml"; +import { HieraService, type HieraServiceConfig } from "../../../src/integrations/hiera/HieraService"; +import { IntegrationManager } from "../../../src/integrations/IntegrationManager"; + +describe("Property 28: Cache Correctness", () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + // Generator for valid key name parts + const keyPartArb = fc.string({ minLength: 1, maxLength: 10 }) + .filter((s) => /^[a-z][a-z0-9_]*$/.test(s)); + + // Generator for Hiera key names + const hieraKeyArb = fc.array(keyPartArb, { minLength: 1, maxLength: 3 }) + .map((parts) => parts.join("::")); + + // Generator for node names + const nodeNameArb = fc.string({ minLength: 1, maxLength: 10 }) + .filter((s) => /^[a-z][a-z0-9-]*$/.test(s)) + .map((name) => `${name}.example.com`); + + // Generator for simple values + const simpleValueArb = fc.oneof( + fc.string({ minLength: 1, maxLength: 20 }).filter((s) => !s.includes("%{")), + fc.integer({ min: -1000, max: 1000 }), + fc.boolean() + ); + + // Helper to create a temp directory with test structure + function createTestEnvironment( + nodes: string[], + keys: string[], + keyValues: Map + ): { tempDir: string; service: HieraService; integrationManager: IntegrationManager } { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-cache-test-")); + + // Create directories + fs.mkdirSync(path.join(tempDir, "data"), { recursive: true }); + fs.mkdirSync(path.join(tempDir, "facts"), { recursive: true }); + + // Create hiera.yaml + const hieraConfig = ` +version: 5 +defaults: + datadir: data + data_hash: yaml_data +hierarchy: + - name: "Common data" + path: "common.yaml" +`; + fs.writeFileSync(path.join(tempDir, "hiera.yaml"), hieraConfig); + + // Create common.yaml with all keys + const 
commonData: Record = {}; + for (const key of keys) { + commonData[key] = keyValues.get(key) ?? "default_value"; + } + fs.writeFileSync(path.join(tempDir, "data", "common.yaml"), yaml.stringify(commonData)); + + // Create fact files for nodes + for (const nodeId of nodes) { + const hostname = nodeId.split(".")[0]; + const factData = { + name: nodeId, + values: { + networking: { + hostname, + fqdn: nodeId, + }, + }, + }; + fs.writeFileSync( + path.join(tempDir, "facts", `${nodeId}.json`), + JSON.stringify(factData, null, 2) + ); + } + + // Create integration manager and service with caching enabled + const integrationManager = new IntegrationManager(); + + const config: HieraServiceConfig = { + controlRepoPath: tempDir, + hieraConfigPath: "hiera.yaml", + hieradataPath: "data", + factSources: { + preferPuppetDB: false, + localFactsPath: path.join(tempDir, "facts"), + }, + cache: { + enabled: true, + ttl: 300000, // 5 minutes + maxEntries: 1000, + }, + }; + + const service = new HieraService(integrationManager, config); + + return { tempDir, service, integrationManager }; + } + + // Helper to cleanup temp directory + function cleanupTestEnvironment(tempDir: string): void { + try { + fs.rmSync(tempDir, { recursive: true, force: true }); + } catch { + // Ignore cleanup errors + } + } + + it("should return equivalent results from cache and fresh computation", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(nodeNameArb, { minLength: 1, maxLength: 3 }), + fc.array(hieraKeyArb, { minLength: 1, maxLength: 5 }), + fc.array(simpleValueArb, { minLength: 1, maxLength: 5 }), + async (nodes, keys, values) => { + const uniqueNodes = [...new Set(nodes)]; + const uniqueKeys = [...new Set(keys)]; + if (uniqueNodes.length === 0 || uniqueKeys.length === 0) return; + + const keyValues = new Map(); + for (let i = 0; i < uniqueKeys.length; i++) { + keyValues.set(uniqueKeys[i], values[i % values.length]); + } + + const { tempDir, service } = createTestEnvironment(uniqueNodes, 
uniqueKeys, keyValues); + + try { + await service.initialize(); + + // First call - populates cache + const firstResults = new Map(); + for (const nodeId of uniqueNodes) { + for (const key of uniqueKeys) { + const resolution = await service.resolveKey(nodeId, key); + firstResults.set(`${nodeId}:${key}`, resolution.resolvedValue); + } + } + + // Second call - should use cache + const cachedResults = new Map(); + for (const nodeId of uniqueNodes) { + for (const key of uniqueKeys) { + const resolution = await service.resolveKey(nodeId, key); + cachedResults.set(`${nodeId}:${key}`, resolution.resolvedValue); + } + } + + // Results should be equivalent + for (const [cacheKey, firstValue] of firstResults) { + const cachedValue = cachedResults.get(cacheKey); + expect(JSON.stringify(cachedValue)).toBe(JSON.stringify(firstValue)); + } + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should return fresh results after cache invalidation", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(nodeNameArb, { minLength: 1, maxLength: 2 }), + hieraKeyArb, + simpleValueArb, + async (nodes, key, value) => { + const uniqueNodes = [...new Set(nodes)]; + if (uniqueNodes.length === 0) return; + + const keyValues = new Map([[key, value]]); + const { tempDir, service } = createTestEnvironment(uniqueNodes, [key], keyValues); + + try { + await service.initialize(); + + // First call - populates cache + for (const nodeId of uniqueNodes) { + await service.resolveKey(nodeId, key); + } + + // Verify cache is populated + let stats = service.getCacheStats(); + expect(stats.resolutionCacheSize).toBeGreaterThan(0); + + // Invalidate cache + service.invalidateCache(); + + // Verify cache is cleared + stats = service.getCacheStats(); + expect(stats.resolutionCacheSize).toBe(0); + + // Third call - should compute fresh results + for (const nodeId of uniqueNodes) { + const resolution = await 
service.resolveKey(nodeId, key); + expect(JSON.stringify(resolution.resolvedValue)).toBe(JSON.stringify(value)); + } + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should cache getAllKeys results correctly", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(hieraKeyArb, { minLength: 1, maxLength: 10 }), + fc.array(simpleValueArb, { minLength: 1, maxLength: 10 }), + async (keys, values) => { + const uniqueKeys = [...new Set(keys)]; + if (uniqueKeys.length === 0) return; + + const keyValues = new Map(); + for (let i = 0; i < uniqueKeys.length; i++) { + keyValues.set(uniqueKeys[i], values[i % values.length]); + } + + const { tempDir, service } = createTestEnvironment( + ["test-node.example.com"], + uniqueKeys, + keyValues + ); + + try { + await service.initialize(); + + // First call + const firstKeyIndex = await service.getAllKeys(); + + // Second call - should return same reference (cached) + const secondKeyIndex = await service.getAllKeys(); + + // Should be the same object reference + expect(firstKeyIndex).toBe(secondKeyIndex); + + // Should have correct number of keys + expect(firstKeyIndex.totalKeys).toBe(uniqueKeys.length); + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should cache node data correctly", async () => { + await fc.assert( + fc.asyncProperty( + nodeNameArb, + fc.array(hieraKeyArb, { minLength: 1, maxLength: 5 }), + fc.array(simpleValueArb, { minLength: 1, maxLength: 5 }), + async (nodeId, keys, values) => { + const uniqueKeys = [...new Set(keys)]; + if (uniqueKeys.length === 0) return; + + const keyValues = new Map(); + for (let i = 0; i < uniqueKeys.length; i++) { + keyValues.set(uniqueKeys[i], values[i % values.length]); + } + + const { tempDir, service } = createTestEnvironment([nodeId], uniqueKeys, keyValues); + + try { + await service.initialize(); + + // First 
call + const firstNodeData = await service.getNodeHieraData(nodeId); + + // Verify cache is populated + let stats = service.getCacheStats(); + expect(stats.nodeDataCacheSize).toBe(1); + + // Second call - should use cache + const secondNodeData = await service.getNodeHieraData(nodeId); + + // Should be the same object reference + expect(firstNodeData).toBe(secondNodeData); + + // Data should be correct + expect(firstNodeData.nodeId).toBe(nodeId); + expect(firstNodeData.keys.size).toBe(uniqueKeys.length); + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should maintain cache consistency across multiple operations", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(nodeNameArb, { minLength: 2, maxLength: 4 }), + fc.array(hieraKeyArb, { minLength: 2, maxLength: 5 }), + fc.array(simpleValueArb, { minLength: 2, maxLength: 5 }), + async (nodes, keys, values) => { + const uniqueNodes = [...new Set(nodes)]; + const uniqueKeys = [...new Set(keys)]; + if (uniqueNodes.length < 2 || uniqueKeys.length < 2) return; + + const keyValues = new Map(); + for (let i = 0; i < uniqueKeys.length; i++) { + keyValues.set(uniqueKeys[i], values[i % values.length]); + } + + const { tempDir, service } = createTestEnvironment(uniqueNodes, uniqueKeys, keyValues); + + try { + await service.initialize(); + + // Perform various operations + await service.getAllKeys(); + + for (const nodeId of uniqueNodes) { + await service.resolveKey(nodeId, uniqueKeys[0]); + } + + await service.getNodeHieraData(uniqueNodes[0]); + + // Verify cache stats are consistent + const stats = service.getCacheStats(); + expect(stats.keyIndexCached).toBe(true); + expect(stats.resolutionCacheSize).toBeGreaterThan(0); + expect(stats.nodeDataCacheSize).toBeGreaterThan(0); + + // Invalidate specific node cache + service.invalidateNodeCache(uniqueNodes[0]); + + // Node data cache should be reduced + const statsAfter = 
service.getCacheStats(); + expect(statsAfter.nodeDataCacheSize).toBe(0); + + // Key index should still be cached + expect(statsAfter.keyIndexCached).toBe(true); + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); +}); diff --git a/backend/test/properties/hiera/property-29.test.ts b/backend/test/properties/hiera/property-29.test.ts new file mode 100644 index 0000000..086e491 --- /dev/null +++ b/backend/test/properties/hiera/property-29.test.ts @@ -0,0 +1,429 @@ +/** + * Feature: hiera-codebase-integration, Property 29: Cache Invalidation on File Change + * Validates: Requirements 15.2 + * + * This property test verifies that: + * When a hieradata file changes, all cached values derived from that file + * SHALL be invalidated and subsequent lookups SHALL return fresh data. + */ + +import { describe, it, expect } from "vitest"; +import fc from "fast-check"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import * as yaml from "yaml"; +import { HieraService, type HieraServiceConfig } from "../../../src/integrations/hiera/HieraService"; +import { IntegrationManager } from "../../../src/integrations/IntegrationManager"; + +describe("Property 29: Cache Invalidation on File Change", () => { + const propertyTestConfig = { + numRuns: 50, + verbose: false, + }; + + // Generator for valid key name parts + const keyPartArb = fc.string({ minLength: 1, maxLength: 10 }) + .filter((s) => /^[a-z][a-z0-9_]*$/.test(s)); + + // Generator for Hiera key names + const hieraKeyArb = fc.array(keyPartArb, { minLength: 1, maxLength: 3 }) + .map((parts) => parts.join("::")); + + // Generator for node names + const nodeNameArb = fc.string({ minLength: 1, maxLength: 10 }) + .filter((s) => /^[a-z][a-z0-9-]*$/.test(s)) + .map((name) => `${name}.example.com`); + + // Generator for simple string values + const simpleValueArb = fc.string({ minLength: 1, maxLength: 20 }) + .filter((s) => 
!s.includes("%{") && !s.includes("\n") && !s.includes(":")); + + // Helper to create a temp directory with test structure + function createTestEnvironment( + nodes: string[], + keys: string[], + keyValues: Map + ): { tempDir: string; service: HieraService; integrationManager: IntegrationManager } { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-invalidation-test-")); + + // Create directories + fs.mkdirSync(path.join(tempDir, "data"), { recursive: true }); + fs.mkdirSync(path.join(tempDir, "facts"), { recursive: true }); + + // Create hiera.yaml + const hieraConfig = ` +version: 5 +defaults: + datadir: data + data_hash: yaml_data +hierarchy: + - name: "Common data" + path: "common.yaml" +`; + fs.writeFileSync(path.join(tempDir, "hiera.yaml"), hieraConfig); + + // Create common.yaml with all keys + const commonData: Record = {}; + for (const key of keys) { + commonData[key] = keyValues.get(key) ?? "default_value"; + } + fs.writeFileSync(path.join(tempDir, "data", "common.yaml"), yaml.stringify(commonData)); + + // Create fact files for nodes + for (const nodeId of nodes) { + const hostname = nodeId.split(".")[0]; + const factData = { + name: nodeId, + values: { + networking: { + hostname, + fqdn: nodeId, + }, + }, + }; + fs.writeFileSync( + path.join(tempDir, "facts", `${nodeId}.json`), + JSON.stringify(factData, null, 2) + ); + } + + // Create integration manager and service with caching enabled + const integrationManager = new IntegrationManager(); + + const config: HieraServiceConfig = { + controlRepoPath: tempDir, + hieraConfigPath: "hiera.yaml", + hieradataPath: "data", + factSources: { + preferPuppetDB: false, + localFactsPath: path.join(tempDir, "facts"), + }, + cache: { + enabled: true, + ttl: 300000, // 5 minutes + maxEntries: 1000, + }, + }; + + const service = new HieraService(integrationManager, config); + + return { tempDir, service, integrationManager }; + } + + // Helper to cleanup temp directory + function 
cleanupTestEnvironment(tempDir: string): void { + try { + fs.rmSync(tempDir, { recursive: true, force: true }); + } catch { + // Ignore cleanup errors + } + } + + // Helper to update a hieradata file + function updateHieradataFile( + tempDir: string, + keys: string[], + newValues: Map + ): void { + const commonData: Record = {}; + for (const key of keys) { + commonData[key] = newValues.get(key) ?? "updated_value"; + } + fs.writeFileSync(path.join(tempDir, "data", "common.yaml"), yaml.stringify(commonData)); + } + + it("should invalidate cache when file changes are detected", async () => { + await fc.assert( + fc.asyncProperty( + nodeNameArb, + hieraKeyArb, + simpleValueArb, + simpleValueArb, + async (nodeId, key, initialValue, newValue) => { + // Ensure values are different + if (initialValue === newValue) return; + + const keyValues = new Map([[key, initialValue]]); + const { tempDir, service } = createTestEnvironment([nodeId], [key], keyValues); + + try { + await service.initialize(); + + // First call - populates cache + const firstResolution = await service.resolveKey(nodeId, key); + expect(firstResolution.resolvedValue).toBe(initialValue); + + // Verify cache is populated + let stats = service.getCacheStats(); + expect(stats.resolutionCacheSize).toBeGreaterThan(0); + + // Simulate file change by calling handleFileChanges through the scanner callback + // Update the file first + const newKeyValues = new Map([[key, newValue]]); + updateHieradataFile(tempDir, [key], newKeyValues); + + // Trigger cache invalidation (simulating file watcher callback) + // We access the scanner and trigger the change notification + const scanner = service.getScanner(); + + // Rescan the file to pick up changes + await scanner.rescanFiles(["data/common.yaml"]); + + // Manually invalidate cache (simulating what handleFileChanges does) + service.invalidateCache(); + + // Verify cache is cleared + stats = service.getCacheStats(); + expect(stats.resolutionCacheSize).toBe(0); + + // Next 
call should return fresh data + const freshResolution = await service.resolveKey(nodeId, key); + expect(freshResolution.resolvedValue).toBe(newValue); + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should invalidate node data cache when underlying data changes", async () => { + await fc.assert( + fc.asyncProperty( + nodeNameArb, + fc.array(hieraKeyArb, { minLength: 1, maxLength: 3 }), + fc.array(simpleValueArb, { minLength: 1, maxLength: 3 }), + async (nodeId, keys, values) => { + const uniqueKeys = [...new Set(keys)]; + if (uniqueKeys.length === 0) return; + + const keyValues = new Map(); + for (let i = 0; i < uniqueKeys.length; i++) { + keyValues.set(uniqueKeys[i], values[i % values.length]); + } + + const { tempDir, service } = createTestEnvironment([nodeId], uniqueKeys, keyValues); + + try { + await service.initialize(); + + // Get node data - populates cache + const firstNodeData = await service.getNodeHieraData(nodeId); + expect(firstNodeData.nodeId).toBe(nodeId); + + // Verify node data cache is populated + let stats = service.getCacheStats(); + expect(stats.nodeDataCacheSize).toBe(1); + + // Update file with new values + const newKeyValues = new Map(); + for (const key of uniqueKeys) { + newKeyValues.set(key, `updated_${key}`); + } + updateHieradataFile(tempDir, uniqueKeys, newKeyValues); + + // Rescan and invalidate + const scanner = service.getScanner(); + await scanner.rescanFiles(["data/common.yaml"]); + service.invalidateCache(); + + // Verify cache is cleared + stats = service.getCacheStats(); + expect(stats.nodeDataCacheSize).toBe(0); + + // Get fresh node data + const freshNodeData = await service.getNodeHieraData(nodeId); + + // Verify values are updated + for (const key of uniqueKeys) { + const resolution = freshNodeData.keys.get(key); + expect(resolution?.resolvedValue).toBe(`updated_${key}`); + } + + await service.shutdown(); + } finally { + 
cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should invalidate key index cache when files change", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(hieraKeyArb, { minLength: 1, maxLength: 5 }), + hieraKeyArb, + simpleValueArb, + async (initialKeys, newKey, value) => { + const uniqueKeys = [...new Set(initialKeys)]; + if (uniqueKeys.length === 0) return; + // Ensure new key is different from existing keys + if (uniqueKeys.includes(newKey)) return; + + const keyValues = new Map(); + for (const key of uniqueKeys) { + keyValues.set(key, value); + } + + const { tempDir, service } = createTestEnvironment( + ["test-node.example.com"], + uniqueKeys, + keyValues + ); + + try { + await service.initialize(); + + // Get all keys - populates cache + const firstKeyIndex = await service.getAllKeys(); + expect(firstKeyIndex.totalKeys).toBe(uniqueKeys.length); + + // Verify key index is cached + let stats = service.getCacheStats(); + expect(stats.keyIndexCached).toBe(true); + + // Add a new key to the file + const newKeyValues = new Map(keyValues); + newKeyValues.set(newKey, "new_value"); + updateHieradataFile(tempDir, [...uniqueKeys, newKey], newKeyValues); + + // Rescan and invalidate + const scanner = service.getScanner(); + await scanner.rescanFiles(["data/common.yaml"]); + service.invalidateCache(); + + // Verify key index cache is cleared + stats = service.getCacheStats(); + expect(stats.keyIndexCached).toBe(false); + + // Get fresh key index + const freshKeyIndex = await service.getAllKeys(); + expect(freshKeyIndex.totalKeys).toBe(uniqueKeys.length + 1); + + // Verify new key is present + expect(freshKeyIndex.keys.has(newKey)).toBe(true); + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should handle multiple file changes correctly", async () => { + await fc.assert( + fc.asyncProperty( + nodeNameArb, + hieraKeyArb, + 
fc.array(simpleValueArb, { minLength: 3, maxLength: 5 }), + async (nodeId, key, valueSequence) => { + const uniqueValues = [...new Set(valueSequence)]; + if (uniqueValues.length < 2) return; + + const keyValues = new Map([[key, uniqueValues[0]]]); + const { tempDir, service } = createTestEnvironment([nodeId], [key], keyValues); + + try { + await service.initialize(); + + // Track all resolved values + const resolvedValues: string[] = []; + + // Initial resolution + const initial = await service.resolveKey(nodeId, key); + resolvedValues.push(initial.resolvedValue as string); + + // Perform multiple updates + for (let i = 1; i < uniqueValues.length; i++) { + const newValue = uniqueValues[i]; + const newKeyValues = new Map([[key, newValue]]); + updateHieradataFile(tempDir, [key], newKeyValues); + + // Rescan and invalidate + const scanner = service.getScanner(); + await scanner.rescanFiles(["data/common.yaml"]); + service.invalidateCache(); + + // Resolve again + const resolution = await service.resolveKey(nodeId, key); + resolvedValues.push(resolution.resolvedValue as string); + } + + // Verify each resolution returned the correct value + for (let i = 0; i < uniqueValues.length; i++) { + expect(resolvedValues[i]).toBe(uniqueValues[i]); + } + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); + + it("should preserve cache for unaffected nodes after partial invalidation", async () => { + await fc.assert( + fc.asyncProperty( + fc.array(nodeNameArb, { minLength: 2, maxLength: 3 }), + hieraKeyArb, + simpleValueArb, + async (nodes, key, value) => { + const uniqueNodes = [...new Set(nodes)]; + if (uniqueNodes.length < 2) return; + + const keyValues = new Map([[key, value]]); + const { tempDir, service } = createTestEnvironment(uniqueNodes, [key], keyValues); + + try { + await service.initialize(); + + // First get all keys to populate key index cache + await service.getAllKeys(); + + // Populate 
resolution cache for all nodes + for (const nodeId of uniqueNodes) { + await service.resolveKey(nodeId, key); + } + + // Verify cache is populated + let stats = service.getCacheStats(); + expect(stats.resolutionCacheSize).toBe(uniqueNodes.length); + expect(stats.keyIndexCached).toBe(true); + + // Invalidate cache for only the first node + service.invalidateNodeCache(uniqueNodes[0]); + + // Verify partial invalidation + stats = service.getCacheStats(); + // Resolution cache entries for first node should be removed + // Other nodes' resolution cache should remain + expect(stats.resolutionCacheSize).toBe(uniqueNodes.length - 1); + // Key index should still be cached + expect(stats.keyIndexCached).toBe(true); + + // Verify first node needs fresh resolution + const firstNodeResolution = await service.resolveKey(uniqueNodes[0], key); + expect(firstNodeResolution.resolvedValue).toBe(value); + + await service.shutdown(); + } finally { + cleanupTestEnvironment(tempDir); + } + } + ), + propertyTestConfig + ); + }); +}); diff --git a/backend/test/properties/hiera/property-3.test.ts b/backend/test/properties/hiera/property-3.test.ts new file mode 100644 index 0000000..49989f9 --- /dev/null +++ b/backend/test/properties/hiera/property-3.test.ts @@ -0,0 +1,371 @@ +/** + * Feature: hiera-codebase-integration, Property 3: Hiera Configuration Parsing Round-Trip + * Validates: Requirements 2.1, 2.2 + * + * This property test verifies that: + * For any valid Hiera 5 configuration object, serializing it to YAML and then + * parsing it back SHALL produce an equivalent configuration with all hierarchy + * levels, paths, and data providers preserved. 
+ */ + +import { describe, it, expect } from 'vitest'; +import fc from 'fast-check'; +import { HieraParser } from '../../../src/integrations/hiera/HieraParser'; +import type { HieraConfig, HierarchyLevel, HieraDefaults } from '../../../src/integrations/hiera/types'; + +describe('Property 3: Hiera Configuration Parsing Round-Trip', () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + // Generator for valid hierarchy level names (alphanumeric with spaces and dashes) + const hierarchyNameArb = fc.string({ minLength: 1, maxLength: 50 }) + .filter(s => /^[a-zA-Z0-9 _-]+$/.test(s) && s.trim().length > 0); + + // Generator for valid file paths (alphanumeric with path separators and extensions) + const filePathArb = fc.string({ minLength: 1, maxLength: 50 }) + .filter(s => /^[a-zA-Z0-9/_.-]+$/.test(s)) + .map(s => s.endsWith('.yaml') ? s : s + '.yaml'); + + // Generator for data directory paths + const datadirArb = fc.string({ minLength: 1, maxLength: 30 }) + .filter(s => /^[a-zA-Z0-9/_-]+$/.test(s)); + + // Generator for data_hash values + const dataHashArb = fc.constantFrom('yaml_data', 'json_data'); + + // Generator for lookup_key values + const lookupKeyArb = fc.constantFrom('eyaml_lookup_key', 'hiera_lookup_key'); + + // Generator for a single hierarchy level with path + const hierarchyLevelWithPathArb: fc.Arbitrary = fc.record({ + name: hierarchyNameArb, + path: filePathArb, + datadir: fc.option(datadirArb, { nil: undefined }), + data_hash: fc.option(dataHashArb, { nil: undefined }), + }); + + // Generator for a hierarchy level with multiple paths + const hierarchyLevelWithPathsArb: fc.Arbitrary = fc.record({ + name: hierarchyNameArb, + paths: fc.array(filePathArb, { minLength: 1, maxLength: 3 }), + datadir: fc.option(datadirArb, { nil: undefined }), + data_hash: fc.option(dataHashArb, { nil: undefined }), + }); + + // Generator for a hierarchy level with glob + const hierarchyLevelWithGlobArb: fc.Arbitrary = fc.record({ + name: 
hierarchyNameArb, + glob: filePathArb.map(p => p.replace('.yaml', '/*.yaml')), + datadir: fc.option(datadirArb, { nil: undefined }), + data_hash: fc.option(dataHashArb, { nil: undefined }), + }); + + // Combined hierarchy level generator + const hierarchyLevelArb: fc.Arbitrary = fc.oneof( + hierarchyLevelWithPathArb, + hierarchyLevelWithPathsArb, + hierarchyLevelWithGlobArb + ); + + // Generator for defaults + const hieraDefaultsArb: fc.Arbitrary = fc.record({ + datadir: fc.option(datadirArb, { nil: undefined }), + data_hash: fc.option(dataHashArb, { nil: undefined }), + lookup_key: fc.option(lookupKeyArb, { nil: undefined }), + }); + + // Generator for complete HieraConfig + const hieraConfigArb: fc.Arbitrary = fc.record({ + version: fc.constant(5 as const), + defaults: fc.option(hieraDefaultsArb, { nil: undefined }), + hierarchy: fc.array(hierarchyLevelArb, { minLength: 1, maxLength: 5 }), + }); + + /** + * Helper to clean undefined values from objects for comparison + */ + function cleanUndefined(obj: T): T { + if (obj === null || obj === undefined) { + return obj; + } + if (Array.isArray(obj)) { + return obj.map(cleanUndefined) as T; + } + if (typeof obj === 'object') { + const cleaned: Record = {}; + for (const [key, value] of Object.entries(obj as Record)) { + if (value !== undefined) { + cleaned[key] = cleanUndefined(value); + } + } + return cleaned as T; + } + return obj; + } + + /** + * Helper to compare hierarchy levels + */ + function compareHierarchyLevel(original: HierarchyLevel, parsed: HierarchyLevel): boolean { + // Name must match + if (original.name !== parsed.name) return false; + + // Path must match + if (original.path !== parsed.path) return false; + + // Paths array must match + if (original.paths && parsed.paths) { + if (original.paths.length !== parsed.paths.length) return false; + for (let i = 0; i < original.paths.length; i++) { + if (original.paths[i] !== parsed.paths[i]) return false; + } + } else if (original.paths !== parsed.paths) { 
+ return false; + } + + // Glob must match + if (original.glob !== parsed.glob) return false; + + // Globs array must match + if (original.globs && parsed.globs) { + if (original.globs.length !== parsed.globs.length) return false; + for (let i = 0; i < original.globs.length; i++) { + if (original.globs[i] !== parsed.globs[i]) return false; + } + } else if (original.globs !== parsed.globs) { + return false; + } + + // Datadir must match + if (original.datadir !== parsed.datadir) return false; + + // Data hash must match + if (original.data_hash !== parsed.data_hash) return false; + + // Lookup key must match + if (original.lookup_key !== parsed.lookup_key) return false; + + return true; + } + + /** + * Helper to compare HieraConfig objects + */ + function compareConfigs(original: HieraConfig, parsed: HieraConfig): boolean { + // Version must match + if (original.version !== parsed.version) return false; + + // Hierarchy length must match + if (original.hierarchy.length !== parsed.hierarchy.length) return false; + + // Compare each hierarchy level + for (let i = 0; i < original.hierarchy.length; i++) { + if (!compareHierarchyLevel(original.hierarchy[i], parsed.hierarchy[i])) { + return false; + } + } + + // Compare defaults + const origDefaults = cleanUndefined(original.defaults); + const parsedDefaults = cleanUndefined(parsed.defaults); + + if (origDefaults && parsedDefaults) { + if (origDefaults.datadir !== parsedDefaults.datadir) return false; + if (origDefaults.data_hash !== parsedDefaults.data_hash) return false; + if (origDefaults.lookup_key !== parsedDefaults.lookup_key) return false; + } else if ((origDefaults && Object.keys(origDefaults).length > 0) !== + (parsedDefaults && Object.keys(parsedDefaults).length > 0)) { + return false; + } + + return true; + } + + it('should preserve all hierarchy levels after round-trip for any valid config', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property(hieraConfigArb, 
(originalConfig) => { + // Serialize to YAML + const yaml = parser.serializeConfig(originalConfig); + + // Parse back + const parseResult = parser.parseContent(yaml); + + // Should parse successfully + expect(parseResult.success).toBe(true); + expect(parseResult.config).toBeDefined(); + + const parsedConfig = parseResult.config!; + + // Version should be preserved + expect(parsedConfig.version).toBe(originalConfig.version); + + // Hierarchy length should be preserved + expect(parsedConfig.hierarchy.length).toBe(originalConfig.hierarchy.length); + + // Each hierarchy level should be preserved + for (let i = 0; i < originalConfig.hierarchy.length; i++) { + const origLevel = originalConfig.hierarchy[i]; + const parsedLevel = parsedConfig.hierarchy[i]; + + expect(parsedLevel.name).toBe(origLevel.name); + + if (origLevel.path) { + expect(parsedLevel.path).toBe(origLevel.path); + } + if (origLevel.paths) { + expect(parsedLevel.paths).toEqual(origLevel.paths); + } + if (origLevel.glob) { + expect(parsedLevel.glob).toBe(origLevel.glob); + } + if (origLevel.datadir) { + expect(parsedLevel.datadir).toBe(origLevel.datadir); + } + if (origLevel.data_hash) { + expect(parsedLevel.data_hash).toBe(origLevel.data_hash); + } + } + }), + propertyTestConfig + ); + }); + + it('should preserve defaults after round-trip for any valid config with defaults', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + const configWithDefaultsArb = fc.record({ + version: fc.constant(5 as const), + defaults: hieraDefaultsArb, + hierarchy: fc.array(hierarchyLevelArb, { minLength: 1, maxLength: 3 }), + }); + + fc.assert( + fc.property(configWithDefaultsArb, (originalConfig) => { + // Serialize to YAML + const yaml = parser.serializeConfig(originalConfig); + + // Parse back + const parseResult = parser.parseContent(yaml); + + // Should parse successfully + expect(parseResult.success).toBe(true); + expect(parseResult.config).toBeDefined(); + + const parsedConfig = parseResult.config!; 
+ + // Defaults should be preserved + if (originalConfig.defaults) { + if (originalConfig.defaults.datadir) { + expect(parsedConfig.defaults?.datadir).toBe(originalConfig.defaults.datadir); + } + if (originalConfig.defaults.data_hash) { + expect(parsedConfig.defaults?.data_hash).toBe(originalConfig.defaults.data_hash); + } + if (originalConfig.defaults.lookup_key) { + expect(parsedConfig.defaults?.lookup_key).toBe(originalConfig.defaults.lookup_key); + } + } + }), + propertyTestConfig + ); + }); + + it('should produce equivalent configs after round-trip for any valid config', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property(hieraConfigArb, (originalConfig) => { + // Serialize to YAML + const yaml = parser.serializeConfig(originalConfig); + + // Parse back + const parseResult = parser.parseContent(yaml); + + // Should parse successfully + expect(parseResult.success).toBe(true); + expect(parseResult.config).toBeDefined(); + + // Configs should be equivalent + expect(compareConfigs(originalConfig, parseResult.config!)).toBe(true); + }), + propertyTestConfig + ); + }); + + it('should handle configs with multiple paths arrays after round-trip', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + const multiPathConfigArb = fc.record({ + version: fc.constant(5 as const), + hierarchy: fc.array(hierarchyLevelWithPathsArb, { minLength: 1, maxLength: 3 }), + }); + + fc.assert( + fc.property(multiPathConfigArb, (originalConfig) => { + // Serialize to YAML + const yaml = parser.serializeConfig(originalConfig); + + // Parse back + const parseResult = parser.parseContent(yaml); + + // Should parse successfully + expect(parseResult.success).toBe(true); + expect(parseResult.config).toBeDefined(); + + const parsedConfig = parseResult.config!; + + // Each hierarchy level's paths array should be preserved + for (let i = 0; i < originalConfig.hierarchy.length; i++) { + const origLevel = originalConfig.hierarchy[i]; + 
const parsedLevel = parsedConfig.hierarchy[i]; + + if (origLevel.paths) { + expect(parsedLevel.paths).toBeDefined(); + expect(parsedLevel.paths).toEqual(origLevel.paths); + } + } + }), + propertyTestConfig + ); + }); + + it('should handle configs with glob patterns after round-trip', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + const globConfigArb = fc.record({ + version: fc.constant(5 as const), + hierarchy: fc.array(hierarchyLevelWithGlobArb, { minLength: 1, maxLength: 3 }), + }); + + fc.assert( + fc.property(globConfigArb, (originalConfig) => { + // Serialize to YAML + const yaml = parser.serializeConfig(originalConfig); + + // Parse back + const parseResult = parser.parseContent(yaml); + + // Should parse successfully + expect(parseResult.success).toBe(true); + expect(parseResult.config).toBeDefined(); + + const parsedConfig = parseResult.config!; + + // Each hierarchy level's glob should be preserved + for (let i = 0; i < originalConfig.hierarchy.length; i++) { + const origLevel = originalConfig.hierarchy[i]; + const parsedLevel = parsedConfig.hierarchy[i]; + + if (origLevel.glob) { + expect(parsedLevel.glob).toBe(origLevel.glob); + } + } + }), + propertyTestConfig + ); + }); +}); diff --git a/backend/test/properties/hiera/property-4.test.ts b/backend/test/properties/hiera/property-4.test.ts new file mode 100644 index 0000000..12e5bed --- /dev/null +++ b/backend/test/properties/hiera/property-4.test.ts @@ -0,0 +1,366 @@ +/** + * Feature: hiera-codebase-integration, Property 4: Hiera Parser Error Reporting + * Validates: Requirements 2.5 + * + * This property test verifies that: + * For any YAML string containing syntax errors, the Hiera_Parser SHALL return + * an error result that includes the line number where the error occurs. 
+ */ + +import { describe, it, expect } from 'vitest'; +import fc from 'fast-check'; +import { HieraParser } from '../../../src/integrations/hiera/HieraParser'; +import { HIERA_ERROR_CODES } from '../../../src/integrations/hiera/types'; + +describe('Property 4: Hiera Parser Error Reporting', () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + // Generator for valid YAML key names + const yamlKeyArb = fc.string({ minLength: 1, maxLength: 20 }) + .filter(s => /^[a-zA-Z_][a-zA-Z0-9_]*$/.test(s)); + + // Generator for valid YAML string values (non-empty, no special chars) + const yamlValueArb = fc.string({ minLength: 1, maxLength: 30 }) + .filter(s => /^[a-zA-Z0-9_]+$/.test(s)); + + /** + * Generator for YAML with duplicate keys at a specific line + * Duplicate keys are a YAML syntax error when strict mode is enabled + */ + const duplicateKeyYamlArb = fc.tuple( + fc.integer({ min: 0, max: 5 }), // Number of valid lines before duplicate + yamlKeyArb, // The key that will be duplicated + yamlValueArb, // First value + yamlValueArb, // Second value (duplicate key) + ).map(([prefixLines, key, value1, value2]) => { + const lines: string[] = []; + + // Add version line (required for Hiera) + lines.push('version: 5'); + + // Add some valid lines + for (let i = 0; i < prefixLines; i++) { + lines.push(`key_${i}: value_${i}`); + } + + // Add the first occurrence of the key + lines.push(`${key}: ${value1}`); + + // Add the duplicate key (this should cause an error) + const duplicateLine = lines.length + 1; // 1-indexed line number + lines.push(`${key}: ${value2}`); + + return { + yaml: lines.join('\n'), + expectedErrorLine: duplicateLine, + }; + }); + + /** + * Generator for YAML with truly unclosed quotes (multiline string without proper termination) + * This creates YAML that will definitely fail to parse + */ + const unclosedQuoteYamlArb = fc.tuple( + fc.integer({ min: 0, max: 3 }), + yamlKeyArb, + yamlValueArb, + ).map(([prefixLines, key, value]) 
=> { + const lines: string[] = []; + + lines.push('version: 5'); + + for (let i = 0; i < prefixLines; i++) { + lines.push(`key_${i}: value_${i}`); + } + + // Add unclosed quote that spans to next line with invalid content + const errorLine = lines.length + 1; + lines.push(`${key}: "${value}`); + lines.push(` invalid: content`); // This makes the unclosed quote a real error + + return { + yaml: lines.join('\n'), + expectedErrorLine: errorLine, + }; + }); + + /** + * Generator for YAML with invalid block scalar indicators + */ + const invalidBlockScalarYamlArb = fc.tuple( + fc.integer({ min: 0, max: 3 }), + yamlKeyArb, + ).map(([prefixLines, key]) => { + const lines: string[] = []; + + lines.push('version: 5'); + + for (let i = 0; i < prefixLines; i++) { + lines.push(`key_${i}: value_${i}`); + } + + // Add invalid block scalar (| or > followed by invalid indicator) + const errorLine = lines.length + 1; + lines.push(`${key}: |invalid`); // Invalid block scalar indicator + + return { + yaml: lines.join('\n'), + expectedErrorLine: errorLine, + }; + }); + + /** + * Generator for YAML with invalid mapping syntax + */ + const invalidMappingYamlArb = fc.tuple( + fc.integer({ min: 0, max: 3 }), + ).map(([prefixLines]) => { + const lines: string[] = []; + + lines.push('version: 5'); + + for (let i = 0; i < prefixLines; i++) { + lines.push(`key_${i}: value_${i}`); + } + + // Add invalid mapping (key without value followed by invalid structure) + const errorLine = lines.length + 1; + lines.push(`invalid_key`); // Key without colon + lines.push(` : orphan_value`); // Orphan value + + return { + yaml: lines.join('\n'), + expectedErrorLine: errorLine, + }; + }); + + it('should return error with line number for YAML with duplicate keys', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property(duplicateKeyYamlArb, ({ yaml }) => { + const result = parser.parseContent(yaml, 'test-hiera.yaml'); + + // Should fail to parse + 
expect(result.success).toBe(false); + expect(result.error).toBeDefined(); + expect(result.error!.code).toBe(HIERA_ERROR_CODES.PARSE_ERROR); + + // Error message should be descriptive + expect(result.error!.message).toBeTruthy(); + expect(result.error!.message.length).toBeGreaterThan(0); + + // Should include file in details + expect(result.error!.details?.file).toBe('test-hiera.yaml'); + + // Should include line number in details + expect(result.error!.details?.line).toBeDefined(); + expect(typeof result.error!.details?.line).toBe('number'); + expect(result.error!.details!.line).toBeGreaterThan(0); + }), + propertyTestConfig + ); + }); + + it('should return error with line number for YAML with unclosed quotes', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property(unclosedQuoteYamlArb, ({ yaml }) => { + const result = parser.parseContent(yaml, 'test-hiera.yaml'); + + // Should fail to parse + expect(result.success).toBe(false); + expect(result.error).toBeDefined(); + expect(result.error!.code).toBe(HIERA_ERROR_CODES.PARSE_ERROR); + + // Error message should be descriptive + expect(result.error!.message).toBeTruthy(); + + // Should include file in details + expect(result.error!.details?.file).toBe('test-hiera.yaml'); + + // Should include line number in details + expect(result.error!.details?.line).toBeDefined(); + expect(typeof result.error!.details?.line).toBe('number'); + expect(result.error!.details!.line).toBeGreaterThan(0); + }), + propertyTestConfig + ); + }); + + it('should return error with line number for any YAML syntax error', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + // Combined generator for various syntax errors that produce line numbers + const syntaxErrorYamlArb = fc.oneof( + duplicateKeyYamlArb, + invalidBlockScalarYamlArb, + ); + + fc.assert( + fc.property(syntaxErrorYamlArb, ({ yaml }) => { + const result = parser.parseContent(yaml, 'malformed.yaml'); + + // Should fail to 
parse + expect(result.success).toBe(false); + expect(result.error).toBeDefined(); + + // Error code should be PARSE_ERROR + expect(result.error!.code).toBe(HIERA_ERROR_CODES.PARSE_ERROR); + + // Error message should contain useful information + expect(result.error!.message).toBeTruthy(); + expect(result.error!.message.length).toBeGreaterThan(10); + + // Details should include file path + expect(result.error!.details?.file).toBe('malformed.yaml'); + + // Details should include line number for YAML syntax errors + expect(result.error!.details?.line).toBeDefined(); + expect(typeof result.error!.details?.line).toBe('number'); + expect(result.error!.details!.line).toBeGreaterThan(0); + }), + propertyTestConfig + ); + }); + + it('should return descriptive error message for syntax errors', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property(duplicateKeyYamlArb, ({ yaml }) => { + const result = parser.parseContent(yaml, 'test.yaml'); + + expect(result.success).toBe(false); + expect(result.error).toBeDefined(); + + // Message should mention syntax or YAML + const message = result.error!.message.toLowerCase(); + const hasSyntaxInfo = message.includes('syntax') || + message.includes('yaml') || + message.includes('duplicate') || + message.includes('error'); + expect(hasSyntaxInfo).toBe(true); + }), + propertyTestConfig + ); + }); + + it('should include suggestion in error details when available', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + // Test with a specific known error type + const invalidVersionYaml = ` +version: 3 +hierarchy: + - name: common + path: common.yaml +`; + + const result = parser.parseContent(invalidVersionYaml, 'test.yaml'); + + expect(result.success).toBe(false); + expect(result.error).toBeDefined(); + expect(result.error!.details?.suggestion).toBeDefined(); + expect(result.error!.details!.suggestion!.length).toBeGreaterThan(0); + }); + + it('should handle empty content gracefully', 
() => { + const parser = new HieraParser('/tmp/test-control-repo'); + + const emptyContentArb = fc.constantFrom('', ' ', '\n', '\n\n', ' \n '); + + fc.assert( + fc.property(emptyContentArb, (content) => { + const result = parser.parseContent(content, 'empty.yaml'); + + // Should fail (empty is not valid Hiera config) + expect(result.success).toBe(false); + expect(result.error).toBeDefined(); + expect(result.error!.code).toBe(HIERA_ERROR_CODES.PARSE_ERROR); + expect(result.error!.details?.file).toBe('empty.yaml'); + }), + propertyTestConfig + ); + }); + + it('should return error for non-object YAML content', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + // YAML that parses to non-object types - these will fail validation + const nonObjectYamlArb = fc.constantFrom( + '"just a string"', // String + '42', // Number + 'true', // Boolean + ); + + fc.assert( + fc.property(nonObjectYamlArb, (yaml) => { + const result = parser.parseContent(yaml, 'invalid.yaml'); + + expect(result.success).toBe(false); + expect(result.error).toBeDefined(); + expect(result.error!.code).toBe(HIERA_ERROR_CODES.PARSE_ERROR); + // Message should indicate the problem (object expected or version issue) + expect(result.error!.message.length).toBeGreaterThan(0); + expect(result.error!.details?.file).toBe('invalid.yaml'); + }), + propertyTestConfig + ); + }); + + it('should return error with line info for missing required fields', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + // Valid YAML but missing required Hiera fields + const missingFieldsYamlArb = fc.constantFrom( + 'version: 5', // Missing hierarchy + 'hierarchy:\n - name: test', // Missing version + 'version: 5\nhierarchy: "not an array"', // hierarchy not array + 'version: 5\nhierarchy:\n - path: test.yaml', // Missing name in level + ); + + fc.assert( + fc.property(missingFieldsYamlArb, (yaml) => { + const result = parser.parseContent(yaml, 'incomplete.yaml'); + + 
expect(result.success).toBe(false); + expect(result.error).toBeDefined(); + expect(result.error!.code).toBe(HIERA_ERROR_CODES.PARSE_ERROR); + expect(result.error!.details?.file).toBe('incomplete.yaml'); + + // Message should indicate what's missing or wrong + expect(result.error!.message.length).toBeGreaterThan(0); + }), + propertyTestConfig + ); + }); + + it('should return error with line number for invalid block scalar syntax', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property(invalidBlockScalarYamlArb, ({ yaml }) => { + const result = parser.parseContent(yaml, 'block-error.yaml'); + + // Should fail to parse + expect(result.success).toBe(false); + expect(result.error).toBeDefined(); + expect(result.error!.code).toBe(HIERA_ERROR_CODES.PARSE_ERROR); + + // Should include file in details + expect(result.error!.details?.file).toBe('block-error.yaml'); + + // Should include line number in details + expect(result.error!.details?.line).toBeDefined(); + expect(typeof result.error!.details?.line).toBe('number'); + expect(result.error!.details!.line).toBeGreaterThan(0); + }), + propertyTestConfig + ); + }); +}); diff --git a/backend/test/properties/hiera/property-5.test.ts b/backend/test/properties/hiera/property-5.test.ts new file mode 100644 index 0000000..dd7af9f --- /dev/null +++ b/backend/test/properties/hiera/property-5.test.ts @@ -0,0 +1,389 @@ +/** + * Feature: hiera-codebase-integration, Property 5: Hierarchy Path Interpolation + * Validates: Requirements 2.6 + * + * This property test verifies that: + * For any hierarchy path template containing fact variables (e.g., %{facts.os.family}) + * and any valid fact set, interpolating the path SHALL replace all variables with + * their corresponding fact values. 
+ * + * Supported variable syntaxes: + * - %{facts.xxx} - Hiera 5 fact syntax + * - %{::xxx} - Legacy top-scope variable syntax + * - %{trusted.xxx} - Trusted facts syntax + * - %{server_facts.xxx} - Server facts syntax + */ + +import { describe, it, expect } from 'vitest'; +import fc from 'fast-check'; +import { HieraParser } from '../../../src/integrations/hiera/HieraParser'; +import type { Facts } from '../../../src/integrations/hiera/types'; + +describe('Property 5: Hierarchy Path Interpolation', () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + // Generator for valid fact names (alphanumeric with underscores) + const factNameArb = fc.string({ minLength: 1, maxLength: 20 }) + .filter(s => /^[a-z][a-z0-9_]*$/.test(s)); + + // Generator for valid fact values (strings that are safe for paths) + const factValueArb = fc.string({ minLength: 1, maxLength: 30 }) + .filter(s => /^[a-zA-Z0-9_-]+$/.test(s)); + + // Generator for nested fact paths (e.g., "os.family", "networking.ip") + const nestedFactPathArb = fc.array(factNameArb, { minLength: 1, maxLength: 3 }) + .map(parts => parts.join('.')); + + // Generator for simple facts (flat key-value pairs) + const simpleFacts = fc.dictionary(factNameArb, factValueArb, { minKeys: 1, maxKeys: 5 }); + + // Generator for nested facts (e.g., os: { family: 'RedHat' }) + const nestedFactsArb = fc.record({ + os: fc.record({ + family: factValueArb, + name: factValueArb, + release: fc.record({ + major: fc.integer({ min: 1, max: 20 }).map(String), + minor: fc.integer({ min: 0, max: 10 }).map(String), + }), + }), + networking: fc.record({ + hostname: factValueArb, + domain: factValueArb, + ip: fc.ipV4(), + }), + environment: factValueArb, + hostname: factValueArb, + fqdn: factValueArb, + }); + + // Generator for trusted facts + const trustedFactsArb = fc.record({ + certname: factValueArb, + domain: factValueArb, + hostname: factValueArb, + }); + + // Generator for server facts + const serverFactsArb = 
fc.record({ + serverversion: factValueArb, + servername: factValueArb, + }); + + /** + * Helper to create a Facts object from raw facts + */ + function createFacts(rawFacts: Record): Facts { + return { + nodeId: 'test-node', + gatheredAt: new Date().toISOString(), + facts: rawFacts, + }; + } + + /** + * Helper to get nested value from object + * Uses Object.hasOwn() to prevent prototype pollution attacks + */ + function getNestedValue(obj: Record, path: string): unknown { + const parts = path.split('.'); + let current: unknown = obj; + for (const part of parts) { + if (current === null || current === undefined || typeof current !== 'object') { + return undefined; + } + // Use Object.hasOwn to prevent prototype pollution + if (!Object.hasOwn(current as Record, part)) { + return undefined; + } + current = (current as Record)[part]; + } + return current; + } + + it('should replace %{facts.xxx} variables with corresponding fact values', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property(nestedFactsArb, (rawFacts) => { + const facts = createFacts(rawFacts); + + // Test with os.family + const template1 = 'nodes/%{facts.os.family}.yaml'; + const result1 = parser.interpolatePath(template1, facts); + expect(result1).toBe(`nodes/${rawFacts.os.family}.yaml`); + + // Test with hostname + const template2 = 'nodes/%{facts.hostname}.yaml'; + const result2 = parser.interpolatePath(template2, facts); + expect(result2).toBe(`nodes/${rawFacts.hostname}.yaml`); + + // Test with nested os.release.major + const template3 = 'os/%{facts.os.name}/%{facts.os.release.major}.yaml'; + const result3 = parser.interpolatePath(template3, facts); + expect(result3).toBe(`os/${rawFacts.os.name}/${rawFacts.os.release.major}.yaml`); + }), + propertyTestConfig + ); + }); + + it('should replace %{::xxx} legacy syntax with corresponding fact values', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + 
fc.property(nestedFactsArb, (rawFacts) => { + const facts = createFacts(rawFacts); + + // Test with ::hostname (legacy syntax) + const template1 = 'nodes/%{::hostname}.yaml'; + const result1 = parser.interpolatePath(template1, facts); + expect(result1).toBe(`nodes/${rawFacts.hostname}.yaml`); + + // Test with ::environment + const template2 = 'environments/%{::environment}.yaml'; + const result2 = parser.interpolatePath(template2, facts); + expect(result2).toBe(`environments/${rawFacts.environment}.yaml`); + + // Test with nested ::os.family + const template3 = 'os/%{::os.family}.yaml'; + const result3 = parser.interpolatePath(template3, facts); + expect(result3).toBe(`os/${rawFacts.os.family}.yaml`); + }), + propertyTestConfig + ); + }); + + it('should replace %{trusted.xxx} variables with trusted fact values', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property(trustedFactsArb, (trustedFacts) => { + const facts = createFacts({ trusted: trustedFacts }); + + // Test with trusted.certname + const template1 = 'nodes/%{trusted.certname}.yaml'; + const result1 = parser.interpolatePath(template1, facts); + expect(result1).toBe(`nodes/${trustedFacts.certname}.yaml`); + + // Test with trusted.domain + const template2 = 'domains/%{trusted.domain}.yaml'; + const result2 = parser.interpolatePath(template2, facts); + expect(result2).toBe(`domains/${trustedFacts.domain}.yaml`); + }), + propertyTestConfig + ); + }); + + it('should replace %{server_facts.xxx} variables with server fact values', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property(serverFactsArb, (serverFacts) => { + const facts = createFacts({ server_facts: serverFacts }); + + // Test with server_facts.serverversion + const template1 = 'puppet/%{server_facts.serverversion}.yaml'; + const result1 = parser.interpolatePath(template1, facts); + expect(result1).toBe(`puppet/${serverFacts.serverversion}.yaml`); + + // Test with 
server_facts.servername + const template2 = 'servers/%{server_facts.servername}.yaml'; + const result2 = parser.interpolatePath(template2, facts); + expect(result2).toBe(`servers/${serverFacts.servername}.yaml`); + }), + propertyTestConfig + ); + }); + + it('should handle multiple variables in a single path template', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property(nestedFactsArb, (rawFacts) => { + const facts = createFacts(rawFacts); + + // Template with multiple variables + const template = '%{facts.os.family}/%{facts.os.name}/%{facts.hostname}.yaml'; + const result = parser.interpolatePath(template, facts); + expect(result).toBe(`${rawFacts.os.family}/${rawFacts.os.name}/${rawFacts.hostname}.yaml`); + }), + propertyTestConfig + ); + }); + + it('should preserve unresolved variables when fact is not found', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property(simpleFacts, (rawFacts) => { + const facts = createFacts(rawFacts); + + // Template with non-existent fact + const template = 'nodes/%{facts.nonexistent_fact}.yaml'; + const result = parser.interpolatePath(template, facts); + + // Should preserve the original variable syntax when fact doesn't exist + expect(result).toBe('nodes/%{facts.nonexistent_fact}.yaml'); + }), + propertyTestConfig + ); + }); + + it('should handle paths without variables unchanged', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property(simpleFacts, (rawFacts) => { + const facts = createFacts(rawFacts); + + // Template without variables + const template = 'common/defaults.yaml'; + const result = parser.interpolatePath(template, facts); + + // Should return unchanged + expect(result).toBe('common/defaults.yaml'); + }), + propertyTestConfig + ); + }); + + it('should handle mixed variable syntaxes in the same template', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + 
fc.property( + fc.tuple(nestedFactsArb, trustedFactsArb), + ([rawFacts, trustedFacts]) => { + const facts = createFacts({ + ...rawFacts, + trusted: trustedFacts, + }); + + // Template mixing facts and trusted syntaxes + const template = '%{facts.os.family}/%{trusted.certname}.yaml'; + const result = parser.interpolatePath(template, facts); + expect(result).toBe(`${rawFacts.os.family}/${trustedFacts.certname}.yaml`); + } + ), + propertyTestConfig + ); + }); + + it('should correctly interpolate all variables in any valid path template', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + // Generator for path templates with embedded variables + // When key1 === key2, the second value overwrites the first in the facts object + const pathTemplateArb = fc.tuple( + factNameArb, + factValueArb, + factNameArb, + factValueArb, + ).map(([key1, val1, key2, val2]) => { + // Build the facts object - if keys are the same, val2 overwrites val1 + const factsObj: Record = { [key1]: val1, [key2]: val2 }; + // Calculate expected based on actual fact values that will be used + const expectedVal1 = key1 === key2 ? 
val2 : val1; + const expectedVal2 = val2; + return { + template: `data/%{facts.${key1}}/%{facts.${key2}}.yaml`, + facts: factsObj, + expected: `data/${expectedVal1}/${expectedVal2}.yaml`, + }; + }); + + fc.assert( + fc.property(pathTemplateArb, ({ template, facts: rawFacts, expected }) => { + const facts = createFacts(rawFacts); + const result = parser.interpolatePath(template, facts); + expect(result).toBe(expected); + }), + propertyTestConfig + ); + }); + + it('should handle deeply nested fact paths', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property( + fc.tuple(factValueArb, factValueArb, factValueArb), + ([level1, level2, level3]) => { + const rawFacts = { + deep: { + nested: { + value: level1, + another: { + level: level2, + }, + }, + }, + simple: level3, + }; + const facts = createFacts(rawFacts); + + // Test deeply nested path + const template1 = 'data/%{facts.deep.nested.value}.yaml'; + const result1 = parser.interpolatePath(template1, facts); + expect(result1).toBe(`data/${level1}.yaml`); + + // Test even deeper nesting + const template2 = 'data/%{facts.deep.nested.another.level}.yaml'; + const result2 = parser.interpolatePath(template2, facts); + expect(result2).toBe(`data/${level2}.yaml`); + } + ), + propertyTestConfig + ); + }); + + it('should handle simple variable syntax without prefix', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + fc.property(simpleFacts, (rawFacts) => { + const facts = createFacts(rawFacts); + const factKeys = Object.keys(rawFacts); + + if (factKeys.length > 0) { + const key = factKeys[0]; + const template = `data/%{${key}}.yaml`; + const result = parser.interpolatePath(template, facts); + expect(result).toBe(`data/${rawFacts[key]}.yaml`); + } + }), + propertyTestConfig + ); + }); + + it('should convert non-string fact values to strings during interpolation', () => { + const parser = new HieraParser('/tmp/test-control-repo'); + + fc.assert( + 
fc.property( + fc.tuple(fc.integer({ min: 0, max: 1000 }), fc.boolean()), + ([numValue, boolValue]) => { + const rawFacts = { + port: numValue, + enabled: boolValue, + }; + const facts = createFacts(rawFacts); + + // Test with integer value + const template1 = 'ports/%{facts.port}.yaml'; + const result1 = parser.interpolatePath(template1, facts); + expect(result1).toBe(`ports/${numValue}.yaml`); + + // Test with boolean value + const template2 = 'flags/%{facts.enabled}.yaml'; + const result2 = parser.interpolatePath(template2, facts); + expect(result2).toBe(`flags/${boolValue}.yaml`); + } + ), + propertyTestConfig + ); + }); +}); diff --git a/backend/test/properties/hiera/property-6.test.ts b/backend/test/properties/hiera/property-6.test.ts new file mode 100644 index 0000000..08a04f4 --- /dev/null +++ b/backend/test/properties/hiera/property-6.test.ts @@ -0,0 +1,445 @@ +/** + * Feature: hiera-codebase-integration, Property 6: Fact Source Priority + * Validates: Requirements 3.1, 3.5 + * + * This property test verifies that: + * For any node where both PuppetDB and local fact files contain facts, + * the Fact_Service SHALL return the PuppetDB facts when PuppetDB integration + * is available and configured as preferred. 
+ */ + +import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"; +import fc from "fast-check"; +import * as fs from "fs"; +import { FactService } from "../../../src/integrations/hiera/FactService"; +import type { IntegrationManager } from "../../../src/integrations/IntegrationManager"; +import type { InformationSourcePlugin } from "../../../src/integrations/types"; +import type { Facts, LocalFactFile } from "../../../src/integrations/hiera/types"; + +// Mock fs module +vi.mock("fs"); + +describe("Property 6: Fact Source Priority", () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + let factService: FactService; + let mockIntegrationManager: IntegrationManager; + let mockPuppetDBSource: InformationSourcePlugin; + + const testLocalFactsPath = "/tmp/facts"; + + // Generator for valid node names (hostname-like strings) + const nodeNameArb = fc + .string({ minLength: 1, maxLength: 30 }) + .filter((s) => /^[a-z][a-z0-9-]*[a-z0-9]$/.test(s) || /^[a-z]$/.test(s)) + .map((s) => `${s}.example.com`); + + // Generator for simple fact values + const simpleFactValueArb: fc.Arbitrary = fc.oneof( + fc.string({ minLength: 1, maxLength: 50 }).filter((s) => !s.includes("\u0000")), + fc.integer({ min: -1000000, max: 1000000 }), + fc.boolean() + ); + + // Generator for fact keys + const factKeyArb = fc + .string({ minLength: 1, maxLength: 20 }) + .filter((s) => /^[a-z][a-z_]*$/.test(s)); + + // Generator for fact values object + const factValuesArb: fc.Arbitrary> = fc.dictionary( + factKeyArb, + simpleFactValueArb, + { minKeys: 1, maxKeys: 10 } + ); + + // Generator for PuppetDB Facts object + const puppetDBFactsArb: fc.Arbitrary = fc.record({ + nodeId: nodeNameArb, + gatheredAt: fc.constant(new Date().toISOString()), + facts: factValuesArb.map((values) => ({ + os: { + family: "RedHat", + name: "CentOS", + release: { full: "7.9", major: "7" }, + }, + processors: { count: 4, models: ["Intel Xeon"] }, + memory: { system: { total: "16 GB", 
available: "8 GB" } }, + networking: { hostname: "puppetdb-node", interfaces: {} }, + ...values, + source_marker: "puppetdb", // Marker to identify source + })), + }); + + // Generator for local fact file + const localFactFileArb: fc.Arbitrary = fc.record({ + name: nodeNameArb, + values: factValuesArb.map((values) => ({ + os: { + family: "Debian", + name: "Ubuntu", + release: { full: "20.04", major: "20" }, + }, + processors: { count: 2, models: ["AMD EPYC"] }, + memory: { system: { total: "8 GB", available: "4 GB" } }, + networking: { hostname: "local-node", interfaces: {} }, + ...values, + source_marker: "local", // Marker to identify source + })), + }); + + beforeEach(() => { + vi.clearAllMocks(); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + /** + * Helper to create a mock PuppetDB source + */ + function createMockPuppetDBSource( + initialized: boolean, + factsToReturn?: Facts + ): InformationSourcePlugin { + return { + name: "puppetdb", + type: "information", + isInitialized: vi.fn().mockReturnValue(initialized), + getNodeFacts: vi.fn().mockImplementation(async () => { + if (!initialized) { + throw new Error("PuppetDB not initialized"); + } + if (factsToReturn) { + return factsToReturn; + } + throw new Error("No facts available"); + }), + getInventory: vi.fn().mockResolvedValue([]), + getNodeData: vi.fn(), + initialize: vi.fn(), + healthCheck: vi.fn(), + getConfig: vi.fn(), + } as unknown as InformationSourcePlugin; + } + + /** + * Helper to setup local fact file mock + */ + function setupLocalFactFileMock(localFactFile: LocalFactFile): void { + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(JSON.stringify(localFactFile)); + } + + it("should return PuppetDB facts when both sources are available and PuppetDB is preferred", async () => { + await fc.assert( + fc.asyncProperty( + puppetDBFactsArb, + localFactFileArb, + async (puppetDBFacts, localFactFile) => { + // Use the same nodeId for both sources + 
const nodeId = puppetDBFacts.nodeId; + localFactFile.name = nodeId; + + // Setup PuppetDB mock - initialized and returning facts + mockPuppetDBSource = createMockPuppetDBSource(true, puppetDBFacts); + mockIntegrationManager = { + getInformationSource: vi.fn().mockReturnValue(mockPuppetDBSource), + } as unknown as IntegrationManager; + + // Setup local facts mock + setupLocalFactFileMock(localFactFile); + + // Create FactService with PuppetDB preferred (default) + factService = new FactService(mockIntegrationManager, { + preferPuppetDB: true, + localFactsPath: testLocalFactsPath, + }); + + // Get facts + const result = await factService.getFacts(nodeId); + + // Should return PuppetDB facts + expect(result.source).toBe("puppetdb"); + expect(result.facts.facts.source_marker).toBe("puppetdb"); + expect(result.warnings).toBeUndefined(); + } + ), + propertyTestConfig + ); + }); + + it("should return local facts when PuppetDB is not initialized", async () => { + await fc.assert( + fc.asyncProperty( + puppetDBFactsArb, + localFactFileArb, + async (puppetDBFacts, localFactFile) => { + const nodeId = puppetDBFacts.nodeId; + localFactFile.name = nodeId; + + // Setup PuppetDB mock - NOT initialized + mockPuppetDBSource = createMockPuppetDBSource(false); + mockIntegrationManager = { + getInformationSource: vi.fn().mockReturnValue(mockPuppetDBSource), + } as unknown as IntegrationManager; + + // Setup local facts mock + setupLocalFactFileMock(localFactFile); + + factService = new FactService(mockIntegrationManager, { + preferPuppetDB: true, + localFactsPath: testLocalFactsPath, + }); + + const result = await factService.getFacts(nodeId); + + // Should fall back to local facts + expect(result.source).toBe("local"); + expect(result.facts.facts.source_marker).toBe("local"); + expect(result.warnings).toContain( + "Using local fact files - facts may be outdated" + ); + } + ), + propertyTestConfig + ); + }); + + it("should return local facts when PuppetDB fails to retrieve facts", 
async () => { + await fc.assert( + fc.asyncProperty(localFactFileArb, async (localFactFile) => { + const nodeId = localFactFile.name; + + // Setup PuppetDB mock - initialized but throws error + mockPuppetDBSource = { + name: "puppetdb", + type: "information", + isInitialized: vi.fn().mockReturnValue(true), + getNodeFacts: vi.fn().mockRejectedValue(new Error("Node not found")), + getInventory: vi.fn().mockResolvedValue([]), + getNodeData: vi.fn(), + initialize: vi.fn(), + healthCheck: vi.fn(), + getConfig: vi.fn(), + } as unknown as InformationSourcePlugin; + + mockIntegrationManager = { + getInformationSource: vi.fn().mockReturnValue(mockPuppetDBSource), + } as unknown as IntegrationManager; + + setupLocalFactFileMock(localFactFile); + + factService = new FactService(mockIntegrationManager, { + preferPuppetDB: true, + localFactsPath: testLocalFactsPath, + }); + + const result = await factService.getFacts(nodeId); + + // Should fall back to local facts + expect(result.source).toBe("local"); + expect(result.facts.facts.source_marker).toBe("local"); + }), + propertyTestConfig + ); + }); + + it("should return local facts first when preferPuppetDB is false", async () => { + await fc.assert( + fc.asyncProperty( + puppetDBFactsArb, + localFactFileArb, + async (puppetDBFacts, localFactFile) => { + const nodeId = puppetDBFacts.nodeId; + localFactFile.name = nodeId; + + // Setup PuppetDB mock - initialized and has facts + mockPuppetDBSource = createMockPuppetDBSource(true, puppetDBFacts); + mockIntegrationManager = { + getInformationSource: vi.fn().mockReturnValue(mockPuppetDBSource), + } as unknown as IntegrationManager; + + setupLocalFactFileMock(localFactFile); + + // Create FactService with PuppetDB NOT preferred + factService = new FactService(mockIntegrationManager, { + preferPuppetDB: false, + localFactsPath: testLocalFactsPath, + }); + + const result = await factService.getFacts(nodeId); + + // Should return local facts since preferPuppetDB is false + 
expect(result.source).toBe("local"); + expect(result.facts.facts.source_marker).toBe("local"); + } + ), + propertyTestConfig + ); + }); + + it("should return PuppetDB facts as fallback when preferPuppetDB is false but local facts unavailable", async () => { + await fc.assert( + fc.asyncProperty(puppetDBFactsArb, async (puppetDBFacts) => { + const nodeId = puppetDBFacts.nodeId; + + // Setup PuppetDB mock - initialized and has facts + mockPuppetDBSource = createMockPuppetDBSource(true, puppetDBFacts); + mockIntegrationManager = { + getInformationSource: vi.fn().mockReturnValue(mockPuppetDBSource), + } as unknown as IntegrationManager; + + // Local facts file does NOT exist + vi.mocked(fs.existsSync).mockReturnValue(false); + + factService = new FactService(mockIntegrationManager, { + preferPuppetDB: false, + localFactsPath: testLocalFactsPath, + }); + + const result = await factService.getFacts(nodeId); + + // Should fall back to PuppetDB + expect(result.source).toBe("puppetdb"); + expect(result.facts.facts.source_marker).toBe("puppetdb"); + }), + propertyTestConfig + ); + }); + + it("should return empty facts with warning when neither source is available", async () => { + await fc.assert( + fc.asyncProperty(nodeNameArb, async (nodeId) => { + // Setup PuppetDB mock - NOT initialized + mockPuppetDBSource = createMockPuppetDBSource(false); + mockIntegrationManager = { + getInformationSource: vi.fn().mockReturnValue(mockPuppetDBSource), + } as unknown as IntegrationManager; + + // Local facts file does NOT exist + vi.mocked(fs.existsSync).mockReturnValue(false); + + factService = new FactService(mockIntegrationManager, { + preferPuppetDB: true, + localFactsPath: testLocalFactsPath, + }); + + const result = await factService.getFacts(nodeId); + + // Should return empty facts with warning + expect(result.source).toBe("local"); + expect(result.warnings).toBeDefined(); + expect(result.warnings).toContain(`No facts available for node '${nodeId}'`); + 
expect(result.facts.facts.os.family).toBe("Unknown"); + }), + propertyTestConfig + ); + }); + + it("should correctly report fact source via getFactSource when PuppetDB is available", async () => { + await fc.assert( + fc.asyncProperty(puppetDBFactsArb, async (puppetDBFacts) => { + const nodeId = puppetDBFacts.nodeId; + + // Setup PuppetDB mock - initialized and has facts + mockPuppetDBSource = createMockPuppetDBSource(true, puppetDBFacts); + mockIntegrationManager = { + getInformationSource: vi.fn().mockReturnValue(mockPuppetDBSource), + } as unknown as IntegrationManager; + + factService = new FactService(mockIntegrationManager, { + preferPuppetDB: true, + localFactsPath: testLocalFactsPath, + }); + + const source = await factService.getFactSource(nodeId); + + expect(source).toBe("puppetdb"); + }), + propertyTestConfig + ); + }); + + it("should correctly report fact source via getFactSource when only local facts available", async () => { + await fc.assert( + fc.asyncProperty(localFactFileArb, async (localFactFile) => { + const nodeId = localFactFile.name; + + // Setup PuppetDB mock - NOT initialized + mockPuppetDBSource = createMockPuppetDBSource(false); + mockIntegrationManager = { + getInformationSource: vi.fn().mockReturnValue(mockPuppetDBSource), + } as unknown as IntegrationManager; + + setupLocalFactFileMock(localFactFile); + + factService = new FactService(mockIntegrationManager, { + preferPuppetDB: true, + localFactsPath: testLocalFactsPath, + }); + + const source = await factService.getFactSource(nodeId); + + expect(source).toBe("local"); + }), + propertyTestConfig + ); + }); + + it("should report 'none' when no fact source is available", async () => { + await fc.assert( + fc.asyncProperty(nodeNameArb, async (nodeId) => { + // Setup PuppetDB mock - NOT initialized + mockPuppetDBSource = createMockPuppetDBSource(false); + mockIntegrationManager = { + getInformationSource: vi.fn().mockReturnValue(mockPuppetDBSource), + } as unknown as IntegrationManager; + 
+ // Local facts file does NOT exist + vi.mocked(fs.existsSync).mockReturnValue(false); + + factService = new FactService(mockIntegrationManager, { + preferPuppetDB: true, + localFactsPath: testLocalFactsPath, + }); + + const source = await factService.getFactSource(nodeId); + + expect(source).toBe("none"); + }), + propertyTestConfig + ); + }); + + it("should handle null PuppetDB source gracefully", async () => { + await fc.assert( + fc.asyncProperty(localFactFileArb, async (localFactFile) => { + const nodeId = localFactFile.name; + + // Setup IntegrationManager to return null for PuppetDB + mockIntegrationManager = { + getInformationSource: vi.fn().mockReturnValue(null), + } as unknown as IntegrationManager; + + setupLocalFactFileMock(localFactFile); + + factService = new FactService(mockIntegrationManager, { + preferPuppetDB: true, + localFactsPath: testLocalFactsPath, + }); + + const result = await factService.getFacts(nodeId); + + // Should fall back to local facts + expect(result.source).toBe("local"); + expect(result.facts.facts.source_marker).toBe("local"); + }), + propertyTestConfig + ); + }); +}); diff --git a/backend/test/properties/hiera/property-7.test.ts b/backend/test/properties/hiera/property-7.test.ts new file mode 100644 index 0000000..27a6cae --- /dev/null +++ b/backend/test/properties/hiera/property-7.test.ts @@ -0,0 +1,345 @@ +/** + * Feature: hiera-codebase-integration, Property 7: Local Fact File Parsing + * Validates: Requirements 3.3, 3.4 + * + * This property test verifies that: + * For any valid JSON file in Puppetserver fact format (with "name" and "values" fields), + * the Fact_Service SHALL parse it and return a Facts object with all values accessible. 
+ */ + +import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"; +import fc from "fast-check"; +import * as fs from "fs"; +import { FactService } from "../../../src/integrations/hiera/FactService"; +import type { IntegrationManager } from "../../../src/integrations/IntegrationManager"; +import type { InformationSourcePlugin } from "../../../src/integrations/types"; +import type { LocalFactFile } from "../../../src/integrations/hiera/types"; + +// Mock fs module +vi.mock("fs"); + +describe("Property 7: Local Fact File Parsing", () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + let factService: FactService; + let mockIntegrationManager: IntegrationManager; + let mockPuppetDBSource: InformationSourcePlugin; + + const testLocalFactsPath = "/tmp/facts"; + + beforeEach(() => { + vi.clearAllMocks(); + + // Create mock PuppetDB source that is NOT initialized + // This forces the FactService to use local facts + mockPuppetDBSource = { + name: "puppetdb", + type: "information", + isInitialized: vi.fn().mockReturnValue(false), + getNodeFacts: vi.fn(), + getInventory: vi.fn().mockResolvedValue([]), + getNodeData: vi.fn(), + initialize: vi.fn(), + healthCheck: vi.fn(), + getConfig: vi.fn(), + } as unknown as InformationSourcePlugin; + + mockIntegrationManager = { + getInformationSource: vi.fn().mockReturnValue(mockPuppetDBSource), + } as unknown as IntegrationManager; + + factService = new FactService(mockIntegrationManager, { + preferPuppetDB: true, + localFactsPath: testLocalFactsPath, + }); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + // Generator for valid node names (hostname-like strings) + const nodeNameArb = fc + .string({ minLength: 1, maxLength: 30 }) + .filter((s) => /^[a-z][a-z0-9-]*[a-z0-9]$/.test(s) || /^[a-z]$/.test(s)) + .map((s) => `${s}.example.com`); + + // Generator for simple fact values (strings, numbers, booleans) + const simpleFactValueArb: fc.Arbitrary = fc.oneof( + fc.string({ minLength: 
0, maxLength: 50 }).filter((s) => !s.includes("\u0000")), + fc.integer({ min: -1000000, max: 1000000 }), + fc.boolean() + ); + + // Generator for fact keys (valid identifier-like strings) + const factKeyArb = fc + .string({ minLength: 1, maxLength: 20 }) + .filter((s) => /^[a-z][a-z_]*$/.test(s)); + + // Generator for nested fact objects (up to 2 levels deep) + const nestedFactValueArb: fc.Arbitrary> = fc.dictionary( + factKeyArb, + fc.oneof( + simpleFactValueArb, + fc.array(simpleFactValueArb, { minLength: 0, maxLength: 5 }) + ), + { minKeys: 0, maxKeys: 5 } + ); + + // Generator for fact values (can be simple, array, or nested object) + const factValueArb: fc.Arbitrary = fc.oneof( + simpleFactValueArb, + fc.array(simpleFactValueArb, { minLength: 0, maxLength: 5 }), + nestedFactValueArb + ); + + // Generator for the values object in LocalFactFile + const factValuesArb: fc.Arbitrary> = fc.dictionary( + factKeyArb, + factValueArb, + { minKeys: 1, maxKeys: 10 } + ); + + // Generator for valid LocalFactFile (Puppetserver format) + const localFactFileArb: fc.Arbitrary = fc.record({ + name: nodeNameArb, + values: factValuesArb, + }); + + /** + * Helper to check if a value is accessible in the parsed facts + */ + function isValueAccessible( + facts: Record, + key: string, + expectedValue: unknown + ): boolean { + const actualValue = facts[key]; + + // Handle nested objects + if ( + typeof expectedValue === "object" && + expectedValue !== null && + !Array.isArray(expectedValue) + ) { + if (typeof actualValue !== "object" || actualValue === null) { + return false; + } + // Check all nested keys + for (const [nestedKey, nestedValue] of Object.entries( + expectedValue as Record + )) { + if ( + !isValueAccessible( + actualValue as Record, + nestedKey, + nestedValue + ) + ) { + return false; + } + } + return true; + } + + // Handle arrays + if (Array.isArray(expectedValue)) { + if (!Array.isArray(actualValue)) { + return false; + } + if (actualValue.length !== 
expectedValue.length) { + return false; + } + for (let i = 0; i < expectedValue.length; i++) { + if (actualValue[i] !== expectedValue[i]) { + return false; + } + } + return true; + } + + // Handle simple values + return actualValue === expectedValue; + } + + it("should parse any valid Puppetserver format fact file and make all values accessible", async () => { + await fc.assert( + fc.asyncProperty(localFactFileArb, async (localFactFile) => { + const nodeId = localFactFile.name; + const factFileContent = JSON.stringify(localFactFile); + + // Mock file system + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(factFileContent); + + // Parse the fact file + const result = await factService.getFacts(nodeId); + + // Should successfully parse + expect(result.source).toBe("local"); + expect(result.facts).toBeDefined(); + expect(result.facts.nodeId).toBe(nodeId); + + // All original values should be accessible in the parsed facts + for (const [key, value] of Object.entries(localFactFile.values)) { + expect( + isValueAccessible(result.facts.facts, key, value), + `Value for key '${key}' should be accessible` + ).toBe(true); + } + }), + propertyTestConfig + ); + }); + + it("should preserve the node name from the fact file", async () => { + await fc.assert( + fc.asyncProperty(localFactFileArb, async (localFactFile) => { + const nodeId = localFactFile.name; + const factFileContent = JSON.stringify(localFactFile); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(factFileContent); + + const result = await factService.getFacts(nodeId); + + // The nodeId in the result should match the requested nodeId + expect(result.facts.nodeId).toBe(nodeId); + }), + propertyTestConfig + ); + }); + + it("should include a gatheredAt timestamp for any parsed fact file", async () => { + await fc.assert( + fc.asyncProperty(localFactFileArb, async (localFactFile) => { + const nodeId = localFactFile.name; + const 
factFileContent = JSON.stringify(localFactFile); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(factFileContent); + + const result = await factService.getFacts(nodeId); + + // Should have a valid ISO timestamp + expect(result.facts.gatheredAt).toBeDefined(); + expect(() => new Date(result.facts.gatheredAt)).not.toThrow(); + expect(new Date(result.facts.gatheredAt).toISOString()).toBe( + result.facts.gatheredAt + ); + }), + propertyTestConfig + ); + }); + + it("should provide default values for standard fact fields when missing", async () => { + // Generator for fact files with only custom facts (no standard fields) + const customOnlyFactFileArb = fc.record({ + name: nodeNameArb, + values: fc.dictionary( + factKeyArb.filter( + (k) => !["os", "processors", "memory", "networking"].includes(k) + ), + simpleFactValueArb, + { minKeys: 1, maxKeys: 5 } + ), + }); + + await fc.assert( + fc.asyncProperty(customOnlyFactFileArb, async (localFactFile) => { + const nodeId = localFactFile.name; + const factFileContent = JSON.stringify(localFactFile); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(factFileContent); + + const result = await factService.getFacts(nodeId); + + // Standard fields should have default values + expect(result.facts.facts.os).toBeDefined(); + expect(result.facts.facts.os.family).toBe("Unknown"); + expect(result.facts.facts.os.name).toBe("Unknown"); + expect(result.facts.facts.processors).toBeDefined(); + expect(result.facts.facts.processors.count).toBe(0); + expect(result.facts.facts.memory).toBeDefined(); + expect(result.facts.facts.memory.system.total).toBe("Unknown"); + expect(result.facts.facts.networking).toBeDefined(); + expect(result.facts.facts.networking.hostname).toBe("Unknown"); + }), + propertyTestConfig + ); + }); + + it("should return local source indicator for any parsed local fact file", async () => { + await fc.assert( + 
fc.asyncProperty(localFactFileArb, async (localFactFile) => { + const nodeId = localFactFile.name; + const factFileContent = JSON.stringify(localFactFile); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(factFileContent); + + const result = await factService.getFacts(nodeId); + + // Source should always be 'local' when using local fact files + expect(result.source).toBe("local"); + }), + propertyTestConfig + ); + }); + + it("should include warning about outdated facts for any local fact file", async () => { + await fc.assert( + fc.asyncProperty(localFactFileArb, async (localFactFile) => { + const nodeId = localFactFile.name; + const factFileContent = JSON.stringify(localFactFile); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(factFileContent); + + const result = await factService.getFacts(nodeId); + + // Should include warning about potentially outdated facts + expect(result.warnings).toBeDefined(); + expect(result.warnings).toContain( + "Using local fact files - facts may be outdated" + ); + }), + propertyTestConfig + ); + }); + + // Test for flat fact structure (alternative format) + it("should also parse flat fact structure (non-Puppetserver format)", async () => { + // Generator for flat fact structure (no name/values wrapper) + const flatFactsArb = factValuesArb; + + await fc.assert( + fc.asyncProperty(nodeNameArb, flatFactsArb, async (nodeId, flatFacts) => { + const factFileContent = JSON.stringify(flatFacts); + + vi.mocked(fs.existsSync).mockReturnValue(true); + vi.mocked(fs.readFileSync).mockReturnValue(factFileContent); + + const result = await factService.getFacts(nodeId); + + // Should successfully parse + expect(result.source).toBe("local"); + expect(result.facts).toBeDefined(); + expect(result.facts.nodeId).toBe(nodeId); + + // All original values should be accessible + for (const [key, value] of Object.entries(flatFacts)) { + expect( + 
isValueAccessible(result.facts.facts, key, value), + `Value for key '${key}' should be accessible in flat format` + ).toBe(true); + } + }), + propertyTestConfig + ); + }); +}); diff --git a/backend/test/properties/hiera/property-8.test.ts b/backend/test/properties/hiera/property-8.test.ts new file mode 100644 index 0000000..52a190a --- /dev/null +++ b/backend/test/properties/hiera/property-8.test.ts @@ -0,0 +1,298 @@ +/** + * Feature: hiera-codebase-integration, Property 8: Key Scanning Completeness + * Validates: Requirements 4.1, 4.2, 4.3, 4.4 + * + * This property test verifies that: + * For any hieradata directory containing YAML files, the Hiera_Scanner SHALL + * discover all unique keys across all files, tracking for each key: the file + * path, hierarchy level, line number, and value. + */ + +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import fc from "fast-check"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import { stringify as yamlStringify } from "yaml"; +import { HieraScanner } from "../../../src/integrations/hiera/HieraScanner"; + +describe("Property 8: Key Scanning Completeness", () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + let testDir: string; + let scanner: HieraScanner; + + beforeEach(() => { + testDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-prop8-")); + scanner = new HieraScanner(testDir, "data"); + fs.mkdirSync(path.join(testDir, "data"), { recursive: true }); + }); + + afterEach(() => { + scanner.stopWatching(); + fs.rmSync(testDir, { recursive: true, force: true }); + }); + + // Generator for valid Hiera key names (Puppet-style with double colons) + const hieraKeyNameArb = fc + .array( + fc.string({ minLength: 1, maxLength: 15 }).filter((s) => /^[a-z][a-z0-9_]*$/.test(s)), + { minLength: 1, maxLength: 4 } + ) + .map((parts) => parts.join("::")); + + // Generator for simple values (string, number, boolean) + const simpleValueArb = 
fc.oneof( + fc.string({ minLength: 1, maxLength: 20 }).filter((s) => /^[a-zA-Z0-9_-]+$/.test(s)), + fc.integer({ min: 0, max: 10000 }), + fc.boolean() + ); + + // Generator for hieradata content (flat key-value pairs) + const hieradataArb = fc + .array(fc.tuple(hieraKeyNameArb, simpleValueArb), { minLength: 1, maxLength: 10 }) + .map((pairs) => { + const obj: Record = {}; + for (const [key, value] of pairs) { + obj[key] = value; + } + return obj; + }); + + // Generator for file names + const fileNameArb = fc + .string({ minLength: 1, maxLength: 20 }) + .filter((s) => /^[a-z][a-z0-9_-]*$/.test(s)) + .map((s) => `${s}.yaml`); + + /** + * Helper to create a test file + */ + function createTestFile(relativePath: string, data: Record): void { + const fullPath = path.join(testDir, relativePath); + const dir = path.dirname(fullPath); + fs.mkdirSync(dir, { recursive: true }); + fs.writeFileSync(fullPath, yamlStringify(data), "utf-8"); + } + + it("should discover all keys from any valid hieradata file", async () => { + await fc.assert( + fc.asyncProperty(hieradataArb, fileNameArb, async (data, fileName) => { + // Create the test file + const relativePath = `data/${fileName}`; + createTestFile(relativePath, data); + + // Scan the directory + const index = await scanner.scan(); + + // All keys from the data should be discovered + const expectedKeys = Object.keys(data); + for (const key of expectedKeys) { + expect(index.keys.has(key)).toBe(true); + } + + // Clean up for next iteration + fs.rmSync(path.join(testDir, relativePath), { force: true }); + scanner = new HieraScanner(testDir, "data"); + }), + propertyTestConfig + ); + }); + + + it("should track file path for each discovered key", async () => { + await fc.assert( + fc.asyncProperty(hieradataArb, fileNameArb, async (data, fileName) => { + const relativePath = `data/${fileName}`; + createTestFile(relativePath, data); + + const index = await scanner.scan(); + + // Each key should have a location with the correct file path 
+ for (const key of Object.keys(data)) { + const hieraKey = index.keys.get(key); + expect(hieraKey).toBeDefined(); + expect(hieraKey!.locations.length).toBeGreaterThan(0); + expect(hieraKey!.locations[0].file).toBe(relativePath); + } + + // Clean up + fs.rmSync(path.join(testDir, relativePath), { force: true }); + scanner = new HieraScanner(testDir, "data"); + }), + propertyTestConfig + ); + }); + + it("should track hierarchy level for each discovered key", async () => { + await fc.assert( + fc.asyncProperty(hieradataArb, async (data) => { + // Use common.yaml to get predictable hierarchy level + const relativePath = "data/common.yaml"; + createTestFile(relativePath, data); + + const index = await scanner.scan(); + + // Each key should have a location with hierarchy level + for (const key of Object.keys(data)) { + const hieraKey = index.keys.get(key); + expect(hieraKey).toBeDefined(); + expect(hieraKey!.locations[0].hierarchyLevel).toBe("Common data"); + } + + // Clean up + fs.rmSync(path.join(testDir, relativePath), { force: true }); + scanner = new HieraScanner(testDir, "data"); + }), + propertyTestConfig + ); + }); + + it("should track value for each discovered key", async () => { + await fc.assert( + fc.asyncProperty(hieradataArb, fileNameArb, async (data, fileName) => { + const relativePath = `data/${fileName}`; + createTestFile(relativePath, data); + + const index = await scanner.scan(); + + // Each key should have the correct value stored + for (const [key, expectedValue] of Object.entries(data)) { + const hieraKey = index.keys.get(key); + expect(hieraKey).toBeDefined(); + expect(hieraKey!.locations[0].value).toEqual(expectedValue); + } + + // Clean up + fs.rmSync(path.join(testDir, relativePath), { force: true }); + scanner = new HieraScanner(testDir, "data"); + }), + propertyTestConfig + ); + }); + + it("should track all occurrences when key appears in multiple files", async () => { + // Generator for two different hieradata objects that share at least one 
key + const sharedKeyArb = hieraKeyNameArb; + const value1Arb = simpleValueArb; + const value2Arb = simpleValueArb; + + await fc.assert( + fc.asyncProperty(sharedKeyArb, value1Arb, value2Arb, async (sharedKey, value1, value2) => { + // Create two files with the same key + createTestFile("data/common.yaml", { [sharedKey]: value1 }); + createTestFile("data/nodes/node1.yaml", { [sharedKey]: value2 }); + + const index = await scanner.scan(); + + // The key should have two locations + const hieraKey = index.keys.get(sharedKey); + expect(hieraKey).toBeDefined(); + expect(hieraKey!.locations.length).toBe(2); + + // Both values should be tracked + const values = hieraKey!.locations.map((loc) => loc.value); + expect(values).toContain(value1); + expect(values).toContain(value2); + + // Both files should be tracked + const files = hieraKey!.locations.map((loc) => loc.file); + expect(files).toContain("data/common.yaml"); + expect(files).toContain("data/nodes/node1.yaml"); + + // Clean up + fs.rmSync(path.join(testDir, "data/common.yaml"), { force: true }); + fs.rmSync(path.join(testDir, "data/nodes"), { recursive: true, force: true }); + scanner = new HieraScanner(testDir, "data"); + }), + propertyTestConfig + ); + }); + + it("should handle nested keys with dot notation", async () => { + // Generator for nested data structure + const nestedDataArb = fc + .tuple( + fc.string({ minLength: 1, maxLength: 10 }).filter((s) => /^[a-z][a-z0-9_]*$/.test(s)), + fc.string({ minLength: 1, maxLength: 10 }).filter((s) => /^[a-z][a-z0-9_]*$/.test(s)), + simpleValueArb + ) + .map(([parent, child, value]) => ({ + [parent]: { + [child]: value, + }, + })); + + await fc.assert( + fc.asyncProperty(nestedDataArb, async (data) => { + createTestFile("data/common.yaml", data); + + const index = await scanner.scan(); + + // Get the parent and child keys + const parentKey = Object.keys(data)[0]; + const childKey = Object.keys(data[parentKey] as Record)[0]; + const expectedNestedKey = 
`${parentKey}.${childKey}`; + + // Both parent and nested key should be discovered + expect(index.keys.has(parentKey)).toBe(true); + expect(index.keys.has(expectedNestedKey)).toBe(true); + + // Clean up + fs.rmSync(path.join(testDir, "data/common.yaml"), { force: true }); + scanner = new HieraScanner(testDir, "data"); + }), + propertyTestConfig + ); + }); + + it("should count total keys and files correctly", async () => { + // Generator for multiple files with different keys + const multiFileDataArb = fc.array( + fc.tuple( + fileNameArb, + fc.array(fc.tuple(hieraKeyNameArb, simpleValueArb), { minLength: 1, maxLength: 5 }) + ), + { minLength: 1, maxLength: 3 } + ); + + await fc.assert( + fc.asyncProperty(multiFileDataArb, async (filesData) => { + // Create all files + const allKeys = new Set(); + const fileNames = new Set(); + + for (const [fileName, pairs] of filesData) { + // Ensure unique file names + if (fileNames.has(fileName)) continue; + fileNames.add(fileName); + + const data: Record = {}; + for (const [key, value] of pairs) { + data[key] = value; + allKeys.add(key); + } + createTestFile(`data/${fileName}`, data); + } + + const index = await scanner.scan(); + + // Total keys should match unique keys + expect(index.totalKeys).toBe(allKeys.size); + + // Total files should match created files + expect(index.totalFiles).toBe(fileNames.size); + + // Clean up + for (const fileName of fileNames) { + fs.rmSync(path.join(testDir, `data/${fileName}`), { force: true }); + } + scanner = new HieraScanner(testDir, "data"); + }), + propertyTestConfig + ); + }); +}); diff --git a/backend/test/properties/hiera/property-9.test.ts b/backend/test/properties/hiera/property-9.test.ts new file mode 100644 index 0000000..17ea7bd --- /dev/null +++ b/backend/test/properties/hiera/property-9.test.ts @@ -0,0 +1,268 @@ +/** + * Feature: hiera-codebase-integration, Property 9: Key Search Functionality + * Validates: Requirements 4.5, 7.4 + * + * This property test verifies that: + * For 
any key index and search query string, searching SHALL return all keys + * whose names contain the query string as a substring (case-insensitive). + */ + +import { describe, it, expect, beforeEach, afterEach } from "vitest"; +import fc from "fast-check"; +import * as fs from "fs"; +import * as path from "path"; +import * as os from "os"; +import { stringify as yamlStringify } from "yaml"; +import { HieraScanner } from "../../../src/integrations/hiera/HieraScanner"; + +describe("Property 9: Key Search Functionality", () => { + const propertyTestConfig = { + numRuns: 100, + verbose: false, + }; + + let testDir: string; + let scanner: HieraScanner; + + beforeEach(() => { + testDir = fs.mkdtempSync(path.join(os.tmpdir(), "hiera-prop9-")); + scanner = new HieraScanner(testDir, "data"); + fs.mkdirSync(path.join(testDir, "data"), { recursive: true }); + }); + + afterEach(() => { + scanner.stopWatching(); + fs.rmSync(testDir, { recursive: true, force: true }); + }); + + // Generator for valid Hiera key names + const hieraKeyNameArb = fc + .array( + fc.string({ minLength: 1, maxLength: 10 }).filter((s) => /^[a-z][a-z0-9_]*$/.test(s)), + { minLength: 1, maxLength: 3 } + ) + .map((parts) => parts.join("::")); + + // Generator for simple values + const simpleValueArb = fc.oneof( + fc.string({ minLength: 1, maxLength: 10 }).filter((s) => /^[a-zA-Z0-9_-]+$/.test(s)), + fc.integer({ min: 0, max: 1000 }) + ); + + // Generator for a set of unique keys + const keySetArb = fc + .array(hieraKeyNameArb, { minLength: 3, maxLength: 15 }) + .map((keys) => [...new Set(keys)]); + + /** + * Helper to create test data with given keys + */ + function createTestData(keys: string[]): void { + const data: Record = {}; + for (const key of keys) { + data[key] = `value_for_${key}`; + } + const fullPath = path.join(testDir, "data/common.yaml"); + fs.writeFileSync(fullPath, yamlStringify(data), "utf-8"); + } + + it("should return all keys containing the query as substring", async () => { + await 
fc.assert( + fc.asyncProperty(keySetArb, async (keys) => { + if (keys.length === 0) return; + + createTestData(keys); + await scanner.scan(); + + // Pick a random key and extract a substring from it + const randomKey = keys[Math.floor(Math.random() * keys.length)]; + const startIdx = Math.floor(Math.random() * Math.max(1, randomKey.length - 2)); + const endIdx = startIdx + Math.min(3, randomKey.length - startIdx); + const query = randomKey.substring(startIdx, endIdx); + + if (query.length === 0) return; + + const results = scanner.searchKeys(query); + + // All results should contain the query + for (const result of results) { + expect(result.name.toLowerCase()).toContain(query.toLowerCase()); + } + + // All keys containing the query should be in results + const resultNames = results.map((r) => r.name); + for (const key of keys) { + if (key.toLowerCase().includes(query.toLowerCase())) { + expect(resultNames).toContain(key); + } + } + }), + propertyTestConfig + ); + }); + + + it("should be case-insensitive", async () => { + await fc.assert( + fc.asyncProperty(keySetArb, async (keys) => { + if (keys.length === 0) return; + + createTestData(keys); + await scanner.scan(); + + // Pick a random key + const randomKey = keys[Math.floor(Math.random() * keys.length)]; + const query = randomKey.substring(0, Math.min(3, randomKey.length)); + + if (query.length === 0) return; + + // Search with different cases + const lowerResults = scanner.searchKeys(query.toLowerCase()); + const upperResults = scanner.searchKeys(query.toUpperCase()); + const mixedResults = scanner.searchKeys( + query + .split("") + .map((c, i) => (i % 2 === 0 ? 
c.toLowerCase() : c.toUpperCase())) + .join("") + ); + + // All should return the same results + const lowerNames = lowerResults.map((r) => r.name).sort(); + const upperNames = upperResults.map((r) => r.name).sort(); + const mixedNames = mixedResults.map((r) => r.name).sort(); + + expect(lowerNames).toEqual(upperNames); + expect(lowerNames).toEqual(mixedNames); + }), + propertyTestConfig + ); + }); + + it("should return all keys for empty query", async () => { + await fc.assert( + fc.asyncProperty(keySetArb, async (keys) => { + if (keys.length === 0) return; + + createTestData(keys); + await scanner.scan(); + + const emptyResults = scanner.searchKeys(""); + const whitespaceResults = scanner.searchKeys(" "); + + // Should return all keys + expect(emptyResults.length).toBe(keys.length); + expect(whitespaceResults.length).toBe(keys.length); + }), + propertyTestConfig + ); + }); + + it("should return empty array for non-matching query", async () => { + await fc.assert( + fc.asyncProperty(keySetArb, async (keys) => { + if (keys.length === 0) return; + + createTestData(keys); + await scanner.scan(); + + // Use a query that definitely won't match any key + const nonMatchingQuery = "ZZZZNONEXISTENT12345"; + + const results = scanner.searchKeys(nonMatchingQuery); + + expect(results.length).toBe(0); + }), + propertyTestConfig + ); + }); + + it("should support partial key name matching", async () => { + // Generator for keys with common prefix - ensure unique suffixes + const prefixedKeysArb = fc + .string({ minLength: 3, maxLength: 8 }) + .filter((s) => /^[a-z][a-z0-9_]*$/.test(s)) + .map((prefix) => { + // Create unique suffixes + return [`${prefix}::aaa`, `${prefix}::bbb`, `${prefix}::ccc`]; + }); + + await fc.assert( + fc.asyncProperty(prefixedKeysArb, async (keys) => { + createTestData(keys); + await scanner.scan(); + + // Extract the common prefix + const prefix = keys[0].split("::")[0]; + + const results = scanner.searchKeys(prefix); + + // All keys with the prefix 
should be found + expect(results.length).toBe(keys.length); + for (const result of results) { + expect(result.name.startsWith(prefix)).toBe(true); + } + }), + propertyTestConfig + ); + }); + + it("should find keys by suffix", async () => { + // Generator for keys with common suffix - ensure unique prefixes + const suffixedKeysArb = fc + .tuple( + fc.string({ minLength: 3, maxLength: 8 }).filter((s) => /^[a-z][a-z0-9_]*$/.test(s)) + ) + .map(([suffix]) => { + // Create unique prefixes + return [`aaa::${suffix}`, `bbb::${suffix}`, `ccc::${suffix}`]; + }); + + await fc.assert( + fc.asyncProperty(suffixedKeysArb, async (keys) => { + createTestData(keys); + await scanner.scan(); + + // Extract the common suffix + const suffix = keys[0].split("::").pop()!; + + const results = scanner.searchKeys(suffix); + + // All keys with the suffix should be found + expect(results.length).toBe(keys.length); + for (const result of results) { + expect(result.name.endsWith(suffix)).toBe(true); + } + }), + propertyTestConfig + ); + }); + + it("should find keys by middle substring", async () => { + // Create keys with a known middle part that won't match other keys + const middlePartArb = fc + .string({ minLength: 4, maxLength: 6 }) + .filter((s) => /^[xyz][a-z0-9_]*$/.test(s)); // Start with x, y, or z to avoid matching "other" + + await fc.assert( + fc.asyncProperty(middlePartArb, async (middlePart) => { + const keys = [ + `aaa::${middlePart}::bbb`, + `ccc::${middlePart}::ddd`, + `eee::fff::ggg`, + ]; + + createTestData(keys); + await scanner.scan(); + + const results = scanner.searchKeys(middlePart); + + // Should find exactly the keys containing the middle part + expect(results.length).toBe(2); + for (const result of results) { + expect(result.name).toContain(middlePart); + } + }), + propertyTestConfig + ); + }); +}); diff --git a/frontend/src/components/CatalogViewer.svelte b/frontend/src/components/CatalogViewer.svelte index c3612b1..0c5341f 100644 --- 
a/frontend/src/components/CatalogViewer.svelte +++ b/frontend/src/components/CatalogViewer.svelte @@ -27,8 +27,8 @@ environment: string; producer_timestamp: string; hash: string; - resources: Resource[]; - edges: Edge[]; + resources?: Resource[]; + edges?: Edge[]; } interface Props { @@ -47,8 +47,9 @@ // Group resources by type const resourcesByType = $derived(() => { const grouped = new Map(); + const resources = catalog.resources ?? []; - for (const resource of catalog.resources) { + for (const resource of resources) { if (!grouped.has(resource.type)) { grouped.set(resource.type, []); } @@ -86,7 +87,8 @@ // Get relationships for a resource function getResourceRelationships(resource: Resource): Edge[] { - return catalog.edges.filter(edge => + const edges = catalog.edges ?? []; + return edges.filter(edge => (edge.source.type === resource.type && edge.source.title === resource.title) || (edge.target.type === resource.type && edge.target.title === resource.title) ); @@ -119,10 +121,9 @@
- +
-

Puppet Catalog

-
+
Environment: {catalog.environment} @@ -137,7 +138,7 @@
Resources: - {catalog.resources.length} + {catalog.resources?.length ?? 0}
@@ -226,41 +227,21 @@
{#each resources as resource} -
- -
+ {/each}
diff --git a/frontend/src/components/CodeAnalysisTab.svelte b/frontend/src/components/CodeAnalysisTab.svelte new file mode 100644 index 0000000..cb9f135 --- /dev/null +++ b/frontend/src/components/CodeAnalysisTab.svelte @@ -0,0 +1,832 @@ + + +
+ +
+ + + + +
+ + + {#if loading} +
+ +
+ {:else if error} + loadSectionData(activeSection)} + /> + + + {#if error.includes('not configured')} +
+
+ + + +
+

Setup Required

+

+ To view code analysis, you need to configure the Hiera integration with your Puppet control repository. +

+
    +
  1. Go to the Integration Setup page
  2. +
  3. Configure the path to your Puppet control repository
  4. +
  5. Ensure the repository contains Puppet manifests and a Puppetfile
  6. +
  7. Return to this page to view code analysis
  8. +
+
+
+
+ {/if} + {:else} + + + {#if activeSection === 'statistics' && statistics} +
+ +
+
+
{statistics.totalManifests}
+
Manifests
+
+
+
{statistics.totalClasses}
+
Classes
+
+
+
{statistics.totalDefinedTypes}
+
Defined Types
+
+
+
{statistics.totalFunctions}
+
Functions
+
+
+
{statistics.linesOfCode.toLocaleString()}
+
Lines of Code
+
+
+ + + {#if statistics.mostUsedClasses.length > 0} +
+
+

Most Used Classes

+

Classes ranked by usage frequency across nodes

+
+
+ {#each statistics.mostUsedClasses.slice(0, 10) as classUsage, index (classUsage.name)} +
+
+ + {index + 1} + +
+ {classUsage.name} + {#if expertMode.enabled && classUsage.nodes.length > 0} +

+ Nodes: {classUsage.nodes.slice(0, 3).join(', ')}{classUsage.nodes.length > 3 ? ` +${classUsage.nodes.length - 3} more` : ''} +

+ {/if} +
+
+
+ + {classUsage.usageCount} node{classUsage.usageCount !== 1 ? 's' : ''} + +
+
+ {/each} +
+
+ {/if} + + + {#if statistics.mostUsedResources.length > 0} +
+
+

Most Used Resource Types

+

Resource types ranked by total count

+
+
+ {#each statistics.mostUsedResources.slice(0, 10) as resource, index (resource.type)} +
+
+ + {index + 1} + + {resource.type} +
+ + {resource.count.toLocaleString()} instance{resource.count !== 1 ? 's' : ''} + +
+ {/each} +
+
+ {/if} +
+ {/if} + + + {#if activeSection === 'unused' && unusedCode} +
+ +
+
+
{unusedCode.totals.classes}
+
Unused Classes
+
+
+
{unusedCode.totals.definedTypes}
+
Unused Defined Types
+
+
+
{unusedCode.totals.hieraKeys}
+
Unused Hiera Keys
+
+
+ + +
+ Filter: +
+ + + + +
+
+ + + {#if filteredUnusedItems().length === 0} +
+ + + +

No Unused Code Found

+

+ {unusedTypeFilter === 'all' ? 'All code in your control repository is being used.' : `No unused ${formatType(unusedTypeFilter).toLowerCase()}s found.`} +

+
+ {:else} +
+
+ {#each filteredUnusedItems() as item (item.name + item.file + item.line)} +
+
+
+ {item.name} + + {formatType(item.type)} + +
+

+ {item.file}:{item.line} +

+
+
+ {/each} +
+
+

+ Showing {filteredUnusedItems().length} unused item{filteredUnusedItems().length !== 1 ? 's' : ''} +

+ {/if} +
+ {/if} + + + + {#if activeSection === 'lint' && lintData} +
+ +
+
+
{lintData.counts['error'] || 0}
+
Errors
+
+
+
{lintData.counts['warning'] || 0}
+
Warnings
+
+
+
{lintData.counts['info'] || 0}
+
Info
+
+
+ + +
+ Filter by severity: +
+ + + +
+ {#if lintSeverityFilter.length > 0} + + {/if} +
+ + + {#if lintData.issues.length === 0} +
+ + + +

No Lint Issues Found

+

+ {lintSeverityFilter.length > 0 ? 'No issues match the selected filters.' : 'Your Puppet code has no lint issues.'} +

+
+ {:else} +
+
+ {#each lintData.issues as issue (issue.file + issue.line + issue.column + issue.rule)} +
+
+
+
+ + {issue.severity} + + {issue.rule} + {#if issue.fixable} + + Fixable + + {/if} +
+

{issue.message}

+

+ {issue.file}:{issue.line}:{issue.column} +

+
+
+
+ {/each} +
+
+ + + {#if lintData.totalPages > 1} +
+

+ Showing {(lintData.page - 1) * lintData.pageSize + 1} - {Math.min(lintData.page * lintData.pageSize, lintData.total)} of {lintData.total} issues +

+
+ + + Page {lintData.page} of {lintData.totalPages} + + +
+
+ {/if} + {/if} +
+ {/if} + + + {#if activeSection === 'modules' && modulesData} +
+ +
+
+
{modulesData.summary.total}
+
Total Modules
+
+
+
{modulesData.summary.upToDate}
+
Up to Date
+
+
+
{modulesData.summary.withUpdates}
+
Updates Available
+
+
+
{modulesData.summary.withSecurityAdvisories}
+
Security Advisories
+
+
+ + + {#if modulesData.modulesWithSecurityAdvisories.length > 0} +
+
+
+ + + +

Security Advisories

+
+

These modules have known security vulnerabilities. Update them as soon as possible.

+
+
+ {#each modulesData.modulesWithSecurityAdvisories as module (module.name)} +
+
+ {module.name} +
+ {module.currentVersion} + + + + {module.latestVersion} +
+
+ + {module.source} + +
+ {/each} +
+
+ {/if} + + + {#if modulesData.modulesWithUpdates.length > 0} +
+
+

Updates Available

+

These modules have newer versions available.

+
+
+ {#each modulesData.modulesWithUpdates.filter(m => !m.hasSecurityAdvisory) as module (module.name)} +
+
+ {module.name} +
+ {module.currentVersion} + + + + {module.latestVersion} +
+
+ + {module.source} + +
+ {/each} +
+
+ {/if} + + +
+
+

All Modules

+
+
+ {#each modulesData.modules as module (module.name)} +
+
+
+
+ {module.name} + {#if module.hasSecurityAdvisory} + + Security + + {:else if module.currentVersion !== module.latestVersion} + + Update + + {:else} + + Current + + {/if} +
+
+ {module.currentVersion} + {#if module.currentVersion !== module.latestVersion} + + + + {module.latestVersion} + {/if} +
+
+
+ + {module.source} + +
+ {/each} +
+
+
+ {/if} + {/if} +
diff --git a/frontend/src/components/EventsViewer.svelte b/frontend/src/components/EventsViewer.svelte index 969c99c..fe1407e 100644 --- a/frontend/src/components/EventsViewer.svelte +++ b/frontend/src/components/EventsViewer.svelte @@ -1,4 +1,5 @@
@@ -206,6 +269,26 @@ {/if}
+ +
+ +
+
+ {#each timeFilterOptions as option (option.value)} + + {/each} +
+
+
+
- {#if statusFilter !== 'all' || resourceTypeFilter || searchQuery} + {#if statusFilter !== 'all' || resourceTypeFilter || searchQuery || timeFilter !== 'last-run'} + {/if} +
+ + + {#if searchQuery && !selectedKey} +
+ {#if searchLoading} +
+ +
+ {:else if searchError} +
{searchError}
+ {:else if searchResults.length === 0} +
+ No keys found matching "{searchQuery}" +
+ {:else} + {#each searchResults as key (key.name)} + + {/each} + {/if} +
+ {/if} +
+ + + + {#if selectedKey} +
+ +
+
+
+ +

{selectedKey}

+
+ + +
+ View: +
+ + +
+
+
+
+ + +
+ {#if keyDataLoading} +
+ +
+ {:else if keyDataError} + selectKey(selectedKey!)} + /> + {:else if keyNodeData} + +
+ {keyNodeData.total} node{keyNodeData.total !== 1 ? 's' : ''} + + {keyNodeData.nodes.filter(n => n.found).length} with value + + + {keyNodeData.nodes.filter(n => !n.found).length} not defined + + {#if keyNodeData.groupedByValue.length > 0} + + {keyNodeData.groupedByValue.length} unique value{keyNodeData.groupedByValue.length !== 1 ? 's' : ''} + + {/if} +
+ + {#if viewMode === 'grouped'} + +
+ {#if keyNodeData.groupedByValue.length === 0} +
+ + + +

+ This key is not defined for any nodes +

+
+ {:else} + {#each keyNodeData.groupedByValue as group, index (group.valueString)} +
+ +
+
+
+ + Value {index + 1} + + + {group.nodes.length} node{group.nodes.length !== 1 ? 's' : ''} + +
+
+
+ {#if isComplexValue(group.value)} +
{formatValue(group.value)}
+ {:else} + {formatValue(group.value)} + {/if} +
+
+ + +
+
+ {#each group.nodes as nodeId (nodeId)} + + {/each} +
+
+
+ {/each} + {/if} + + + {#if keyNodeData.nodes.filter(n => !n.found).length > 0} +
+
+
+ + Not Defined + + + {keyNodeData.nodes.filter(n => !n.found).length} node{keyNodeData.nodes.filter(n => !n.found).length !== 1 ? 's' : ''} + +
+

+ This key is not defined in any hierarchy level for these nodes +

+
+
+
+ {#each keyNodeData.nodes.filter(n => !n.found) as node (node.nodeId)} + + {/each} +
+
+
+ {/if} +
+ {:else} + +
+ {#if keyNodeData.nodes.length === 0} +
+

No nodes found

+
+ {:else} + {#each keyNodeData.nodes as node (node.nodeId)} +
+
+
+ + {#if node.found} +
+ {#if isComplexValue(node.value)} +
{formatValue(node.value)}
+ {:else} + {formatValue(node.value)} + {/if} +
+ {#if expertMode.enabled} +
+ Source: {node.sourceFile} + Level: {node.hierarchyLevel} +
+ {/if} + {:else} +

Not defined

+ {/if} +
+
+ {#if node.found} + + Defined + + {:else} + + Not Defined + + {/if} +
+
+
+ {/each} + {/if} +
+ {/if} + {/if} +
+
+ {/if} + + + {#if !searchQuery && !selectedKey} +
+ + + +

Search for a Hiera Key

+

+ Enter a key name above to see its resolved value across all nodes. You can search by partial key name. +

+
+

Example searches:

+
+ + + +
+
+
+ {/if} + diff --git a/frontend/src/components/HieraSetupGuide.svelte b/frontend/src/components/HieraSetupGuide.svelte new file mode 100644 index 0000000..0a031c3 --- /dev/null +++ b/frontend/src/components/HieraSetupGuide.svelte @@ -0,0 +1,606 @@ + + +
+
+

Hiera Integration Setup

+

+ Configure Pabawi to analyze your Puppet control repository, providing deep visibility into + Hiera data, key resolution, and static code analysis capabilities. +

+
+ +
+
+

Prerequisites

+
    +
  • + • + A Puppet control repository with Hiera 5 configuration +
  • +
  • + • + Local filesystem access to the control repository directory +
  • +
  • + • + (Optional) PuppetDB integration for fact retrieval +
  • +
  • + • + (Optional) Local fact files in Puppetserver format +
  • +
+
+
+ +
+
+

Step 1: Prepare Your Control Repository

+

+ Ensure your control repository follows the standard Puppet structure: +

+ +
+
+ Expected Directory Structure + +
+
{controlRepoStructure}
+
+ +
+
+ Example hiera.yaml + +
+
{hieraYamlExample}
+
+
+
+ +
+
+

Step 2: Configure Control Repository Path

+

+ Add the basic Hiera configuration to your backend/.env file: +

+ +
+
+ Basic Configuration + +
+
{basicConfig}
+
+ +
+

Configuration Options:

+
    +
  • HIERA_CONTROL_REPO_PATH: Absolute path to your control repository
  • +
  • HIERA_CONFIG_PATH: Path to hiera.yaml relative to control repo (default: hiera.yaml)
  • +
  • HIERA_ENVIRONMENTS: JSON array of environment names to scan
  • +
+
+
+
+ +
+
+

Step 3: Configure Fact Source

+

+ Choose how Pabawi retrieves node facts for Hiera resolution: +

+ +
+ + + +
+ + {#if selectedFactSource === "puppetdb"} +
+
+ PuppetDB Fact Source + +
+
{puppetdbFactConfig}
+
+ +
+

āœ… PuppetDB Benefits:

+
    +
  • • Facts are always current from the last Puppet run
  • +
  • • No manual fact file management required
  • +
  • • Automatic discovery of all nodes
  • +
  • • Requires PuppetDB integration to be configured
  • +
+
+ {:else} +
+
+ Local Fact Files + +
+
{localFactConfig}
+
+ +
+

Local Fact File Format

+

+ Fact files should be JSON files named by node hostname (e.g., web01.example.com.json): +

+
+
{'{'}
+
"name": "web01.example.com",
+
"values": {'{'}
+
"os": {'{'} "family": "RedHat", "name": "CentOS" {'}'},
+
"networking": {'{'} "hostname": "web01" {'}'},
+
"environment": "production"
+
{'}'}
+
{'}'}
+
+
+ +
+

āš ļø Local Facts Limitations:

+
    +
  • • Facts may become outdated if not regularly exported
  • +
  • • Manual management of fact files required
  • +
  • • Export facts using: puppet facts --render-as json > node.json
  • +
+
+ {/if} +
+
+ +
+
+

Step 4: Catalog Compilation Mode (Optional)

+

+ Enable catalog compilation for advanced Hiera resolution that includes Puppet code variables: +

+ +
+ + + {catalogCompilationEnabled ? 'Catalog Compilation Enabled' : 'Catalog Compilation Disabled (Default)'} + +
+ + {#if catalogCompilationEnabled} +
+
+ Catalog Compilation Config + +
+
{catalogCompilationConfig}
+
+ {/if} + +
+
+

āœ… Benefits:

+
    +
  • • Resolves variables defined in Puppet code
  • +
  • • More accurate Hiera resolution
  • +
  • • Detects class parameter defaults
  • +
+
+ +
+

āš ļø Performance Impact:

+
    +
  • • Slower resolution (compiles full catalog)
  • +
  • • Higher memory usage
  • +
  • • Requires Puppetserver access
  • +
  • • Results are cached to mitigate impact
  • +
+
+
+ +
+

šŸ’” Recommendation:

+

+ Start with catalog compilation disabled. Most Hiera lookups work correctly with fact-only resolution. + Enable catalog compilation only if you need to resolve variables that are defined in Puppet code (not facts). +

+
+
+
+ +
+
+

Step 5: Advanced Configuration (Optional)

+ + + + {#if showAdvanced} +
+
+ Advanced Options + +
+
{advancedConfig}
+
+ +
+

Configuration Options:

+
    +
  • HIERA_CACHE_TTL: Cache duration in milliseconds (default: 300000 = 5 min)
  • +
  • HIERA_CACHE_MAX_ENTRIES: Maximum cached entries (default: 10000)
  • +
  • HIERA_CODE_ANALYSIS_ENABLED: Enable static code analysis
  • +
  • HIERA_CODE_ANALYSIS_LINT_ENABLED: Enable Puppet lint checks
  • +
  • HIERA_CODE_ANALYSIS_MODULE_UPDATE_CHECK: Check Puppetfile for updates
  • +
  • HIERA_CODE_ANALYSIS_INTERVAL: Analysis refresh interval (default: 3600000 = 1 hour)
  • +
  • HIERA_CODE_ANALYSIS_EXCLUSION_PATTERNS: Glob patterns to exclude from analysis
  • +
+
+ {/if} +
+
+ +
+
+

Step 6: Restart Backend Server

+

Apply the configuration by restarting the backend:

+
+
cd backend
+
npm run dev
+
+
+
+ +
+
+

Step 7: Verify Connection

+

Test the Hiera integration configuration:

+ + + + {#if testResult} +
+
+ {testResult.success ? 'āœ…' : 'āŒ'} +
+

+ {testResult.success ? 'Connection Successful' : 'Connection Failed'} +

+

+ {testResult.message} +

+ {#if testResult.details} +
+ + Show Details + +
{JSON.stringify(testResult.details, null, 2)}
+
+ {/if} +
+
+
+ {/if} + +
+

Or verify via API:

+
+ curl http://localhost:3000/api/integrations/hiera/status +
+
+
+
+ +
+
+

Features Available

+
+
+ šŸ”‘ +

Hiera Key Discovery

+

Browse and search all Hiera keys

+
+
+ šŸŽÆ +

Key Resolution

+

Resolve keys for specific nodes

+
+
+ šŸ“Š +

Code Analysis

+

Detect unused code and lint issues

+
+
+ šŸ“¦ +

Module Updates

+

Check Puppetfile for updates

+
+
+
+
+ +
+
+

Troubleshooting

+ +
+
+ + Control Repository Not Found + +
+

Error: "Control repository path does not exist"

+
    +
  • Verify HIERA_CONTROL_REPO_PATH is an absolute path
  • +
  • Check directory permissions are readable by the backend process
  • +
  • Ensure the path exists: ls -la /path/to/control-repo
  • +
+
+
+ +
+ + Invalid hiera.yaml + +
+

Error: "Failed to parse hiera.yaml"

+
    +
  • Ensure hiera.yaml uses Hiera 5 format (version: 5)
  • +
  • Validate YAML syntax: ruby -ryaml -e "YAML.load_file('hiera.yaml')"
  • +
  • Check for indentation errors in hierarchy definitions
  • +
+
+
+ +
+ + Facts Not Available + +
+

Error: "No facts available for node"

+
    +
  • If using PuppetDB: Verify PuppetDB integration is configured and healthy
  • +
  • If using local facts: Check HIERA_FACT_SOURCE_LOCAL_PATH points to correct directory
  • +
  • Ensure fact files are named correctly: hostname.json
  • +
  • Verify fact file format matches Puppetserver export format
  • +
+
+
+ +
+ + Hiera Resolution Incomplete + +
+

Issue: Some Hiera variables not resolving correctly

+
    +
  • Variables from Puppet code require catalog compilation mode
  • +
  • Enable HIERA_CATALOG_COMPILATION_ENABLED=true for full resolution
  • +
  • Check that all required facts are available for the node
  • +
  • Verify hierarchy paths use correct variable syntax: %{'{'}facts.os.family{'}'}
  • +
+
+
+ +
+ + Code Analysis Not Working + +
+

Issue: Code analysis results are empty or incomplete

+
    +
  • Ensure HIERA_CODE_ANALYSIS_ENABLED=true
  • +
  • Check exclusion patterns aren't too broad
  • +
  • Verify manifests directory exists in control repo
  • +
  • Wait for analysis interval to complete (default: 1 hour)
  • +
+
+
+
+
+
+ +
+

+ For detailed documentation, see configuration.md +

+
+
diff --git a/frontend/src/components/IntegrationStatus.svelte b/frontend/src/components/IntegrationStatus.svelte index 9787511..9bd8dc8 100644 --- a/frontend/src/components/IntegrationStatus.svelte +++ b/frontend/src/components/IntegrationStatus.svelte @@ -86,6 +86,147 @@ } } + // Get integration-specific icon (overrides type icon for specific integrations) + function getIntegrationIcon(name: string, type: string): string { + switch (name) { + case 'hiera': + // Hiera uses a hierarchical/layers icon + return 'M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10'; + case 'puppetdb': + // Database icon + return 'M4 7v10c0 2.21 3.582 4 8 4s8-1.79 8-4V7M4 7c0 2.21 3.582 4 8 4s8-1.79 8-4M4 7c0-2.21 3.582-4 8-4s8 1.79 8 4m0 5c0 2.21-3.582 4-8 4s-8-1.79-8-4'; + case 'puppetserver': + // Server icon + return 'M5 12h14M5 12a2 2 0 01-2-2V6a2 2 0 012-2h14a2 2 0 012 2v4a2 2 0 01-2 2M5 12a2 2 0 00-2 2v4a2 2 0 002 2h14a2 2 0 002-2v-4a2 2 0 00-2-2m-2-4h.01M17 16h.01'; + case 'bolt': + // Lightning bolt icon + return 'M13 10V3L4 14h7v7l9-11h-7z'; + default: + return getTypeIcon(type); + } + } + + // Get Hiera-specific details for display + function getHieraDetails(integration: IntegrationStatus): { + keyCount?: number; + fileCount?: number; + controlRepoPath?: string; + lastScanTime?: string; + hieraConfigValid?: boolean; + factSourceAvailable?: boolean; + controlRepoAccessible?: boolean; + status?: string; + structure?: Record; + warnings?: string[]; + } | null { + if (integration.name !== 'hiera' || !integration.details) { + return null; + } + const details = integration.details as Record; + return { + keyCount: typeof details.keyCount === 'number' ? details.keyCount : undefined, + fileCount: typeof details.fileCount === 'number' ? details.fileCount : undefined, + controlRepoPath: typeof details.controlRepoPath === 'string' ? 
details.controlRepoPath : undefined, + lastScanTime: typeof details.lastScanTime === 'string' ? details.lastScanTime : undefined, + hieraConfigValid: typeof details.hieraConfigValid === 'boolean' ? details.hieraConfigValid : undefined, + factSourceAvailable: typeof details.factSourceAvailable === 'boolean' ? details.factSourceAvailable : undefined, + controlRepoAccessible: typeof details.controlRepoAccessible === 'boolean' ? details.controlRepoAccessible : undefined, + status: typeof details.status === 'string' ? details.status : undefined, + structure: typeof details.structure === 'object' && details.structure !== null + ? details.structure as Record + : undefined, + warnings: Array.isArray(details.warnings) ? details.warnings as string[] : undefined, + }; + } + + // Get integration-specific troubleshooting steps + function getTroubleshootingSteps(integration: IntegrationStatus): string[] { + if (integration.name === 'hiera') { + if (integration.status === 'not_configured') { + return [ + 'Set HIERA_CONTROL_REPO_PATH environment variable to your control repository path', + 'Ensure the control repository contains a valid hiera.yaml file', + 'Verify the hieradata directory exists (data/, hieradata/, or hiera/)', + 'Check the setup instructions for required configuration options', + ]; + } else if (integration.status === 'error' || integration.status === 'disconnected') { + return [ + 'Verify the control repository path exists and is accessible', + 'Check that hiera.yaml is valid YAML and follows Hiera 5 format', + 'Ensure the hieradata directory contains valid YAML/JSON files', + 'Review the error details for specific file or syntax issues', + 'Try reloading the integration after fixing any issues', + ]; + } else if (integration.status === 'degraded') { + return [ + 'Some Hiera features may be unavailable - check warnings for details', + 'Verify PuppetDB connection if fact resolution is failing', + 'Check for syntax errors in hieradata files', + 'Try refreshing to 
see if issues resolve', + ]; + } + } + + // Default troubleshooting steps + if (integration.status === 'not_configured') { + return [ + 'Configure the integration using environment variables or config file', + 'Check the setup instructions for required parameters', + ]; + } else if (integration.status === 'error' || integration.status === 'disconnected') { + return [ + 'Verify if you have the command available', + 'Verify the service is running and accessible', + 'Check network connectivity and firewall rules', + 'Verify authentication credentials are correct', + 'Review service logs for detailed error information', + ]; + } else if (integration.status === 'degraded') { + return [ + 'Some capabilities are failing - check logs for details', + 'Working capabilities can still be used normally', + 'Try refreshing to see if issues resolve', + ]; + } + + return []; + } + + // Get Hiera-specific error information for actionable display + function getHieraErrorInfo(integration: IntegrationStatus): { errors: string[]; warnings: string[]; structure?: Record } | null { + if (integration.name !== 'hiera' || !integration.details) { + return null; + } + const details = integration.details as Record; + return { + errors: Array.isArray(details.errors) ? details.errors as string[] : [], + warnings: Array.isArray(details.warnings) ? details.warnings as string[] : [], + structure: typeof details.structure === 'object' && details.structure !== null + ? details.structure as Record + : undefined, + }; + } + + // Get actionable message for Hiera errors + function getHieraActionableMessage(errorInfo: { errors: string[]; warnings: string[]; structure?: Record }): string { + if (errorInfo.errors.length > 0) { + const firstError = errorInfo.errors[0]; + if (firstError.includes('does not exist')) { + return 'The control repository path does not exist. 
Check the HIERA_CONTROL_REPO_PATH environment variable.'; + } + if (firstError.includes('hiera.yaml not found')) { + return 'No hiera.yaml file found. Ensure your control repository has a valid Hiera 5 configuration.'; + } + if (firstError.includes('not a directory')) { + return 'The configured path is not a directory. Provide a path to your control repository root.'; + } + if (firstError.includes('Cannot access')) { + return 'Cannot access the control repository. Check file permissions and path accessibility.'; + } + } + return 'Check the error details below for more information.'; + } + // Get display name for integration function getDisplayName(name: string): string { // Capitalize first letter and replace hyphens with spaces @@ -179,7 +320,7 @@ stroke-linecap="round" stroke-linejoin="round" stroke-width="2" - d={getTypeIcon(integration.type)} + d={getIntegrationIcon(integration.name, integration.type)} /> @@ -249,44 +390,136 @@ {/if} - {#if integration.status === 'not_configured'} - + + {#if integration.name === 'hiera' && integration.status === 'connected'} + {@const hieraDetails = getHieraDetails(integration)} + {#if hieraDetails} +
+ {#if hieraDetails.keyCount !== undefined} +
+

+ {hieraDetails.keyCount} keys +

+
+ {/if} + {#if hieraDetails.fileCount !== undefined} +
+

+ {hieraDetails.fileCount} files +

+
+ {/if} +
+ {/if} {/if} + + + {#if integration.details && integration.status === 'error'} -
- - Show error details - -
{JSON.stringify(integration.details, null, 2)}
-
+ + {#if integration.name === 'hiera'} + {@const hieraErrorInfo = getHieraErrorInfo(integration)} + {#if hieraErrorInfo} +
+ +
+
+ + + +

+ {getHieraActionableMessage(hieraErrorInfo)} +

+
+
+ + + {#if hieraErrorInfo.errors.length > 0} +
+

Errors:

+
    + {#each hieraErrorInfo.errors as error} +
  • {error}
  • + {/each} +
+
+ {/if} + + + {#if hieraErrorInfo.warnings.length > 0} +
+

Warnings:

+
    + {#each hieraErrorInfo.warnings as warning} +
  • {warning}
  • + {/each} +
+
+ {/if} + + + {#if hieraErrorInfo.structure} +
+ + Repository structure check + +
+ {#each Object.entries(hieraErrorInfo.structure) as [key, value]} +
+ {#if value} + + + + {:else} + + + + {/if} + {key.replace(/^has/, '').replace(/([A-Z])/g, ' $1').trim()} +
+ {/each} +
+
+ {/if} +
+ {/if} + {:else} + +
+ + Show error details + +
{JSON.stringify(integration.details, null, 2)}
+
+ {/if} {/if} @@ -306,6 +539,122 @@ {/if} + + {#if integration.name === 'hiera'} + {@const hieraDetails = getHieraDetails(integration)} + {#if hieraDetails?.controlRepoPath} +
+ Control Repo: + {hieraDetails.controlRepoPath} +
+ {/if} + + +
+ {#if hieraDetails?.controlRepoAccessible !== undefined} +
+ {#if hieraDetails.controlRepoAccessible} + + + + Repo accessible + {:else} + + + + Repo inaccessible + {/if} +
+ {/if} + {#if hieraDetails?.hieraConfigValid !== undefined} +
+ {#if hieraDetails.hieraConfigValid} + + + + hiera.yaml valid + {:else} + + + + hiera.yaml invalid + {/if} +
+ {/if} + {#if hieraDetails?.factSourceAvailable !== undefined} +
+ {#if hieraDetails.factSourceAvailable} + + + + Facts available + {:else} + + + + No fact source + {/if} +
+ {/if} +
+ + {#if hieraDetails?.lastScanTime} +
+ Last Scan: + {hieraDetails.lastScanTime} +
+ {/if} + {#if hieraDetails?.keyCount !== undefined} +
+ Total Keys: + {hieraDetails.keyCount} +
+ {/if} + {#if hieraDetails?.fileCount !== undefined} +
+ Total Files: + {hieraDetails.fileCount} +
+ {/if} + + + {#if hieraDetails?.structure} +
+ + Repository Structure + +
+ {#each Object.entries(hieraDetails.structure) as [key, value]} +
+ {#if value} + + + + {:else} + + + + {/if} + {key.replace(/^has/, '').replace(/([A-Z])/g, ' $1').trim()} +
+ {/each} +
+
+ {/if} + + + {#if hieraDetails?.warnings && hieraDetails.warnings.length > 0} +
+

āš ļø Warnings:

+
    + {#each hieraDetails.warnings as warning} +
  • {warning}
  • + {/each} +
+
+ {/if} + {/if} + {#if integration.responseTime !== undefined}
Response Time: @@ -327,25 +676,18 @@
{/if} -
-

šŸ”§ Troubleshooting:

-
    - {#if integration.status === 'not_configured'} -
  • Configure the integration using environment variables or config file
  • -
  • Check the setup instructions for required parameters
  • - {:else if integration.status === 'error' || integration.status === 'disconnected'} -
  • Verify if you have the command available
  • -
  • Verify the service is running and accessible
  • -
  • Check network connectivity and firewall rules
  • -
  • Verify authentication credentials are correct
  • -
  • Review service logs for detailed error information
  • - {:else if integration.status === 'degraded'} -
  • Some capabilities are failing - check logs for details
  • -
  • Working capabilities can still be used normally
  • -
  • Try refreshing to see if issues resolve
  • - {/if} -
-
+ + {#if getTroubleshootingSteps(integration).length > 0} + {@const troubleshootingSteps = getTroubleshootingSteps(integration)} +
+

šŸ”§ Troubleshooting:

+
    + {#each troubleshootingSteps as step} +
  • {step}
  • + {/each} +
+
+ {/if} {/if} diff --git a/frontend/src/components/NodeHieraTab.svelte b/frontend/src/components/NodeHieraTab.svelte new file mode 100644 index 0000000..2f28fad --- /dev/null +++ b/frontend/src/components/NodeHieraTab.svelte @@ -0,0 +1,634 @@ + + + +
+ {#if loading} +
+ +
+ {:else if error} + + + + {#if error.includes('not configured')} +
+
+ + + +
+

Setup Required

+

+ To view Hiera data for this node, you need to configure the Hiera integration with your Puppet control repository. +

+
    +
  1. Go to the Integration Setup page
  2. +
  3. Configure the path to your Puppet control repository
  4. +
  5. Ensure the repository contains a valid hiera.yaml file
  6. +
  7. Return to this page to view Hiera data
  8. +
+
+
+
+ {/if} + {:else if hieraData} + +
+
+
+

Hiera Data

+ + Facts: {hieraData.factSource === 'puppetdb' ? 'PuppetDB' : 'Local'} + +
+
+ {hieraData.keys.length} total keys + {hieraData.usedKeys.length} used + {hieraData.unusedKeys.length} unused +
+
+ + + {#if hieraData.warnings && hieraData.warnings.length > 0} +
+
+ + + +
+ {#each hieraData.warnings as warning} +

{warning}

+ {/each} +
+
+
+ {/if} +
+ + +
+
+ +
+ + + + + {#if searchQuery} + + {/if} +
+ + +
+ Filter: +
+ + + +
+
+
+ + {#if searchQuery || filterMode !== 'all'} +

+ Showing {filteredKeys().length} of {hieraData.keys.length} keys +

+ {/if} +
+ + +
+ {#if filteredKeys().length === 0} +
+ + + +

+ {searchQuery ? 'No keys match your search' : filterMode !== 'all' ? `No ${filterMode} keys found` : 'No Hiera keys found for this node'} +

+
+ {:else} + {#each filteredKeys() as keyInfo (keyInfo.key)} +
+ + + + +
+ +
+ + + {#if expandedKeys.has(keyInfo.key)} +
+ {#if keyInfo.found} + +
+

Resolved Value

+
+ {#if isComplexValue(keyInfo.resolvedValue)} +
{formatValue(keyInfo.resolvedValue)}
+ {:else} + {formatValue(keyInfo.resolvedValue)} + {/if} +
+
+ + +
+
+

Source File

+

{keyInfo.sourceFile}

+
+
+

Hierarchy Level

+

{keyInfo.hierarchyLevel}

+
+
+ + + {#if expertMode.enabled} +
+

Expert Details

+
+
+ Lookup Method: + + {keyInfo.lookupMethod} + +
+ {#if keyInfo.interpolatedVariables && Object.keys(keyInfo.interpolatedVariables).length > 0} +
+ Interpolated Variables: +
+
{JSON.stringify(keyInfo.interpolatedVariables, null, 2)}
+
+
+ {/if} +
+
+ + + {#if keyInfo.allValues && keyInfo.allValues.length > 1} +
+

Values from All Hierarchy Levels

+
+ {#each keyInfo.allValues as location, index} +
+
+
+
+ {location.hierarchyLevel} + {#if index === 0} + + Winner + + {/if} +
+

{location.file}:{location.lineNumber}

+
+
+
+ {#if isComplexValue(location.value)} +
{formatValue(location.value)}
+ {:else} + {formatValue(location.value)} + {/if} +
+
+ {/each} +
+
+ {/if} + {/if} + {:else} +
+

+ This key was not found in any hierarchy level for this node's facts. +

+
+ {/if} +
+ {/if} +
+ {/each} + {/if} +
+ {/if} + + + {#if selectedKey} + + {/if} +
diff --git a/frontend/src/components/PuppetdbSetupGuide.svelte b/frontend/src/components/PuppetdbSetupGuide.svelte index 331ae6c..4edd602 100644 --- a/frontend/src/components/PuppetdbSetupGuide.svelte +++ b/frontend/src/components/PuppetdbSetupGuide.svelte @@ -21,8 +21,8 @@ PUPPETDB_SERVER_URL=https://puppetdb.example.com PUPPETDB_PORT=8081 PUPPETDB_SSL_ENABLED=true PUPPETDB_SSL_CA=/etc/puppetlabs/puppet/ssl/certs/ca.pem -PUPPETDB_SSL_CERT=/etc/puppetlabs/puppet/ssl/certs/admin.pem -PUPPETDB_SSL_KEY=/etc/puppetlabs/puppet/ssl/private_keys/admin.pem +PUPPETDB_SSL_CERT=/etc/puppetlabs/puppet/ssl/certs/hostname.pem +PUPPETDB_SSL_KEY=/etc/puppetlabs/puppet/ssl/private_keys/hostname.pem PUPPETDB_SSL_REJECT_UNAUTHORIZED=true`; const advancedConfig = `# Advanced Configuration @@ -90,7 +90,7 @@ PUPPETDB_PRIORITY=10`; šŸ”’ SSL Certificate -

Required for Open Source Puppet

+

Required for Open Source Puppet and OpenVox

@@ -106,12 +106,25 @@ PUPPETDB_PRIORITY=10`; {:else}
-

Locate SSL Certificates

-

Default certificate locations on Puppetserver:

+

Use existing SSL Certificates

+

If Pabawi runs on a node managed by Puppet, you can use the existing local Puppet agent certificates:

CA: /etc/puppetlabs/puppet/ssl/certs/ca.pem
-
Cert: /etc/puppetlabs/puppet/ssl/certs/admin.pem
-
Key: /etc/puppetlabs/puppet/ssl/private_keys/admin.pem
+
Cert: /etc/puppetlabs/puppet/ssl/certs/hostname.pem
+
Key: /etc/puppetlabs/puppet/ssl/private_keys/hostname.pem
+
+ +

Generate dedicated SSL Certificates

+

Alternatively, you can + generate on your Puppet server a dedicated certificate to use for Pabawi. Run, as root on the Puppet server:

+
+ puppetserver ca generate --certname pabawi
+

The command generates the following files, which should be copied to your Pabawi host (to the paths you configure for the PUPPETDB_SSL_CA, PUPPETDB_SSL_CERT and PUPPETDB_SSL_KEY settings):

+
+
CA: /etc/puppetlabs/puppet/ssl/certs/ca.pem
+
Cert: /etc/puppetlabs/puppet/ssl/certs/pabawi.pem
+
Key: /etc/puppetlabs/puppet/ssl/private_keys/pabawi.pem
{/if} diff --git a/frontend/src/components/README_RE_EXECUTION.md b/frontend/src/components/README_RE_EXECUTION.md deleted file mode 100644 index ce8aec2..0000000 --- a/frontend/src/components/README_RE_EXECUTION.md +++ /dev/null @@ -1,89 +0,0 @@ -# Re-Execution Feature Implementation - -## Overview - -The re-execution feature allows users to quickly repeat previous executions with the same or modified parameters. This implementation adds re-execute buttons throughout the UI and supports parameter pre-filling. - -## Components - -### ReExecutionButton.svelte - -A reusable button component that handles navigation to the appropriate execution interface with pre-filled parameters. - -**Props:** - -- `execution`: ExecutionResult - The execution to re-execute -- `currentNodeId?`: string - If provided, sets this as the target node (used in NodeDetailPage) -- `disabled?`: boolean - Whether the button is disabled -- `size?`: 'sm' | 'md' - Button size -- `variant?`: 'button' | 'icon' - Display as full button or icon-only - -**Features:** - -- Handles different execution types (command, task, puppet, package) -- Stores execution parameters in sessionStorage for pre-filling -- Shows loading state during navigation -- Automatically disabled when execution is running - -## Integration Points - -### ExecutionsPage - -- Added re-execute button column to executions table -- Each row has an icon button to re-execute -- Modal footer includes a full re-execute button -- Clicking re-execute navigates to the appropriate interface with pre-filled parameters - -### NodeDetailPage - -- Added re-execute button column to execution history table -- Re-execute button automatically sets the current node as the target -- Checks sessionStorage on mount for pre-filled parameters -- Supports command, task, puppet, and package re-execution - -### TaskRunInterface - -- Added `initialTaskName` and `initialParameters` props -- Automatically selects and pre-fills task when initial values are provided 
-- Uses `$effect` to watch for tasksByModule changes and pre-select task - -## Parameter Pre-filling Flow - -1. User clicks re-execute button on an execution -2. ReExecutionButton stores execution data in sessionStorage: - - `reExecuteCommand`: Command string - - `reExecuteTask`: JSON with taskName and parameters - - `reExecutePuppet`: JSON with puppet parameters - - `reExecutePackage`: JSON with package parameters -3. ReExecutionButton navigates to the appropriate page/tab -4. Target page checks sessionStorage on mount -5. Parameters are pre-filled in the appropriate interface -6. SessionStorage is cleared after reading -7. User can modify parameters before executing - -## User Experience - -- **Quick Re-execution**: One-click access to repeat any execution -- **Context-Aware**: When re-executing from NodeDetailPage, automatically targets the current node -- **Editable Parameters**: All pre-filled parameters can be modified before execution -- **Visual Feedback**: Loading states and toast notifications inform the user -- **Accessibility**: Icon buttons include proper titles and ARIA attributes - -## Requirements Validated - -This implementation validates the following requirements: - -- **7.1**: Re-execute button displayed for all action types in executions page -- **7.2**: Navigation to appropriate execution interface with pre-filled parameters -- **7.3**: Preservation of target nodes, action type, and parameters -- **7.4**: Parameters are editable before execution -- **8.1**: Re-execute button in node detail execution history -- **8.2**: Navigation with node and parameters pre-filled from node detail page -- **8.4**: Target node set to current node when re-executing from node detail page - -## Future Enhancements - -- Support for multi-node command execution interface -- Backend API endpoint for creating re-execution records with linkage -- Display of re-execution history and relationships -- Bulk re-execution of multiple executions diff --git 
a/frontend/src/components/index.ts b/frontend/src/components/index.ts index 40b6986..510a4c8 100644 --- a/frontend/src/components/index.ts +++ b/frontend/src/components/index.ts @@ -1,6 +1,7 @@ export { default as CatalogComparison } from "./CatalogComparison.svelte"; export { default as CatalogViewer } from "./CatalogViewer.svelte"; export { default as CertificateManagement } from "./CertificateManagement.svelte"; +export { default as CodeAnalysisTab } from "./CodeAnalysisTab.svelte"; export { default as CommandOutput } from "./CommandOutput.svelte"; export { default as DetailedErrorDisplay } from "./DetailedErrorDisplay.svelte"; export { default as EnvironmentSelector } from "./EnvironmentSelector.svelte"; @@ -8,11 +9,14 @@ export { default as ErrorAlert } from "./ErrorAlert.svelte"; export { default as ErrorBoundary } from "./ErrorBoundary.svelte"; export { default as EventsViewer } from "./EventsViewer.svelte"; export { default as FactsViewer } from "./FactsViewer.svelte"; +export { default as GlobalHieraTab } from "./GlobalHieraTab.svelte"; +export { default as HieraSetupGuide } from "./HieraSetupGuide.svelte"; export { default as MultiSourceFactsViewer } from "./MultiSourceFactsViewer.svelte"; export { default as IntegrationStatus } from "./IntegrationStatus.svelte"; export { default as LoadingSpinner } from "./LoadingSpinner.svelte"; export { default as ManagedResourcesViewer } from "./ManagedResourcesViewer.svelte"; export { default as Navigation } from "./Navigation.svelte"; +export { default as NodeHieraTab } from "./NodeHieraTab.svelte"; export { default as NodeStatus } from "./NodeStatus.svelte"; export { default as PuppetDBAdmin } from "./PuppetDBAdmin.svelte"; export { default as PuppetOutputViewer } from "./PuppetOutputViewer.svelte"; diff --git a/frontend/src/pages/IntegrationSetupPage.svelte b/frontend/src/pages/IntegrationSetupPage.svelte index 0e1ca1f..de3df09 100644 --- a/frontend/src/pages/IntegrationSetupPage.svelte +++ 
b/frontend/src/pages/IntegrationSetupPage.svelte @@ -1,6 +1,6 @@ - -
- -
-

Certificate Management

-

- Manage Puppetserver CA certificates -

-
- - -
- -
- -
-
- - - -
- -
-
- - -
- - -
- - - -
- - - {#if activeFilters().length > 0} -
- Active filters: - {#each activeFilters() as filter} - - {filter} - - - {/each} -
- {/if} - - - {#if hasSelectedCertificates} -
-
- - {selectedCertnames.size} certificate{selectedCertnames.size !== 1 ? 's' : ''} selected - -
- - -
-
- {#if bulkOperationInProgress} -
-
- - - - Processing certificates... Please wait. -
-
- {/if} -
- {/if} - - - {#if expertMode.enabled && !loading} -
-
- - - -
-

Expert Mode Active

-
-

API Endpoint: GET /api/integrations/puppetserver/certificates

-

Setup Instructions:

-
    -
  • Configure PUPPETSERVER_SERVER_URL environment variable
  • -
  • Set PUPPETSERVER_TOKEN or configure SSL certificates
  • -
  • Ensure Puppetserver CA API is accessible on port 8140
  • -
  • Verify auth.conf allows certificate API access
  • -
-

Troubleshooting:

-
    -
  • Check browser console for detailed API request/response logs
  • -
  • Verify X-Expert-Mode header is being sent with requests
  • -
  • Review backend logs for Puppetserver connection errors
  • -
  • Test Puppetserver API directly: curl -k https://puppetserver:8140/puppet-ca/v1/certificate_statuses
  • -
-
-
-
-
- {/if} - - - {#if loading && certificates.length === 0} -
- -
- {:else if error && certificates.length === 0} - -
-
- - - -
-

Error loading certificates

-

{error}

-
- -
-
-
-
- {:else if filteredCertificates().length === 0} - -
- - - -

No certificates found

-

- {activeFilters().length > 0 ? 'Try adjusting your filters' : 'No certificates available'} -

-
- {:else} - -
- - - - - - - - - - - - - {#each filteredCertificates() as cert (cert.certname)} - - - - - - - - - {/each} - -
- - - Certname - - Status - - Fingerprint - - Expiration - - Actions -
- toggleCertificate(cert.certname)} - class="h-4 w-4 rounded border-gray-300 text-primary-600 focus:ring-primary-500 dark:border-gray-600 dark:bg-gray-700" - /> - - {cert.certname} - - - {cert.status} - - - - {cert.fingerprint.substring(0, 16)}... - - - {formatDate(cert.not_after)} - -
- {#if cert.status === 'requested'} - - {/if} - {#if cert.status === 'signed'} - - {/if} -
-
-
- - -
- Showing {filteredCertificates().length} of {certificates.length} certificates -
- {/if} - - - {#if confirmDialog.show} - - {/if} -
diff --git a/frontend/src/components/NodeHieraTab.svelte b/frontend/src/components/NodeHieraTab.svelte index 2f28fad..8132fc9 100644 --- a/frontend/src/components/NodeHieraTab.svelte +++ b/frontend/src/components/NodeHieraTab.svelte @@ -25,6 +25,15 @@ interpolatedVariables?: Record; } + interface HierarchyFileInfo { + path: string; + hierarchyLevel: string; + interpolatedPath: string; + exists: boolean; + canResolve: boolean; + unresolvedVariables?: string[]; + } + interface NodeHieraDataResponse { nodeId: string; keys: HieraResolutionInfo[]; @@ -32,6 +41,8 @@ unusedKeys: string[]; factSource: 'puppetdb' | 'local'; warnings?: string[]; + hierarchyFiles: HierarchyFileInfo[]; + totalKeys: number; } interface Props { @@ -45,7 +56,8 @@ let loading = $state(true); let error = $state(null); let searchQuery = $state(''); - let filterMode = $state<'all' | 'used' | 'unused'>('all'); + let filterMode = $state<'all' | 'used' | 'unused'>('used'); + let classificationMode = $state<'found' | 'classes'>('found'); let expandedKeys = $state>(new Set()); let selectedKey = $state(null); @@ -75,16 +87,21 @@ } // Filter keys based on search and filter mode - const filteredKeys = $derived(() => { - if (!hieraData) return []; + let filteredKeys = $state([]); + + $effect(() => { + if (!hieraData || !hieraData.keys) { + filteredKeys = []; + return; + } let keys = hieraData.keys; // Apply filter mode if (filterMode === 'used') { - keys = keys.filter(k => hieraData!.usedKeys.includes(k.key)); + keys = keys.filter(k => hieraData.usedKeys.includes(k.key)); } else if (filterMode === 'unused') { - keys = keys.filter(k => hieraData!.unusedKeys.includes(k.key)); + keys = keys.filter(k => hieraData.unusedKeys.includes(k.key)); } // Apply search filter @@ -96,8 +113,8 @@ ); } - // Sort alphabetically by key name (use spread to avoid mutating state) - return [...keys].sort((a, b) => a.key.localeCompare(b.key)); + // Sort alphabetically by key name + filteredKeys = [...keys].sort((a, b) => 
a.key.localeCompare(b.key)); }); // Check if a key is used @@ -236,6 +253,44 @@ {/if} + + {#if hieraData.hierarchyFiles && hieraData.hierarchyFiles.length > 0} +
+

Hierarchy Files

+
+ {#each hieraData.hierarchyFiles as fileInfo} +
+
+
+ {fileInfo.hierarchyLevel} + {#if fileInfo.exists} + + Found + + {:else} + + Not Found + + {/if} + {#if !fileInfo.canResolve} + + Unresolved Variables + + {/if} +
+

{fileInfo.interpolatedPath}

+ {#if fileInfo.unresolvedVariables && fileInfo.unresolvedVariables.length > 0} +

+ Unresolved: {fileInfo.unresolvedVariables.join(', ')} +

+ {/if} +
+
+ {/each} +
+
+ {/if} +
@@ -275,44 +330,85 @@
-
- Filter: -
- - - +
+ +
+ Classification: +
+ + +
+
+ + +
+ Filter: +
+ + + +
{#if searchQuery || filterMode !== 'all'}

- Showing {filteredKeys().length} of {hieraData.keys.length} keys + Showing {filteredKeys.length} of {hieraData.keys.length} keys

{/if} + + + {#if classificationMode === 'classes'} +
+
+ + + +
+

+ Class-Matched mode shows the same results as Found Keys mode until class detection is fixed. + Currently showing all keys with resolved values as "used". +

+
+
+
+ {/if}
- {#if filteredKeys().length === 0} + {#if filteredKeys.length === 0}
@@ -322,7 +418,7 @@

{:else} - {#each filteredKeys() as keyInfo (keyInfo.key)} + {#each filteredKeys as keyInfo (keyInfo.key)}
+ {#if puppetReports.length > 5} +
- {/if} -
+
+ {/if} {/if}
@@ -1582,13 +1569,6 @@
- - {#if activePuppetSubTab === 'certificate-status'} -
-
-
-

Certificate Status

- - Puppetserver - -
- {#if !certificateLoading && !certificateError} - - {/if} -
- - {#if certificateLoading} -
- -
- {:else if certificateError} -
- - - -
-
- - - -
-

Troubleshooting Tips

-
    -
  • Verify that Puppetserver is running and accessible
  • -
  • Check that the node certname matches the certificate name in Puppetserver
  • -
  • Ensure the Puppetserver integration is properly configured with valid credentials
  • -
  • If the node hasn't registered yet, it needs to run the Puppet agent first
  • -
  • Check the Puppetserver logs for any certificate-related errors
  • -
-
-
-
-
- {:else if !certificateStatus} -
-
- - - -

- No certificate found for this node -

-

- This node has not registered with Puppetserver yet. -

-
- - -
-
- - - -
-

How to register this node

-
    -
  1. Install the Puppet agent on the node
  2. -
  3. Configure the agent to point to your Puppetserver
  4. -
  5. Run puppet agent -t to generate a certificate request
  6. -
  7. Sign the certificate request in Puppetserver or return to this page to sign it
  8. -
-
-
-
-
- {:else} -
- -
-

Certificate Details

-
-
-
Certname
-
{certificateStatus.certname}
-
-
-
Status
-
- - {#if certificateStatus.status === 'signed'} - - - - {:else if certificateStatus.status === 'requested'} - - - - {:else} - - - - {/if} - {certificateStatus.status} - -
-
- {#if certificateStatus.fingerprint} -
-
Fingerprint
-
{certificateStatus.fingerprint}
-
- {/if} - {#if certificateStatus.not_before} -
-
Valid From
-
{formatTimestamp(certificateStatus.not_before)}
-
- {/if} - {#if certificateStatus.not_after} -
-
Valid Until
-
{formatTimestamp(certificateStatus.not_after)}
-
- {/if} - {#if certificateStatus.dns_alt_names && certificateStatus.dns_alt_names.length > 0} -
-
DNS Alt Names
-
- {certificateStatus.dns_alt_names.join(', ')} -
-
- {/if} -
-
- - - {#if certificateStatus.status === 'requested'} -
-
- - - -
-

Certificate Pending

-

- This certificate request is waiting to be signed. Sign it below to allow this node to communicate with Puppetserver. -

-
-
-
- {:else if certificateStatus.status === 'revoked'} -
-
- - - -
-

Certificate Revoked

-

- This certificate has been revoked and can no longer be used. The node will not be able to communicate with Puppetserver until a new certificate is issued. -

-
-
-
- {:else if certificateStatus.status === 'signed'} -
-
- - - -
-

Certificate Active

-

- This certificate is signed and active. The node can communicate with Puppetserver. -

-
-
-
- {/if} - - -
- {#if certificateStatus.status === 'requested'} - - {/if} - {#if certificateStatus.status === 'signed'} - - {/if} -
-
- {/if} -
- {/if} - {#if activePuppetSubTab === 'catalog'} diff --git a/frontend/src/pages/PuppetPage.svelte b/frontend/src/pages/PuppetPage.svelte index 8c05c63..39b7a05 100644 --- a/frontend/src/pages/PuppetPage.svelte +++ b/frontend/src/pages/PuppetPage.svelte @@ -13,7 +13,7 @@ import CodeAnalysisTab from '../components/CodeAnalysisTab.svelte'; // Tab types - type TabId = 'environments' | 'reports' | 'certificates' | 'status' | 'admin' | 'hiera' | 'analysis'; + type TabId = 'environments' | 'reports' | 'status' | 'admin' | 'hiera' | 'analysis'; // State let activeTab = $state('environments'); @@ -137,7 +137,7 @@ case 'reports': await fetchAllReports(); break; - // 'environments' and 'certificates' load their own data + // 'environments' loads its own data } } @@ -146,7 +146,7 @@ const url = new URL(window.location.href); const tabParam = url.searchParams.get('tab') as TabId | null; - if (tabParam && ['environments', 'reports', 'certificates', 'status', 'admin', 'hiera', 'analysis'].includes(tabParam)) { + if (tabParam && ['environments', 'reports', 'status', 'admin', 'hiera', 'analysis'].includes(tabParam)) { activeTab = tabParam; // Load data for the tab if not already loaded @@ -185,7 +185,7 @@ Puppet

- Manage Puppet environments, reports, and certificates + Manage Puppet environments and reports

@@ -222,20 +222,7 @@ - +