diff --git a/extension/server/src/connection.ts b/extension/server/src/connection.ts
index 37549187..1bbcce57 100644
--- a/extension/server/src/connection.ts
+++ b/extension/server/src/connection.ts
@@ -24,57 +24,171 @@ connection.onDidChangeWatchedFiles((params: DidChangeWatchedFilesParams) => {
 	watchedFilesChangeEvent.forEach(editEvent => editEvent(params));
 })
 
+/**
+ * Extracts a clean, decoded filename from a URI for display in logs
+ * Handles URL encoding (e.g., %23 -> #) and removes query parameters
+ */
+export function getDisplayName(uri: string): string {
+	const withoutQuery = uri.split('?')[0];
+	const fileName = withoutQuery.split('/').pop() || uri;
+	return decodeURIComponent(fileName);
+}
+
+// Log level constants and names
+export const LogLevel = {
+	NONE: 0,
+	ERROR: 1,
+	WARN: 2,
+	INFO: 3,
+	DEBUG: 4
+};
+
+const LOG_LEVEL_NAMES = ['NONE', 'ERROR', 'WARN', 'INFO', 'DEBUG'];
+
+let logLevel: number = LogLevel.INFO; // Default to INFO
+
+export function setLogLevel(level: number) {
+	logLevel = Math.max(LogLevel.NONE, Math.min(LogLevel.DEBUG, level)); // Clamp between 0-4
+	const levelName = LOG_LEVEL_NAMES[logLevel] || 'UNKNOWN';
+	logWithTimestamp(`Log level set to ${logLevel} (${levelName})`, LogLevel.NONE);
+}
+
+export async function initializeLogLevel() {
+	try {
+		const config = await connection.workspace.getConfiguration('vscode-rpgle');
+		const logLevelString = config?.logLevel || 'info';
+
+		// Map string to numeric level using LOG_LEVEL_NAMES
+		const levelIndex = LOG_LEVEL_NAMES.findIndex(name => name.toLowerCase() === logLevelString.toLowerCase());
+		const level = levelIndex !== -1 ? levelIndex : LogLevel.INFO;
+
+		setLogLevel(level);
+	} catch (e) {
+		logWithTimestamp(`Failed to read log level setting, using default of INFO`, LogLevel.WARN);
+		setLogLevel(LogLevel.INFO);
+	}
+}
+
+export function logWithTimestamp(message: string, level: number = LogLevel.INFO) {
+	if (level > logLevel) return;
+
+	const now = new Date();
+	const timestamp = now.toTimeString().split(' ')[0] + '.' + now.getMilliseconds().toString().padStart(3, '0');
+	const levelName = LOG_LEVEL_NAMES[level] || 'LOG';
+	console.log(`[${timestamp}] [${levelName}] ${message}`);
+}
+
 export async function validateUri(stringUri: string, scheme = ``) {
+	const startTime = Date.now();
+
 	// First, check local cache
 	const possibleCachedFile = findFile(stringUri, scheme);
-	if (possibleCachedFile) return possibleCachedFile;
+	if (possibleCachedFile) {
+		const duration = Date.now() - startTime;
+		logWithTimestamp(`URI validation: ${stringUri} (${duration}ms, local cache)`, LogLevel.DEBUG);
+		return possibleCachedFile;
+	}
 
-	console.log(`Validating file from server: ${stringUri}`);
+	logWithTimestamp(`URI validation: ${stringUri} (requesting from server...)`, LogLevel.DEBUG);
 
 	// Then reach out to the extension to find it
 	const uri: string|undefined = await connection.sendRequest("getUri", stringUri);
-	if (uri) return uri;
+	const duration = Date.now() - startTime;
+	if (uri) {
+		logWithTimestamp(`URI validation: ${stringUri} -> ${uri} (${duration}ms, server)`, LogLevel.INFO);
+		return uri;
+	}
+
+	logWithTimestamp(`URI validation: ${stringUri} (${duration}ms, NOT FOUND)`, LogLevel.WARN);
 
 	return;
 }
 
-export async function getFileRequest(uri: string) {
+// Track include files being fetched (to skip debounce on open)
+export const filesBeingFetchedForIncludes = new Set<string>();
+
+export async function getFileRequest(uri: string, skipDebounce: boolean = false) {
+	const startTime = Date.now();
+	const fileName = getDisplayName(uri);
+
 	// First, check if it's local
 	const localCacheDoc = documents.get(uri);
-	if (localCacheDoc) return localCacheDoc.getText();
+	if (localCacheDoc) {
+		const duration = Date.now() - startTime;
+		logWithTimestamp(`File fetch: ${fileName} (${duration}ms, local documents cache)`, LogLevel.DEBUG);
+		return localCacheDoc.getText();
+	}
 
-	console.log(`Fetching file from server: ${uri}`);
+	logWithTimestamp(`File fetch: ${fileName} (requesting from server...)`, LogLevel.DEBUG);
 
-	// If not, then grab it from remote
-	const body: string|undefined = await connection.sendRequest("getFile", uri);
-	if (body) {
-		// TODO.. cache it?
-		return body;
+	// If this is an include file fetch, track it to skip debounce
+	if (skipDebounce) {
+		filesBeingFetchedForIncludes.add(uri);
 	}
 
-	return;
+	try {
+		// If not, then grab it from remote
+		const body: string|undefined = await connection.sendRequest("getFile", uri);
+		const duration = Date.now() - startTime;
+
+		if (body) {
+			logWithTimestamp(`File fetch: ${fileName} (${duration}ms, ${body.length} bytes from server)`, LogLevel.INFO);
+			// TODO.. cache it?
+			return body;
+		}
+
+		logWithTimestamp(`File fetch: ${fileName} (${duration}ms, NOT FOUND)`, LogLevel.WARN);
+		return;
+	} catch (error) {
+		const duration = Date.now() - startTime;
+		logWithTimestamp(`File fetch: ${fileName} (${duration}ms, ERROR: ${error})`, LogLevel.ERROR);
+		console.error(`File fetch error details for ${uri}:`, error);
+		return;
+	} finally {
+		// Always clean up tracking
+		if (skipDebounce) {
+			filesBeingFetchedForIncludes.delete(uri);
+		}
+	}
 }
 
 export let resolvedMembers: {[baseUri: string]: {[fileKey: string]: IBMiMember}} = {};
 export let resolvedStreamfiles: {[baseUri: string]: {[fileKey: string]: string}} = {};
 
 export async function memberResolve(baseUri: string, member: string, file: string): Promise<IBMiMember|undefined> {
+	// Normalize baseUri by removing query parameters to ensure consistent cache keys
+	const normalizedBaseUri = baseUri.split('?')[0];
 	const fileKey = file+member;
+	const startTime = Date.now();
+
+	// Check cache
+	if (resolvedMembers[normalizedBaseUri] && resolvedMembers[normalizedBaseUri][fileKey]) {
+		const cached = resolvedMembers[normalizedBaseUri][fileKey];
+		const duration = Date.now() - startTime;
+		logWithTimestamp(`Member resolve CACHE HIT: ${file}/${member} -> ${cached.library}/${cached.file}/${cached.name} (${duration}ms)`, LogLevel.DEBUG);
+		return cached;
+	}
 
-	if (resolvedMembers[baseUri] && resolvedMembers[baseUri][fileKey]) return resolvedMembers[baseUri][fileKey];
+	const baseFileName = getDisplayName(normalizedBaseUri);
+	logWithTimestamp(`Member resolve CACHE MISS: ${file}/${member} (baseUri=${baseFileName})`, LogLevel.DEBUG);
 
 	try {
 		const resolvedMember = await queue.add(() => {return connection.sendRequest("memberResolve", [member, file])}) as IBMiMember|undefined;
-		// const resolvedMember = await connection.sendRequest("memberResolve", [member, file]) as IBMiMember|undefined;
+		const duration = Date.now() - startTime;
 
 		if (resolvedMember) {
-			if (!resolvedMembers[baseUri]) resolvedMembers[baseUri] = {};
-			resolvedMembers[baseUri][fileKey] = resolvedMember;
+			logWithTimestamp(`Member resolve SUCCESS: ${file}/${member} -> ${resolvedMember.library}/${resolvedMember.file}/${resolvedMember.name} (${duration}ms)`, LogLevel.DEBUG);
+
+			if (!resolvedMembers[normalizedBaseUri]) resolvedMembers[normalizedBaseUri] = {};
+			resolvedMembers[normalizedBaseUri][fileKey] = resolvedMember;
+		} else {
+			logWithTimestamp(`Member resolve NOT FOUND: ${file}/${member} (${duration}ms)`, LogLevel.WARN);
 		}
 
 		return resolvedMember;
 	} catch (e) {
-		console.log(`Member resolve failed.`);
+		const duration = Date.now() - startTime;
+		logWithTimestamp(`Member resolve ERROR: ${file}/${member} (${duration}ms)`, LogLevel.ERROR);
 		console.log(JSON.stringify({baseUri, member, file}));
 		console.log(e);
 	}
@@ -83,8 +197,22 @@ export async function memberResolve(baseUri: string, member: string, file: strin
 }
 
 export async function streamfileResolve(baseUri: string, base: string[]): Promise<string|undefined> {
+	// Normalize baseUri by removing query parameters to ensure consistent cache keys
+	const normalizedBaseUri = baseUri.split('?')[0];
 	const baseString = base.join(`-`);
-	if (resolvedStreamfiles[baseUri] && resolvedStreamfiles[baseUri][baseString]) return resolvedStreamfiles[baseUri][baseString];
+	const startTime = Date.now();
+
+	// Check cache
+	if (resolvedStreamfiles[normalizedBaseUri] && resolvedStreamfiles[normalizedBaseUri][baseString]) {
+		const cached = resolvedStreamfiles[normalizedBaseUri][baseString];
+		const duration = Date.now() - startTime;
+		const requestingFile = getDisplayName(normalizedBaseUri);
+		logWithTimestamp(`Streamfile resolve CACHE HIT: ${base[0]} (requesting: ${requestingFile}) -> ${cached} (${duration}ms)`, LogLevel.DEBUG);
+		return cached;
+	}
+
+	const requestingFile = getDisplayName(normalizedBaseUri);
+	logWithTimestamp(`Streamfile resolve CACHE MISS: ${base[0]} (requesting: ${requestingFile})`, LogLevel.DEBUG);
 
 	const workspace = await getWorkspaceFolder(baseUri);
 
@@ -92,16 +220,21 @@ export async function streamfileResolve(baseUri: string, base: string[]): Promis
 
 	try {
 		const resolvedPath = await queue.add(() => {return connection.sendRequest("streamfileResolve", [base, paths])}) as string|undefined;
-		// const resolvedPath = await connection.sendRequest("streamfileResolve", [base, paths]) as string|undefined;
+		const duration = Date.now() - startTime;
 
 		if (resolvedPath) {
-			if (!resolvedStreamfiles[baseUri]) resolvedStreamfiles[baseUri] = {};
-			resolvedStreamfiles[baseUri][baseString] = resolvedPath;
+			logWithTimestamp(`Streamfile resolve SUCCESS: ${base[0]} (requesting: ${requestingFile}) -> ${resolvedPath} (${duration}ms)`, LogLevel.DEBUG);
+
+			if (!resolvedStreamfiles[normalizedBaseUri]) resolvedStreamfiles[normalizedBaseUri] = {};
+			resolvedStreamfiles[normalizedBaseUri][baseString] = resolvedPath;
+		} else {
+			logWithTimestamp(`Streamfile resolve NOT FOUND: ${base[0]} (requesting: ${requestingFile}) (${duration}ms)`, LogLevel.WARN);
 		}
 
 		return resolvedPath;
 	} catch (e) {
-		console.log(`Streamfile resolve failed.`);
+		const duration = Date.now() - startTime;
+		logWithTimestamp(`Streamfile resolve ERROR: ${base[0]} (requesting: ${requestingFile}) (${duration}ms)`, LogLevel.ERROR);
 		console.log(JSON.stringify({baseUri, base, paths}));
 		console.log(e);
 	}
diff --git a/extension/server/src/providers/linter/index.ts b/extension/server/src/providers/linter/index.ts
index 2c4afd50..97aa568e 100644
--- a/extension/server/src/providers/linter/index.ts
+++ b/extension/server/src/providers/linter/index.ts
@@ -9,7 +9,7 @@ import Cache from '../../../../../language/models/cache';
 import documentFormattingProvider from './documentFormatting';
 
 import * as Project from "../project";
-import { connection, getFileRequest, getWorkingDirectory, resolvedMembers, resolvedStreamfiles, validateUri, watchedFilesChangeEvent } from '../../connection';
+import { connection, getDisplayName, getFileRequest, getWorkingDirectory, resolvedMembers, resolvedStreamfiles, validateUri, watchedFilesChangeEvent } from '../../connection';
 import { parseMemberUri } from '../../data';
 
 export let jsonCache: { [uri: string]: string } = {};
diff --git a/extension/server/src/server.ts b/extension/server/src/server.ts
index 8809ab44..4d3e5ffa 100644
--- a/extension/server/src/server.ts
+++ b/extension/server/src/server.ts
@@ -17,7 +17,7 @@ import { URI } from 'vscode-uri';
 import completionItemProvider from './providers/completionItem';
 import hoverProvider from './providers/hover';
 
-import { connection, getFileRequest, getObject as getObjectData, handleClientRequests, memberResolve, streamfileResolve, validateUri } from "./connection";
+import { connection, filesBeingFetchedForIncludes, getDisplayName, getFileRequest, getObject as getObjectData, handleClientRequests, initializeLogLevel, LogLevel, memberResolve, streamfileResolve, validateUri, logWithTimestamp } from "./connection";
 import * as Linter from './providers/linter';
 import { referenceProvider } from './providers/reference';
 import Declaration from '../../../language/models/declaration';
@@ -117,6 +117,8 @@ connection.onInitialize((params: InitializeParams) => {
 });
 
 connection.onInitialized(() => {
+	initializeLogLevel();
+
 	if (projectEnabled) {
 		Project.initialise();
 	}
@@ -141,12 +143,16 @@ let fetchingInProgress: { [fetchKey: string]: boolean } = {};
 parser.setIncludeFileFetch(async (stringUri: string, includeString: string) => {
 	const currentUri = URI.parse(stringUri);
 	const uriPath = currentUri.path;
+	// Extract clean filename without query parameters
+	const parentFileName = getDisplayName(stringUri);
+	const fetchStartTime = Date.now();
 
 	let cleanString: string | undefined;
 	let validUri: string | undefined;
 
 	if (!fetchingInProgress[includeString]) {
 		fetchingInProgress[includeString] = true;
+		logWithTimestamp(`Include fetch started: ${includeString} (from ${parentFileName})`, LogLevel.DEBUG);
 
 		// Right now we are resolving based on the base file schema.
 		// This is likely bad since you can include across file systems.
@@ -203,7 +209,7 @@ parser.setIncludeFileFetch(async (stringUri: string, includeString: string) => {
 
 				// Resolving IFS path from member or streamfile
 				// IFS fetch
-				
+
 				if (cleanString.startsWith(`/`)) {
 					// Path from root
 					validUri = URI.from({
@@ -217,7 +223,7 @@ parser.setIncludeFileFetch(async (stringUri: string, includeString: string) => {
 				// - `${cleanString}.rpgleinc`
 				// - `${cleanString}.rpgle`
 				const possibleFiles = [cleanString, `${cleanString}.rpgleinc`, `${cleanString}.rpgle`];
-				
+
 				// Path from home directory?
 				const foundStreamfile = await streamfileResolve(stringUri, possibleFiles);
 
@@ -281,11 +287,13 @@ parser.setIncludeFileFetch(async (stringUri: string, includeString: string) => {
 		}
 	}
 
-	fetchingInProgress[includeString] = false;
-
 	if (validUri) {
-		const validSource = await getFileRequest(validUri);
+		const validSource = await getFileRequest(validUri, true); // true = skip debounce for include files
 		if (validSource) {
+			const duration = Date.now() - fetchStartTime;
+			const fileName = getDisplayName(validUri);
+			logWithTimestamp(`Include fetch completed: ${includeString} -> ${fileName} (${duration}ms, found)`, LogLevel.INFO);
+			fetchingInProgress[includeString] = false;
 			return {
 				found: true,
 				uri: validUri,
@@ -294,14 +302,20 @@ parser.setIncludeFileFetch(async (stringUri: string, includeString: string) => {
 			}
 		}
 
+		const duration = Date.now() - fetchStartTime;
+		logWithTimestamp(`Include fetch completed: ${includeString} (${duration}ms, NOT FOUND)`, LogLevel.WARN);
+		fetchingInProgress[includeString] = false;
+		return {
+			found: false,
+			uri: validUri
+		};
+	} else {
+		logWithTimestamp(`Include fetch skipped: ${includeString} (already fetching)`, LogLevel.DEBUG);
+		return {
+			found: false,
+			uri: validUri
+		};
 	}
-
-	fetchingInProgress[includeString] = false;
-
-	return {
-		found: false,
-		uri: validUri
-	};
 });
 
 if (languageToolsEnabled) {
@@ -323,21 +337,131 @@ if (languageToolsEnabled) {
 
 if (isLinterEnabled()) Linter.initialise(connection);
 
-// Always get latest stuff
-documents.onDidChangeContent(handler => {
+// Track parsing state for each document
+const documentParseState: {
+	[uri: string]: {
+		timer?: NodeJS.Timeout,
+		parseId: number,
+		parseStartTime?: number,
+		isParsing: boolean,
+		needsReparse: boolean
+	}
+} = {};
+
+// Execute a parse for a document
+function executeParse(uri: string, parseId: number, document: any) {
+	const fileName = getDisplayName(uri);
+	const state = documentParseState[uri];
+
+	if (!state) return;
+
+	// Mark parse as active
+	state.isParsing = true;
+	state.needsReparse = false;
+	const parseStartTime = Date.now();
+	state.parseStartTime = parseStartTime;
+	logWithTimestamp(`Parse started: ${fileName} (parseId: ${parseId})`, LogLevel.DEBUG);
+
 	parser.getDocs(
-		handler.document.uri,
-		handler.document.getText(),
+		uri,
+		document.getText(),
 		{
 			withIncludes: true,
 			ignoreCache: true,
 			collectReferences: true
 		}
 	).then(cache => {
-		if (cache) {
-			Linter.refreshLinterDiagnostics(handler.document, cache);
+		const duration = Date.now() - parseStartTime;
+		const isLatest = parseId === state.parseId;
+
+		// Mark parse as complete
+		state.isParsing = false;
+
+		// Only update diagnostics if this is still the latest parse
+		if (cache && isLatest) {
+			Linter.refreshLinterDiagnostics(document, cache);
+			logWithTimestamp(`Parse completed: ${fileName} (parseId: ${parseId}, ${duration}ms, diagnostics updated)`, LogLevel.INFO);
+		} else if (cache) {
+			logWithTimestamp(`Parse completed: ${fileName} (parseId: ${parseId}, ${duration}ms, STALE - ignored)`, LogLevel.DEBUG);
+		} else {
+			logWithTimestamp(`Parse completed: ${fileName} (parseId: ${parseId}, ${duration}ms, no cache)`, LogLevel.DEBUG);
+		}
+
+		// If a re-parse was queued while this parse was running, trigger it now
+		if (state.needsReparse) {
+			state.needsReparse = false;
+			const latestParseId = state.parseId;
+			logWithTimestamp(`Triggering queued re-parse for ${fileName} (parseId: ${latestParseId})`, LogLevel.DEBUG);
+			setTimeout(() => executeParse(uri, latestParseId, document), 0);
+		}
+	}).catch(err => {
+		const duration = Date.now() - parseStartTime;
+		state.isParsing = false;
+		logWithTimestamp(`Parse error: ${fileName} (parseId: ${parseId}, ${duration}ms)`, LogLevel.ERROR);
+		console.error(`Error parsing ${uri}:`, err);
+
+		// If a re-parse was queued, trigger it even after an error
+		if (state.needsReparse) {
+			state.needsReparse = false;
+			const latestParseId = state.parseId;
+			logWithTimestamp(`Triggering queued re-parse after error for ${fileName} (parseId: ${latestParseId})`, LogLevel.DEBUG);
+			setTimeout(() => executeParse(uri, latestParseId, document), 0);
 		}
 	});
+}
+
+// Always get latest stuff
+documents.onDidChangeContent(handler => {
+	const uri = handler.document.uri;
+	// Extract clean filename without query parameters
+	const fileName = getDisplayName(uri);
+
+	// Initialize state if needed
+	if (!documentParseState[uri]) {
+		documentParseState[uri] = { parseId: 0, isParsing: false, needsReparse: false };
+	}
+
+	const state = documentParseState[uri];
+	const isFirstOpen = state.parseId === 0;
+	const isIncludeFile = filesBeingFetchedForIncludes.has(uri);
+
+	// Increment parse ID to invalidate any in-flight parses
+	state.parseId++;
+	const currentParseId = state.parseId;
+
+	// Parse immediately without debounce for:
+	// - Include files (opened during getFileRequest with skipDebounce)
+	// - Main files on first open
+	// Use debounce timer for main files being edited
+	const debounceDelay = (isIncludeFile || isFirstOpen) ? 0 : 300;
+
+	// Clear any existing timer
+	if (state.timer) {
+		clearTimeout(state.timer);
+		logWithTimestamp(`Debounce: Timer reset for ${fileName} (parseId: ${currentParseId})`, LogLevel.DEBUG);
+	} else if (!isFirstOpen && !isIncludeFile) {
+		logWithTimestamp(`Debounce: Timer started for ${fileName} (${debounceDelay}ms)`, LogLevel.DEBUG);
+	}
+
+	// Set a new timer to parse after delay (0ms for includes/first open, 300ms for edits)
+	state.timer = setTimeout(() => {
+		delete state.timer;
+
+		if (!isFirstOpen && !isIncludeFile) {
+			logWithTimestamp(`Debounce: Timer expired for ${fileName}, starting parse (parseId: ${currentParseId})`, LogLevel.DEBUG);
+		}
+
+		// Check if a parse is already running for this document
+		if (state.isParsing) {
+			// A parse is already active - queue a re-parse to run after it completes
+			state.needsReparse = true;
+			logWithTimestamp(`Parse queued: ${fileName} (parseId: ${currentParseId}, waiting for active parse to complete)`, LogLevel.DEBUG);
+			return;
+		}
+
+		// Execute the parse
+		executeParse(uri, currentParseId, handler.document);
+	}, debounceDelay); // 0ms for first open, 300ms for edits
 });
 
 // Make the text document manager listen on the connection
diff --git a/language/parser.ts b/language/parser.ts
index 52a06d6a..cfff6208 100644
--- a/language/parser.ts
+++ b/language/parser.ts
@@ -775,9 +775,13 @@ export default class Parser {
 					case `/INCLUDE`:
 						if (options.withIncludes && this.includeFileFetch && lineCanRun()) {
 							const includePath = Parser.getIncludeFromDirective(line);
-							
+
 							if (includePath) {
-								const include = await this.includeFileFetch(workingUri, includePath);
+								// Use fileUri (current file) instead of workingUri (root document) to ensure:
+								// 1. Correct parent file is logged for nested includes
+								// 2. Member/streamfile resolution caches are scoped per requesting file
+								// 3. Workspace context resolution uses the actual requesting file
+								const include = await this.includeFileFetch(fileUri, includePath);
 								if (include.found && include.uri) {
 									if (!scopes[0].includes.some(inc => inc.toPath === include.uri)) {
 										scopes[0].includes.push({
diff --git a/package-lock.json b/package-lock.json
index 25a9e01d..33a4a50d 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "vscode-rpgle",
-  "version": "0.31.1",
+  "version": "0.33.2",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "vscode-rpgle",
-      "version": "0.31.1",
+      "version": "0.33.2",
       "hasInstallScript": true,
       "license": "MIT",
      "devDependencies": {
diff --git a/package.json b/package.json
index 701c20d7..d5556561 100644
--- a/package.json
+++ b/package.json
@@ -40,6 +40,19 @@
         "type": "boolean",
         "default": true,
         "description": "Whether to show the fixed-format ruler by default."
+      },
+      "vscode-rpgle.logLevel": {
+        "type": "string",
+        "default": "info",
+        "enum": ["none", "error", "warn", "info", "debug"],
+        "enumDescriptions": [
+          "No logging",
+          "Only errors",
+          "Errors and warnings",
+          "Errors, warnings, and info (default)",
+          "All messages including debug output"
+        ],
+        "description": "Controls the verbosity of RPGLE language server logging output."
       }
     }
   },
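
For reference, a hypothetical session with the new logging helpers. The URI, timestamps, and durations below are invented for illustration, but the output format follows directly from `getDisplayName`, `setLogLevel`, and `logWithTimestamp` as added in connection.ts above:

```ts
// Hypothetical usage — all concrete values are illustrative only
setLogLevel(LogLevel.DEBUG);
// prints: [09:15:02.417] [NONE] Log level set to 4 (DEBUG)

getDisplayName('file:///home/ORD500%23A.rpgleinc?readonly=true');
// returns 'ORD500#A.rpgleinc' — query string removed, %23 decoded to #

logWithTimestamp(`Include fetch started: ORDERS (from ORD500#A.rpgleinc)`, LogLevel.DEBUG);
// prints: [09:15:02.431] [DEBUG] Include fetch started: ORDERS (from ORD500#A.rpgleinc)
```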
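
Reviewer note: the reworked `documents.onDidChangeContent` handler combines three mechanisms — a per-document debounce timer, a monotonically increasing parse ID that lets stale in-flight results be discarded, and a single queued re-parse for edits that arrive while a parse is running. A minimal, self-contained sketch of that pattern follows; the names (`ParseState`, `scheduleParse`, `parse`, `publish`) are illustrative and are not exports of this PR:

```ts
// Sketch of the debounce + parse-ID pattern, reduced to its core.
type ParseState = {
  timer?: ReturnType<typeof setTimeout>;
  parseId: number;        // bumped on every change; older in-flight results are stale
  isParsing: boolean;     // a parse is currently running
  needsReparse: boolean;  // an edit arrived mid-parse; run once more afterwards
};

const states = new Map<string, ParseState>();

export function scheduleParse(
  uri: string,
  parse: () => Promise<void>,  // the expensive work (e.g. a full document parse)
  publish: () => void,         // pushes results (e.g. refreshed diagnostics)
  delayMs = 300                // 0 for first open / include files, 300 for edits
): void {
  const state = states.get(uri) ?? { parseId: 0, isParsing: false, needsReparse: false };
  states.set(uri, state);

  state.parseId++;                             // invalidate any in-flight parse
  const myId = state.parseId;

  if (state.timer) clearTimeout(state.timer);  // restart the debounce window

  state.timer = setTimeout(async () => {
    state.timer = undefined;

    if (state.isParsing) {
      state.needsReparse = true;               // coalesce into one follow-up parse
      return;
    }

    state.isParsing = true;
    try {
      await parse();
      if (myId === state.parseId) publish();   // only the latest parse publishes
    } finally {
      state.isParsing = false;
      if (state.needsReparse) {
        state.needsReparse = false;
        scheduleParse(uri, parse, publish, 0); // re-run with the latest text
      }
    }
  }, delayMs);
}
```

Running at most one parse per document, with a single coalesced follow-up, keeps diagnostics responsive without letting rapid keystrokes pile up concurrent parses.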