diff --git a/change/change-8f44ede0-283e-4c95-bd01-a6e5d6c22499.json b/change/change-8f44ede0-283e-4c95-bd01-a6e5d6c22499.json
new file mode 100644
index 000000000..9f711befd
--- /dev/null
+++ b/change/change-8f44ede0-283e-4c95-bd01-a6e5d6c22499.json
@@ -0,0 +1,11 @@
+{
+  "changes": [
+    {
+      "type": "patch",
+      "comment": "Include filename and dep names in hash calculations",
+      "packageName": "@lage-run/hasher",
+      "email": "dobes@formative.com",
+      "dependentChangeType": "patch"
+    }
+  ]
+}
\ No newline at end of file
diff --git a/packages/hasher/src/FileHasher.ts b/packages/hasher/src/FileHasher.ts
index 1f7ebc684..9aa8e41a2 100644
--- a/packages/hasher/src/FileHasher.ts
+++ b/packages/hasher/src/FileHasher.ts
@@ -107,12 +107,18 @@ export class FileHasher {
     const updatedHashes = fastHash(updatedFiles, { cwd: this.options.root, concurrency: 4 }) ?? {};
 
     for (const [file, hash] of Object.entries(updatedHashes)) {
-      const stat = fs.statSync(path.join(this.options.root, file), { bigint: true });
-      this.#store[file] = {
-        mtime: stat.mtimeMs,
-        size: Number(stat.size),
-        hash: hash ?? "",
-      };
+      try {
+        const stat = fs.statSync(path.join(this.options.root, file), { bigint: true });
+        this.#store[file] = {
+          mtime: stat.mtimeMs,
+          size: Number(stat.size),
+          hash: hash ?? "",
+        };
+      } catch(e) {
+        if(e.code !== "ENOENT") {
+          throw e;
+        }
+      }
 
       hashes[file] = hash ?? "";
     }
diff --git a/packages/hasher/src/PackageTree.ts b/packages/hasher/src/PackageTree.ts
index e770093bc..75ee26202 100644
--- a/packages/hasher/src/PackageTree.ts
+++ b/packages/hasher/src/PackageTree.ts
@@ -121,14 +121,26 @@ export class PackageTree {
     const key = `${packageName}\0${patterns.join("\0")}`;
 
     if (!this.#memoizedPackageFiles[key]) {
-      const packagePatterns = patterns.map((pattern) => {
-        if (pattern.startsWith("!")) {
-          return `!${path.join(packagePath, pattern.slice(1)).replace(/\\/g, "/")}`;
+      const packagePatterns: string[] = [];
+      const simplePaths: string[] = [];
+      for(const pattern of patterns) {
+        // If the input is a pattern, we have to run micromatch to convert that into a list of files
+        if(/[{}*?\[\]!+()]|@\(/.test(pattern)) {
+          if (pattern.startsWith("!")) {
+            packagePatterns.push(`!${path.join(packagePath, pattern.slice(1)).replace(/\\/g, "/")}`);
+          } else {
+            packagePatterns.push(path.join(packagePath, pattern).replace(/\\/g, "/"));
+          }
+        } else {
+          // No special characters, so no need to do pattern matching, just take the file exactly as is, and
+          // assume it could/should exist
+          simplePaths.push(path.join(packagePath, pattern));
         }
-
-        return path.join(packagePath, pattern).replace(/\\/g, "/");
-      });
-      this.#memoizedPackageFiles[key] = micromatch(packageFiles, packagePatterns, { dot: true });
+      }
+      this.#memoizedPackageFiles[key] = [
+        ...simplePaths,
+        ...(packagePatterns.length ? micromatch(packageFiles, packagePatterns, { dot: true }) : []),
+      ].sort();
     }
 
     return this.#memoizedPackageFiles[key];
diff --git a/packages/hasher/src/TargetHasher.ts b/packages/hasher/src/TargetHasher.ts
index cb3f6b7f0..d2e4bb686 100644
--- a/packages/hasher/src/TargetHasher.ts
+++ b/packages/hasher/src/TargetHasher.ts
@@ -174,9 +174,9 @@ export class TargetHasher {
     }
 
     const files = await globAsync(target.inputs, { cwd: root });
-    const fileFashes = hash(files, { cwd: root }) ?? {};
+    const fileHashes = hash(files, { cwd: root }) ?? {};
 
-    const hashes = Object.values(fileFashes) as string[];
+    const hashes = Object.entries(fileHashes).map(p => p.join(' '));
 
     return hashStrings(hashes);
   }
@@ -201,18 +201,20 @@ export class TargetHasher {
     const fileHashes = this.fileHasher.hash(files) ?? {}; // this list is sorted by file name
 
     // get target hashes
-    const targetDepHashes = target.dependencies?.sort().map((targetDep) => this.targetHashes[targetDep]);
+    const targetDepHashes = target.dependencies?.sort().map(
+      (targetDep) => [targetDep, this.targetHashes[targetDep]].join(' ')
+    );
 
     const globalFileHashes = await this.getEnvironmentGlobHashes(root, target);
 
     const combinedHashes = [
       // Environmental hashes
-      ...Object.values(globalFileHashes),
+      ...Object.entries(globalFileHashes).map(p => p.join(' ')),
       `${target.id}|${JSON.stringify(this.options.cliArgs)}`,
       this.options.cacheKey || "",
 
       // File content hashes based on target.inputs
-      ...Object.values(fileHashes),
+      ...Object.entries(fileHashes).map(p => p.join(' ')),
 
       // Dependency hashes
       ...resolvedDependencies,
diff --git a/packages/hasher/src/__tests__/TargetHasher.test.ts b/packages/hasher/src/__tests__/TargetHasher.test.ts
index f8411792d..75c4b090f 100644
--- a/packages/hasher/src/__tests__/TargetHasher.test.ts
+++ b/packages/hasher/src/__tests__/TargetHasher.test.ts
@@ -103,6 +103,29 @@ describe("The main Hasher class", () => {
     monorepo2.cleanup();
   });
 
+  it("creates different hashes when a src file identified without any wildcard is changed", async () => {
+    const monorepo1 = await setupFixture("monorepo");
+    const hasher = new TargetHasher({ root: monorepo1.root, environmentGlob: [] });
+    const target = createTarget(monorepo1.root, "package-a", "build");
+    target.inputs = ["package.json", "src/index.ts"];
+    const hash = await getHash(hasher, target);
+
+    const monorepo2 = await setupFixture("monorepo");
+    const hasher2 = new TargetHasher({ root: monorepo2.root, environmentGlob: [] });
+    const target2 = createTarget(monorepo2.root, "package-a", "build");
+    target2.inputs = ["package.json", "src/index.ts"];
+
+    await monorepo2.commitFiles({ "packages/package-a/src/index.ts": "console.log('hello world');" });
+
+    const hash2 = await getHash(hasher2, target2);
+
+    expect(hash).not.toEqual(hash2);
+
+    monorepo1.cleanup();
+    monorepo2.cleanup();
+  });
+
+
   it("creates different hashes when a src file has changed for a dependency", async () => {
     const monorepo1 = await setupFixture("monorepo-with-deps");
     const hasher = new TargetHasher({ root: monorepo1.root, environmentGlob: [] });
@@ -172,4 +195,40 @@ describe("The main Hasher class", () => {
 
     monorepo1.cleanup();
   });
+
+  it("creates different hashes when file path is different but files do not exist", async () => {
+    const monorepo1 = await setupFixture("monorepo-with-global-files");
+    const hasher = new TargetHasher({ root: monorepo1.root, environmentGlob: [] });
+    const target = createTarget(monorepo1.root, "package-a", "build");
+    target.inputs = ["file1.txt"];
+    const target2 = createTarget(monorepo1.root, "package-a", "build");
+    target2.inputs = ["file2.txt"];
+
+    const hash = await getHash(hasher, target);
+    const hash2 = await getHash(hasher, target2);
+
+    expect(hash).not.toEqual(hash2);
+
+    monorepo1.cleanup();
+  });
+
+  it("creates different hashes when file path is different but file content is the same", async () => {
+    const content = "THIS IS CONTENT";
+    const monorepo1 = await setupFixture("monorepo-with-global-files");
+    const hasher = new TargetHasher({ root: monorepo1.root, environmentGlob: [] });
+    const target = createTarget(monorepo1.root, "package-a", "build");
+    target.inputs = ["file1.txt"];
+    fs.writeFileSync(path.join(monorepo1.root, "packages", "package-a", "file1.txt"), content);
+    const target2 = createTarget(monorepo1.root, "package-a", "build");
+    target2.inputs = ["file2.txt"];
+    fs.writeFileSync(path.join(monorepo1.root, "packages", "package-a", "file2.txt"), content);
+
+    const hash = await getHash(hasher, target);
+    const hash2 = await getHash(hasher, target2);
+
+    expect(hash).not.toEqual(hash2);
+
+    monorepo1.cleanup();
+  });
+
 });