diff --git a/clcache/__main__.py b/clcache/__main__.py
index 8b01591b..7f9f48a1 100644
--- a/clcache/__main__.py
+++ b/clcache/__main__.py
@@ -74,7 +74,7 @@
 # `objectHash`: hash of the object in cache
 ManifestEntry = namedtuple('ManifestEntry', ['includeFiles', 'includesContentHash', 'objectHash'])
 
-CompilerArtifacts = namedtuple('CompilerArtifacts', ['objectFilePath', 'stdout', 'stderr'])
+CompilerArtifacts = namedtuple('CompilerArtifacts', ['objectFilePath', 'pchFilePath', 'stdout', 'stderr'])
 
 def printBinary(stream, rawData):
     with OUTPUT_LOCK:
@@ -231,6 +231,14 @@ def allSectionsLocked(repository):
             section.lock.release()
 
 
+def readPCHHash(pchFile):
+    with open(pchFile+".clcache", 'r') as f:
+        return f.read()
+
+def writePCHHash(pchFile, hash):
+    with open(pchFile+".clcache", 'w') as f:
+        f.write('{}'.format(hash))
+
 class ManifestRepository(object):
     # Bump this counter whenever the current manifest file format changes.
     # E.g. changing the file format from {'oldkey': ...} to {'newkey': ...} requires
@@ -293,6 +301,10 @@ def getManifestHash(compilerBinary, commandLine, sourceFile):
 
         additionalData = "{}|{}|{}".format(
             compilerHash, commandLine, ManifestRepository.MANIFEST_FILE_FORMAT_VERSION)
+
+        if 'Yu' in arguments:
+            pchFile = CommandLineAnalyzer.getPchFileName(arguments)
+            additionalData += readPCHHash(pchFile)
         return getFileHash(sourceFile, additionalData)
 
     @staticmethod
@@ -366,6 +378,7 @@ def forPath(path):
 
 class CompilerArtifactsSection(object):
     OBJECT_FILE = 'object'
+    PCH_FILE = 'pch'
     STDOUT_FILE = 'output.txt'
     STDERR_FILE = 'stderr.txt'
 
@@ -395,6 +408,10 @@ def setEntry(self, key, artifacts):
         if artifacts.objectFilePath is not None:
             copyOrLink(artifacts.objectFilePath,
                        os.path.join(tempEntryDir, CompilerArtifactsSection.OBJECT_FILE))
+        if artifacts.pchFilePath is not None:
+            copyOrLink(artifacts.pchFilePath,
+                       os.path.join(tempEntryDir, CompilerArtifactsSection.PCH_FILE))
+            writePCHHash(artifacts.pchFilePath, key)
         setCachedCompilerConsoleOutput(os.path.join(tempEntryDir, CompilerArtifactsSection.STDOUT_FILE),
                                        artifacts.stdout)
         if artifacts.stderr != '':
@@ -408,6 +425,7 @@ def getEntry(self, key):
         cacheEntryDir = self.cacheEntryDir(key)
         return CompilerArtifacts(
             os.path.join(cacheEntryDir, CompilerArtifactsSection.OBJECT_FILE),
+            os.path.join(cacheEntryDir, CompilerArtifactsSection.PCH_FILE),
             getCachedCompilerConsoleOutput(os.path.join(cacheEntryDir, CompilerArtifactsSection.STDOUT_FILE)),
             getCachedCompilerConsoleOutput(os.path.join(cacheEntryDir, CompilerArtifactsSection.STDERR_FILE))
         )
@@ -776,9 +794,6 @@ def registerCallWithMultipleSourceFiles(self):
     def numCallsWithPch(self):
         return self._stats[Statistics.CALLS_WITH_PCH]
 
-    def registerCallWithPch(self):
-        self._stats[Statistics.CALLS_WITH_PCH] += 1
-
     def numCallsForLinking(self):
         return self._stats[Statistics.CALLS_FOR_LINKING]
 
@@ -871,10 +886,6 @@ class CalledForLinkError(AnalysisError):
     pass
 
 
-class CalledWithPchError(AnalysisError):
-    pass
-
-
 class ExternalDebugInfoError(AnalysisError):
     pass
 
@@ -1266,6 +1277,18 @@ def parseArgumentsAndInputFiles(cmdline):
 
         return dict(arguments), inputFiles
 
+    @staticmethod
+    def getPchFileName(options):
+        if 'Fp' in options:
+            return options['Fp'][0]
+        if 'Yc' in options:
+            option = options['Yc'][0]
+        elif 'Yu' in options:
+            option = options['Yu'][0]
+        else:
+            return None
+        return basenameWithoutExtension(option) + '.pch'
+
     @staticmethod
     def analyze(cmdline: List[str]) -> Tuple[List[Tuple[str, str]], List[str]]:
         options, inputFiles = CommandLineAnalyzer.parseArgumentsAndInputFiles(cmdline)
@@ -1294,9 +1317,6 @@ def analyze(cmdline: List[str]) -> Tuple[List[Tuple[str, str]], List[str]]:
         if 'Zi' in options:
             raise ExternalDebugInfoError()
 
-        if 'Yc' in options or 'Yu' in options:
-            raise CalledWithPchError()
-
         if 'link' in options or 'c' not in options:
             raise CalledForLinkError()
 
@@ -1316,6 +1336,11 @@ def analyze(cmdline: List[str]) -> Tuple[List[Tuple[str, str]], List[str]]:
             # Generate from .c/.cpp filenames
             objectFiles = [os.path.join(prefix, basenameWithoutExtension(f)) + '.obj' for f, _ in inputFiles]
 
+        if 'Yc' in options:
+            assert len(objectFiles) == 1
+            pchFile = CommandLineAnalyzer.getPchFileName(options)
+            objectFiles = [(objectFiles[0], pchFile)]
+
         printTraceStatement("Compiler source files: {}".format(inputFiles))
         printTraceStatement("Compiler object file: {}".format(objectFiles))
         return inputFiles, objectFiles
@@ -1397,8 +1422,7 @@ def printStatistics(cache):
     called for linking         : {}
     called for external debug  : {}
     called w/o source          : {}
-    called w/ multiple sources : {}
-    called w/ PCH              : {}""".strip()
+    called w/ multiple sources : {}""".strip()
 
     with cache.statistics.lock, cache.statistics as stats, cache.configuration as cfg:
         print(template.format(
@@ -1417,7 +1441,6 @@ def printStatistics(cache):
             stats.numCallsForExternalDebugInfo(),
             stats.numCallsWithoutSourceFile(),
             stats.numCallsWithMultipleSourceFiles(),
-            stats.numCallsWithPch(),
         ))
 
 
@@ -1481,7 +1504,8 @@ def addObjectToCache(stats, cache, cachekey, artifacts):
     printTraceStatement("Adding file {} to cache using key {}".format(artifacts.objectFilePath, cachekey))
 
     cache.setEntry(cachekey, artifacts)
-    stats.registerCacheEntry(os.path.getsize(artifacts.objectFilePath))
+    stats.registerCacheEntry(os.path.getsize(artifacts.objectFilePath) +
+                             (os.path.getsize(artifacts.pchFilePath) if artifacts.pchFilePath else 0))
 
     with cache.configuration as cfg:
         return stats.currentCacheSize() >= cfg.maximumCacheSize()
@@ -1489,6 +1513,7 @@ def processCacheHit(cache, objectFile, cachekey):
     printTraceStatement("Reusing cached object for key {} for object file {}".format(cachekey, objectFile))
+    objectFile, pchFile = objectFile if isinstance(objectFile, tuple) else (objectFile, None)
 
     with cache.lockFor(cachekey):
         with cache.statistics.lock, cache.statistics as stats:
             stats.registerCacheHit()
 
@@ -1499,6 +1524,10 @@ def processCacheHit(cache, objectFile, cachekey):
 
         cachedArtifacts = cache.getEntry(cachekey)
         copyOrLink(cachedArtifacts.objectFilePath, objectFile)
+        if pchFile is not None:
+            copyOrLink(cachedArtifacts.pchFilePath, pchFile)
+            writePCHHash(pchFile, cachekey)
+
         printTraceStatement("Finished. Exit code 0")
Exit code 0") return 0, cachedArtifacts.stdout, cachedArtifacts.stderr, False @@ -1611,9 +1640,6 @@ def processCompileRequest(cache, compiler, args): except MultipleSourceFilesComplexError: printTraceStatement("Cannot cache invocation as {}: multiple source files found".format(cmdLine)) updateCacheStatistics(cache, Statistics.registerCallWithMultipleSourceFiles) - except CalledWithPchError: - printTraceStatement("Cannot cache invocation as {}: precompiled headers in use".format(cmdLine)) - updateCacheStatistics(cache, Statistics.registerCallWithPch) except CalledForLinkError: printTraceStatement("Cannot cache invocation as {}: called for linking".format(cmdLine)) updateCacheStatistics(cache, Statistics.registerCallForLinking) @@ -1756,13 +1782,15 @@ def processNoDirect(cache, objectFile, compiler, cmdLine, environment): def ensureArtifactsExist(cache, cachekey, reason, objectFile, compilerResult, extraCallable=None): cleanupRequired = False returnCode, compilerOutput, compilerStderr = compilerResult + objectFile, pchFile = objectFile if isinstance(objectFile, tuple) else (objectFile, None) + correctCompiliation = (returnCode == 0 and os.path.exists(objectFile)) with cache.lockFor(cachekey): if not cache.hasEntry(cachekey): with cache.statistics.lock, cache.statistics as stats: reason(stats) if correctCompiliation: - artifacts = CompilerArtifacts(objectFile, compilerOutput, compilerStderr) + artifacts = CompilerArtifacts(objectFile, pchFile, compilerOutput, compilerStderr) cleanupRequired = addObjectToCache(stats, cache, cachekey, artifacts) if extraCallable and correctCompiliation: extraCallable()