def _loadMemoized(self):
    """Best-effort attempt to load a memoized result for this statistic from disk.

    Resets ``self.resultLoadedFromDisk`` to False before the attempt;
    ``ResultsMemoizer.loadResult`` is presumably responsible for setting it
    on success (TODO confirm against ResultsMemoizer). An IOError — e.g.
    another process concurrently writing the same data — is logged once and
    otherwise swallowed, so callers proceed without a memoized result.
    """
    self.resultLoadedFromDisk = False
    try:
        ResultsMemoizer.loadResult(self)
    except IOError as e:  # 'as' form: valid on Python 2.6+ AND Python 3 (old ', e' form is 2-only)
        logMessageOnce(
            'No memoization due to IOError (probably because some other process are writing same data): ' + str(e))
def __iter__(self):
    """Generate pickle-wrapped statistic arguments for each user bin.

    For every bin, a statistic instance is built and a disk-memoized
    result is loaded for it if one exists; only bins whose result was
    NOT restored from disk produce a StatisticArgumentPickleWrapper.
    """
    parent_job = self.job
    for region in parent_job._userBinSource:
        stat = parent_job._statClass(region, parent_job._track,
                                     parent_job._track2, **parent_job._kwArgs)
        ResultsMemoizer.loadResult(stat)

        # hasattr guard: stat objects may lack the flag entirely.
        already_loaded = hasattr(stat, "resultLoadedFromDisk") and stat.resultLoadedFromDisk
        if not already_loaded:
            yield StatisticArgumentPickleWrapper(parent_job._statClass, region,
                                                 parent_job._track,
                                                 parent_job._track2,
                                                 **parent_job._kwArgs)
def __iter__(self):
    """Generate pickle-wrapped statistic arguments for each user bin
    whose result is not already memoized on disk.
    """
    job = self.job
    for bin in job._userBinSource:
        stat = job._statClass(bin, job._track, job._track2, **job._kwArgs)
        ResultsMemoizer.loadResult(stat)
        # BUGFIX: resultLoadedFromDisk is a boolean attribute (it is assigned
        # `self.resultLoadedFromDisk = False` in _loadMemoized), not a method,
        # so calling it as `stat.resultLoadedFromDisk()` raised TypeError.
        # getattr with a False default also tolerates stat objects that never
        # had the flag set, matching the sibling __iter__ implementation.
        if getattr(stat, 'resultLoadedFromDisk', False):
            continue
        yield StatisticArgumentPickleWrapper(job._statClass, bin,
                                             job._track, job._track2,
                                             **job._kwArgs)
def _loadMemoizedResult(self):
    """Reset the loaded-from-disk flag, then ask ResultsMemoizer to
    restore a memoized result for this statistic (if one exists).
    """
    # Order matters: the flag is cleared first so that loadResult can
    # (presumably) set it only when a result is actually restored.
    self.resetResultLoadedFromDiskFlag()
    ResultsMemoizer.loadResult(self)
def _loadMemoized(self):
    """Best-effort attempt to load a memoized result for this statistic from disk.

    Resets ``self.resultLoadedFromDisk`` to False before the attempt;
    ``ResultsMemoizer.loadResult`` is presumably responsible for setting it
    on success (TODO confirm against ResultsMemoizer). An IOError — e.g.
    another process concurrently writing the same data — is logged once and
    otherwise swallowed, so callers proceed without a memoized result.
    """
    self.resultLoadedFromDisk = False
    try:
        ResultsMemoizer.loadResult(self)
    except IOError as e:  # 'as' form: valid on Python 2.6+ AND Python 3 (old ', e' form is 2-only)
        logMessageOnce(
            'No memoization due to IOError (probably because some other process are writing same data): ' + str(e))