def getChangedFiles(self):
    '''
    Return the list of tracked files that changed since the last cache write.

    Slower than haveFilesChanged(), which short-circuits on the first change;
    this walks every tracked file so callers can report all of them.
    Depending on CHECK_MTIMES / CHECK_HASHES, a file is reported when it is
    missing, has a differing mtime, has a differing hash, or is new.

    :returns: list of absolute filenames (deduplicated, discovery order).
    '''
    if self.lastTargetHash == '':
        self.readCache()
    o = []
    # Default to empty so the "new file" sweep below is safe even when
    # CHECK_MTIMES is disabled (original raised NameError in that case).
    curFileTimes = {}
    if self.CHECK_MTIMES:
        curFileTimes = self.serialize_file_times()
        for filename, mtime in self.lastFileTimes.items():
            filename = os.path.abspath(filename)
            if filename not in curFileTimes:
                log.debug('File %s is currently missing.', filename)
                o += [filename]
                # BUGFIX: original fell through to curFileTimes[filename]
                # here, raising KeyError for missing files.
                continue
            if curFileTimes[filename] != mtime:
                log.debug('File %s has a changed mtime.', filename)
                o += [filename]
    if self.CHECK_HASHES:
        curFileHashes = self.serialize_file_hashes()
        for filename, hashed in self.lastFileHashes.items():
            filename = os.path.abspath(filename)
            if filename not in curFileHashes:
                log.debug('File %s is currently missing.', filename)
                o += [filename]
                # BUGFIX: same KeyError fall-through as the mtime branch.
                continue
            if curFileHashes[filename] != hashed:
                log.debug('File %s has a changed hash.', filename)
                o += [filename]
    # Files present now but absent from the last snapshot are new.
    for filename in curFileTimes:
        filename = os.path.abspath(filename)
        if filename not in self.lastFileTimes:
            log.debug('File %s is new.', filename)
            o += [filename]
    # Deduplicate while preserving first-seen order (a file can trip both
    # the mtime and the hash checks).
    return list(dict.fromkeys(o))
def checkMTimes(self, inputs, targets, config=None):
    '''
    Decide whether *targets* must be rebuilt from *inputs*.

    Rebuild (return True) when any target is missing, the serialized
    *config* hash differs from the cached one, or the newest input is at
    least as new as the newest target. Otherwise return False.

    :param inputs: input filenames (may be a lambda; resolved via callLambda).
    :param targets: output filenames produced by this build step.
    :param config: optional config object; YAML-serialized and hashed so
        config changes force a rebuild.
    '''
    inputs = callLambda(inputs)
    for target in targets:
        if not os.path.isfile(target):
            log.debug('%s does not exist.', target)
            return True
    # BUGFIX: configHash was only assigned inside `if config is not None`,
    # but writeHash() and the comparison below read it unconditionally,
    # so calling with config=None (the default) raised NameError.
    configHash = ''
    if config is not None:
        s = StringIO()
        yaml.dump(config, s)
        configHash = hashlib.md5(s.getvalue().encode('utf-8')).hexdigest()
    # The cache file is named after the target set, so each distinct set of
    # targets gets its own config-hash record.
    targetHash = hashlib.md5(';'.join(targets).encode('utf-8')).hexdigest()

    def writeHash():
        # Persist the current config hash for the next run's comparison.
        with open(configcachefile, 'w') as f:
            f.write(configHash)

    os_utils.ensureDirExists('.build')
    configcachefile = os.path.join('.build', targetHash)
    if not os.path.isfile(configcachefile):
        writeHash()
        log.debug('%s: Target cache doesn\'t exist.', self.name)
        return True
    oldConfigHash = ''
    with open(configcachefile, 'r') as f:
        oldConfigHash = f.readline().strip()
    if oldConfigHash != configHash:
        writeHash()
        log.debug('%s: Target config changed.', self.name)
        return True
    # Find the newest target and the newest input; missing files are skipped.
    target_mtime = 0  # must be higher
    newest_target = None
    inputs_mtime = 0
    newest_input = None
    for infilename in targets:
        infilename = callLambda(infilename)
        if os.path.isfile(infilename):
            c_mtime = os.path.getmtime(infilename)
            if c_mtime > target_mtime:
                target_mtime = c_mtime
                newest_target = infilename
    for infilename in inputs:
        infilename = callLambda(infilename)
        if os.path.isfile(infilename):
            c_mtime = os.path.getmtime(infilename)
            if c_mtime > inputs_mtime:
                inputs_mtime = c_mtime
                newest_input = infilename
    # No readable inputs at all is treated as stale (conservative rebuild).
    if newest_input is None or target_mtime <= inputs_mtime:
        log.debug("%s is newer than %s by %ds!", newest_input, newest_target, inputs_mtime - target_mtime)
        return True
    else:
        log.debug("%s is older than %s by %ds!", newest_input, newest_target, target_mtime - inputs_mtime)
        return False
def __init__(self, hkey, key):
    '''
    Lazily-opened wrapper around a single Windows registry key.

    :param hkey: registry hive handle (e.g. HKEY_LOCAL_MACHINE).
    :param key: subkey path to operate on.
    '''
    log.debug('Python version: 0x%0.8X' % sys.hexversion)
    # Select the registry module matching the interpreter generation.
    if sys.hexversion > 0x03000000:
        import winreg  #IGNORE:import-error
    else:
        import _winreg as winreg  #IGNORE:import-error
    self.winreg = winreg
    self.hkey = hkey
    self.key = key
    # Registry handles are opened on demand, not in the constructor.
    self._reg = self._key = None
def canBuild(self, maestro, keys):
    '''
    Report whether every dependency of this target has been completed.

    On the first call (before lambdas are resolved), any of this target's
    files that another target provides is promoted to a dependency.

    :param maestro: the build coordinator (source of targetsCompleted).
    :param keys: every key provided by any known target.
    :returns: True when all dependencies are satisfied.
    '''
    if not self._lambdas_called:
        for candidate in callLambda(self.files):
            if candidate in keys and candidate not in self.dependencies:
                self.dependencies.append(candidate)
    unmet = [d for d in set(self.dependencies) if d not in maestro.targetsCompleted]
    if unmet:
        # Only the first unmet dependency is logged, matching prior output.
        log.debug('%s: Waiting on %s.', self.name, unmet[0])
        return False
    log.debug('%s: CAN BUILD!', self.name)
    return True
def checkForCycles(self):
    '''
    Detect dependency cycles among all build targets.

    Builds a graph of target IDs and runs Tarjan's strongly-connected-
    components algorithm over it; any SCC with more than one member is a
    cycle. Exits the process on malformed dependencies (non-string deps,
    or deps with no provider).

    :returns: True if at least one cycle was found, else False.
    '''
    with log.info('Checking for dependency cycles...'):
        # Using Tarjan's Strongly Connected Cycles algorithm
        tg = TarjanGraph()
        # First, I need to convert all BuildTargets to TarjanGraphVertexes.
        for bt in self.alltargets:
            refs = []
            for depend in bt.dependencies:
                # Dependencies must be plain key strings at this point.
                if not isinstance(depend, str):
                    log.critical(
                        'Build target %s has invalid dependency %s.',
                        bt.name, depend)
                    sys.exit(1)
                # Collect every target that provides this dependency key.
                providers = []
                for obt in self.alltargets:
                    if depend in obt.provides():
                        #log.info('%s provides %s, which %s needs', obt.name, depend, bt.name)
                        providers += [obt]
                if len(providers) > 1:
                    # Ambiguous: multiple providers. Warn, then (below)
                    # arbitrarily use the last one found.
                    log.warning(
                        'Build target %s has %d providers for dependency %s: %r',
                        bt.name, len(providers), depend,
                        [x.name for x in providers])
                elif len(providers) == 0:
                    log.critical(
                        'Build target %s has no providers for dependency %s: %r',
                        bt.name, depend, [x.name for x in providers])
                    sys.exit(1)
                refs.append(providers[-1].ID)
            with log.debug('Dependency tree:'):
                with log.debug('[%s] (%d,[%s])', bt.name, bt.ID, ', '.join([str(x) for x in refs])):
                    for refID in refs:
                        # NOTE(review): indexing alltargets by ID assumes a
                        # target's ID equals its list index — TODO confirm.
                        log.debug(self.alltargets[refID].name)
            tg.add_edge(bt.ID, refs)
        # Run the algo
        tg.SCC()
        # Sort through the crap that falls out
        foundCycles = False
        for cycle in tg.cycles:
            # Single-member SCCs are just acyclic nodes; only larger
            # components indicate a genuine dependency cycle.
            if len(cycle) > 1:
                log.critical('CYCLE FOUND: %r', [
                    '#{} ({})'.format(self.alltargets[btid].ID,
                                      self.alltargets[btid].name)
                    for btid in cycle
                ])
                foundCycles = True
        return foundCycles
def __init__(self, scope):
    '''
    Bind to the Windows environment-variable registry location for *scope*.

    :param scope: 'user' (HKCU\\Environment) or 'system'
        (HKLM Session Manager environment key).
    '''
    log.debug('Python version: 0x%0.8X' % sys.hexversion)
    # Select the registry module matching the interpreter generation.
    if sys.hexversion > 0x03000000:
        import winreg  #IGNORE:import-error
    else:
        import _winreg as winreg  #IGNORE:import-error
    self.winreg = winreg
    assert scope in ('user', 'system')
    self.scope = scope
    if scope == 'user':
        self.root, self.subkey = self.winreg.HKEY_CURRENT_USER, 'Environment'
    else:
        self.root, self.subkey = (
            self.winreg.HKEY_LOCAL_MACHINE,
            'SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment')
def is_stale(self):
    '''
    Return True when this target needs rebuilding.

    A target is stale when its target hash, its config hash, or any of its
    tracked files have changed since the last cached state.
    '''
    if self.lastTargetHash == '':
        self.readCache()
    # Hoisted: the original called self.getTargetHash() twice (once for the
    # comparison, once for the log line).
    currentTargetHash = self.getTargetHash()
    if currentTargetHash != self.lastTargetHash:
        with log.debug('[is stale] Target hash changed'):
            log.debug('self.getTargetHash(): %r', currentTargetHash)
            log.debug('self.lastTargetHash: %r', self.lastTargetHash)
            return True
    if self.getConfigHash() != self.lastConfigHash:
        log.debug('[is stale] Config hash changed')
        return True
    if self.haveFilesChanged():
        #self.checkMTimes(self.files+self.dependencies, self.provides(), config=self.get_config())
        return True
    return False
def run(self, verbose=None):
    '''
    Execute the build: schedule every target until all are completed.

    Repeatedly sweeps all targets, building each one whose dependencies are
    satisfied, until everything in self.targets is completed or 100 sweeps
    have elapsed (treated as a dependency-resolution failure). Afterwards,
    prunes cache files that no longer belong to any target.

    :param verbose: when not None, overrides self.verbose.
    '''
    if verbose is not None:
        self.verbose = verbose
    # Detect (and warn about) duplicate target registrations. NOTE(review):
    # the deduplicated list is built but never assigned back to
    # self.targets — presumably only the warning is intended; confirm.
    new_targets = []
    for t in self.targets:
        if t in new_targets:
            log.warn('Target %s added more than once.', t)
        else:
            new_targets.append(t)
    # Abort outright on dependency cycles.
    if self.checkForCycles():
        return
    # keys: everything any target can provide; alldeps: everything needed.
    keys = []
    alldeps = []
    for target in self.alltargets:
        keys += target.provides()
        alldeps += target.dependencies
        target.built = False
    alldeps = list(set(alldeps))
    # Redundant
    #for target in self.alltargets:
    #    for reqfile in callLambda(target.files):
    #        if reqfile in keys and reqfile not in target.dependencies:
    #            target.dependencies.append(reqfile)
    loop = 0
    #progress = tqdm(total=len(self.targets), unit='target', desc='Building', leave=False)
    self.targetsCompleted = []
    self.targetsDirty = []
    # Sweep until every requested target is completed, capped at 100 sweeps
    # to guarantee termination when dependencies cannot be resolved.
    while len(self.targets) > len(self.targetsCompleted) and loop < 100:
        loop += 1
        for bt in self.alltargets:
            bt.maestro = self
            # Build only if ready AND it still provides something
            # not yet marked completed.
            if bt.canBuild(self, keys) and any([
                target not in self.targetsCompleted
                for target in bt.provides()
            ]):
                try:
                    bt.try_build()
                    # progress.update(1)
                    self.targetsCompleted += bt.provides()
                    if bt.dirty:
                        self.targetsDirty += bt.provides()
                except Exception as e:
                    # Persist state before bailing so a re-run can resume.
                    bt._set_failed()
                    self._write_targets()
                    log.critical('An exception occurred, build halted.')
                    log.exception(e)
                    return
                except KeyboardInterrupt:
                    # Reachable despite following `except Exception`:
                    # KeyboardInterrupt derives from BaseException.
                    bt._set_failed()
                    self._write_targets()
                    log.critical('Cancelled via KeyboardInterrupt.')
                    return
                bt.built = True
                log.debug('%d > %d, loop = %d', len(self.targets),
                          len(self.targetsCompleted), loop)
        log.debug('%d > %d, loop = %d', len(self.targets),
                  len(self.targetsCompleted), loop)
    # progress.close()
    self._write_targets()
    # Hitting the sweep cap means some targets never became buildable:
    # report what is unresolved and what is orphaned.
    if loop >= 100:
        incompleteTargets = [
            t for t in self.targets if t not in self.targetsCompleted
        ]
        if len(incompleteTargets) > 0:
            with log.critical(
                    "Failed to resolve dependencies. The following targets are left unresolved. Exiting."
            ):
                for t in incompleteTargets:
                    log.critical(t)
        orphanDeps = [t for t in alldeps if t not in self.targets]
        if len(orphanDeps) > 0:
            with log.critical(
                    "Failed to resolve dependencies. The following dependencies are orphaned. Exiting."
            ):
                for t in orphanDeps:
                    log.critical(t)
        #sys.exit(1)
    # Remove cache files that no current target owns.
    with log.info('Cleaning up...'):
        cachefiles = []
        for bt in self.alltargets:
            cachefiles.append(os.path.basename(bt.getCacheFile()))
        for filename in os.listdir(os.path.join(self.builddir, 'cache')):
            if filename not in cachefiles:
                filename = os.path.join(self.builddir, 'cache', filename)
                log.debug('<red>RM</red> %s', filename)
                os.remove(filename)
def haveFilesChanged(self):
    '''
    Return True as soon as any tracked file is found to have changed.

    Faster companion to getChangedFiles(): short-circuits on the first
    missing, modified, new, or dirtied file. mtime comparisons use a
    0.1-second tolerance to absorb filesystem timestamp granularity.
    '''
    if self.lastTargetHash == '':
        self.readCache()
    curFileTimes = self.serialize_file_times()
    if self.CHECK_MTIMES:
        for filename, mtime in self.lastFileTimes.items():
            filename = os.path.abspath(filename)
            if filename not in curFileTimes:
                log.debug('File %s is currently missing.', filename)
                return True
            if abs(curFileTimes[filename] - mtime) > 0.1:
                # BUGFIX: log line previously claimed "> 1" and truncated
                # float mtimes with %d; now matches the 0.1s threshold.
                log.debug('File %s has a changed mtime. abs(%f - %f) > 0.1',
                          filename, curFileTimes[filename], mtime)
                return True
    if self.CHECK_HASHES:
        curFileHashes = self.serialize_file_hashes()
        for filename, hashed in self.lastFileHashes.items():
            filename = os.path.abspath(filename)
            if filename not in curFileHashes:
                log.debug('File %s is currently missing.', filename)
                return True
            if curFileHashes[filename] != hashed:
                log.debug('File %s has a changed hash. (%s != %s)',
                          filename, curFileHashes[filename], hashed)
                return True
    for filename in curFileTimes:
        filename = os.path.abspath(filename)
        if filename in self.maestro.targetsDirty:
            log.debug('File %s was dirtied by another BuildTarget.', filename)
            return True
        if filename not in self.lastFileTimes:
            log.debug('File %s is new.', filename)
            return True
    return False