def _loadSAjson(cacheFile, endpoint, label):
    """Load one SlaveAlloc dataset as parsed JSON.

    Reads from the on-disk cache file when it exists; otherwise fetches
    '<urlSlaveAlloc>/<endpoint>' and writes the result to the cache file
    for next time.

    :param cacheFile: path of the local JSON cache file
    :param endpoint:  SlaveAlloc REST endpoint name, e.g. 'pools'
    :param label:     human-readable dataset name used in log messages
    :return: the decoded JSON payload
    """
    if os.path.exists(cacheFile):
        log.info('loading SlaveAlloc %s data from cache' % label)
        # use 'with' so the handle is closed promptly (original leaked it)
        with open(cacheFile) as f:
            return json.load(f)
    # bug fix: the original masters branch logged "from cache" here too
    log.info('loading SlaveAlloc %s data' % label)
    data = json.loads(fetchUrl('%s/%s' % (urlSlaveAlloc, endpoint)))
    with open(cacheFile, 'w+') as f:
        json.dump(data, f)
    return data


def loadSAdata():
    """Populate the module-level pools, datacenters, masters and slaves
    dicts from SlaveAlloc (or its local JSON cache files).

    Each dataset is loaded independently: a failure in one is logged with
    a traceback and does not stop the remaining datasets from loading.
    """
    try:
        for pool in _loadSAjson(BP_SA_CACHE_POOLS, 'pools', 'pools'):
            pools[pool['poolid']] = pool['name']
    except Exception:
        # narrowed from bare 'except:' so SystemExit/KeyboardInterrupt propagate
        log.error('Error loading SlaveAlloc pools', exc_info=True)

    try:
        for dc in _loadSAjson(BP_SA_CACHE_DATACENTERS, 'datacenters', 'datacenters'):
            datacenters[dc['dcid']] = dc['name']
    except Exception:
        log.error('Error loading SlaveAlloc datacenters', exc_info=True)

    try:
        for master in _loadSAjson(BP_SA_CACHE_MASTERS, 'masters', 'masters'):
            # normalize missing notes to an empty string for downstream code
            if master['notes'] is None:
                master['notes'] = ''
            master['activity'] = []
            masters[master['nickname']] = master
    except Exception:
        log.error('Error loading SlaveAlloc masters', exc_info=True)

    try:
        for slave in _loadSAjson(BP_SA_CACHE_SLAVES, 'slaves', 'slaves'):
            slave['activity'] = []
            slaves[slave['name']] = slave
    except Exception:
        # bug fix: the original message said 'pools' here
        log.error('Error loading SlaveAlloc slaves', exc_info=True)
def loadKittenList(options):
    """Return a list of kitten (slave) names derived from options.kittens.

    options.kittens may be:
      - 'ec2': names are pulled from the redis 'farm:ec2:active' set
      - an http:// URL pointing at a newline-separated text file
      - a path to a local file (one name per line; newlines are retained,
        matching the original behavior)
      - a comma-separated list of names
      - a single bare name

    :param options: parsed options object; only options.kittens is read
    :return: list of kitten name strings (possibly empty)
    """
    result = []
    kittens = options.kittens
    if kittens.lower() in ('ec2',):
        for item in db.smembers('farm:%s:active' % kittens):
            itemName = db.hget(item, 'name')
            if itemName is None:
                log.info('Skipping bad entry [%s]' % item)
            else:
                # bug fix: reuse the value already fetched instead of a
                # second db.hget round-trip for the same field
                result.append(itemName)
    elif kittens.lower().startswith('http://'):
        # fetch url, and yes, we assume it's a text file
        items = fetchUrl(kittens)
        # and then make it iterable
        if items is not None:
            result = items.split('\n')
    elif os.path.exists(kittens):
        # bug fix: close the file handle instead of leaking it
        with open(kittens, 'r') as f:
            result = f.readlines()
    elif ',' in kittens:
        result = kittens.split(',')
    else:
        result.append(kittens)
    return result
# NOTE(review): this function is defined twice in this file (an identical
# definition appears earlier); this later definition is the one in effect
# at import time -- consider deleting one of them.
def loadKittenList(options):
    """Return a list of kitten (slave) names derived from options.kittens.

    options.kittens may be:
      - 'ec2': names are pulled from the redis 'farm:ec2:active' set
      - an http:// URL pointing at a newline-separated text file
      - a path to a local file (one name per line; newlines are retained,
        matching the original behavior)
      - a comma-separated list of names
      - a single bare name

    :param options: parsed options object; only options.kittens is read
    :return: list of kitten name strings (possibly empty)
    """
    result = []
    kittens = options.kittens
    if kittens.lower() in ('ec2',):
        for item in db.smembers('farm:%s:active' % kittens):
            itemName = db.hget(item, 'name')
            if itemName is None:
                log.info('Skipping bad entry [%s]' % item)
            else:
                # bug fix: reuse the value already fetched instead of a
                # second db.hget round-trip for the same field
                result.append(itemName)
    elif kittens.lower().startswith('http://'):
        # fetch url, and yes, we assume it's a text file
        items = fetchUrl(kittens)
        # and then make it iterable
        if items is not None:
            result = items.split('\n')
    elif os.path.exists(kittens):
        # bug fix: close the file handle instead of leaking it
        with open(kittens, 'r') as f:
            result = f.readlines()
    elif ',' in kittens:
        result = kittens.split(',')
    else:
        result.append(kittens)
    return result
# if reFilter is None: # log.error("During this testing phase I'm making it so that --filter is required") # log.error("Please re-run and specify a filter so we don't accidently process all") # log.error("slaves or something silly like that -- thanks (bear)") # sys.exit(1) if options.verbose: log.info('retrieving list of kittens to wrangle') seenCache = loadCache(options.cachefile) kittens = None if options.kittens.lower().startswith('http://'): # fetch url, and yes, we assume it's a text file items = fetchUrl(options.kittens) # and then make it iterable if items is not None: kittens = items.split('\n') else: kittens = [] else: if os.path.exists(options.kittens): kittens = open(options.kittens, 'r').readlines() else: if ',' in options.kittens: kittens = options.kittens.split(',') else: kittens = [] kittens.append(options.kittens)
if __name__ == "__main__": options = initOptions(_defaultOptions) initLogs(options) logging.getLogger("paramiko.transport").setLevel(logging.WARNING) if options.tools is None: options.tools = '/builds/tools' log.info('Starting') # grab and process slavealloc list into a simple dictionary slaves = {} slavelist = json.loads(fetchUrl('%s/slaves' % urlSlaveAlloc)) for item in slavelist: if item['notes'] is None: item['notes'] = '' slaves[item['name']] = item reBug = re.compile(_reBug) remoteEnv = releng.remote.RemoteEnvironment(options.tools, options.username, options.password) for host in options.args: flag = False log.info('processing %s' % host) if not options.dryrun:
if __name__ == "__main__": options = initOptions(_defaultOptions) initLogs(options) logging.getLogger("paramiko.transport").setLevel(logging.WARNING) if options.tools is None: options.tools = '/builds/tools' log.info('Starting') # grab and process slavealloc list into a simple dictionary slaves = {} slavelist = json.loads(fetchUrl('%s/slaves' % urlSlaveAlloc)) for item in slavelist: if item['notes'] is None: item['notes'] = '' slaves[item['name']] = item reBug = re.compile(_reBug) remoteEnv = releng.remote.RemoteEnvironment(options.tools, options.username, options.password) for host in options.args: flag = False log.info('processing %s' % host) if not options.dryrun: if slaves[host]['enabled']: notes = slaves[host]['notes']