def check_except(name):
    """Continuously probe the Redis overlay *name* and log availability.

    Every ~0.5s: reconnect, ping, and bump a monotonic 'testcounter' key,
    cross-checking it against a local counter. Tracks cumulative up/down
    time and logs a line on every UP<->DOWN transition. Runs forever.

    Args:
        name: overlay/service name; also used as the logger name.
    """
    log = logging.getLogger(name)
    total_uptime = 0.
    total_downtime = 0.
    run_time = 0.          # seconds spent in the current UP or DOWN streak
    counter = 0            # local mirror of the Redis 'testcounter' value
    up = False
    uptxt = 'DOWN'
    ts = dt.now()
    log.info('START,%f,%f,%f', total_uptime, total_downtime, run_time)
    while True:
        ts = dt.now()
        time.sleep(.5)
        try:
            client = RedisClient(name)
            status = client.ping()
            # First pass seeds the counter; afterwards it must track ours.
            if counter == 0:
                client.set('testcounter', 1)
            else:
                client.incr('testcounter')
            counter += 1
            testcounter = int(client.get('testcounter'))
            # Deliberate hard check: a divergence means lost writes.
            assert (counter == testcounter)
        except redis.RedisError as e:
            # BUG FIX: exception *instances* have no __name__ attribute;
            # the original `e.__name__` raised AttributeError here.
            print(' REDIS ERROR ===> ', type(e).__name__)
            status = False
        except OverlayNotAvailable:
            print(' OVERLAY not available')
            status = False
        delta = (dt.now() - ts).total_seconds()
        if status == up:
            run_time += delta
        else:
            # Availability flipped: emit the finished streak, reset the clock.
            print('STATUS Change from %s' % uptxt)
            log.info('%s,%f,%f,%f', uptxt, total_uptime, total_downtime, run_time)
            run_time = 0.
        if status:
            uptxt = 'UP'
            total_uptime += delta
        else:
            uptxt = 'DOWN'
            total_downtime += delta
        print('%s,%f' % (uptxt, run_time))
        up = status
def check_except(name):
    """Availability probe for the Redis overlay *name* (never returns).

    Loop: sleep 0.5s, reconnect, ping, increment a shared 'testcounter'
    key, and assert it matches a local count. Accumulates total up/down
    seconds and logs each UP<->DOWN transition via the *name* logger.

    NOTE(review): this is a byte-identical duplicate of the definition
    above it in this file; the second one shadows the first. Consider
    deleting one — TODO confirm which is current.
    """
    log = logging.getLogger(name)
    total_uptime = 0.
    total_downtime = 0.
    run_time = 0.          # length of the current availability streak
    counter = 0            # expected value of the Redis 'testcounter' key
    up = False
    uptxt = 'DOWN'
    ts = dt.now()
    log.info('START,%f,%f,%f', total_uptime, total_downtime, run_time)
    while True:
        ts = dt.now()
        time.sleep(.5)
        try:
            client = RedisClient(name)
            status = client.ping()
            if counter == 0:
                client.set('testcounter', 1)   # seed on first success
            else:
                client.incr('testcounter')
            counter += 1
            testcounter = int(client.get('testcounter'))
            assert (counter == testcounter)    # divergence == lost writes
        except redis.RedisError as e:
            # BUG FIX: the original printed `e.__name__`, which raises
            # AttributeError on an exception instance; print the class name.
            print(' REDIS ERROR ===> ', type(e).__name__)
            status = False
        except OverlayNotAvailable:
            print(' OVERLAY not available')
            status = False
        delta = (dt.now() - ts).total_seconds()
        if status == up:
            run_time += delta
        else:
            print('STATUS Change from %s' % uptxt)
            log.info('%s,%f,%f,%f', uptxt, total_uptime, total_downtime, run_time)
            run_time = 0.   # new streak starts now
        if status:
            uptxt = 'UP'
            total_uptime += delta
        else:
            uptxt = 'DOWN'
            total_downtime += delta
        print('%s,%f' % (uptxt, run_time))
        up = status
# NOTE(review): these two statements reference jcID / config / catalog /
# wrapKey, none of which are defined at module level here — this looks like
# a fragment detached from an enclosing function. TODO confirm placement.
catalog.rpush('jcqueue', jcID)
catalog.hmset(wrapKey('jc', jcID), config)

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # BUG FIX: argparse ignores `default` on a positional argument unless
    # nargs='?' — the original made 'name' mandatory and the default dead.
    parser.add_argument('name', nargs='?', default='default')
    parser.add_argument('--initcatalog', action='store_true')
    parser.add_argument('--lattice', action='store_true')
    parser.add_argument('--centroid', action='store_true')
    args = parser.parse_args()

    # Config file is derived from the experiment name, e.g. "default.json".
    confile = args.name + '.json'
    settings = systemsettings()
    settings.applyConfig(confile)
    catalog = RedisClient(args.name)

    if args.initcatalog:
        settings.envSetup()
        initialize.initializecatalog(catalog)
    if args.lattice:
        numresources = int(catalog.get('numresources'))
        bootstrap_lattice(catalog, numresources)
    if args.centroid:
        # Hand-picked (file number, frame) seed coordinates for manual jobs.
        start_coords = [(2586, 315), (531, 20), (3765, 63), (3305, 668), (1732, 139)]
        for fileno, frame in start_coords:
            initialize.manual_de_job(catalog, fileno, frame)
# TO Recalculate PCA Vectors from DEShaw (~30-40 mins at 10% of data)
#   calcDEShaw_PCA(catalog)
#   sys.exit(0)

# NOTE(review): this chunk reads `args`, `settings`, and `catalog` which are
# bound earlier in the script (outside this view). Each flag below triggers
# one independent bootstrap/maintenance action against the catalog.
if args.centroid:
    centroid_bootstrap(catalog)

if args.initcatalog or args.all:
    settings.envSetup()
    initializecatalog(catalog)

if args.onejob:
    makejobconfig(catalog)

if args.seedjob or args.all:
    numresources = int(catalog.get('numresources'))
    # One seed job per ~25 resources, but always at least one.
    initialJobPerBin = max(1, numresources // 25)
    if args.exact:
        seedJob_Uniform(catalog, num=initialJobPerBin, exact=numresources)
    else:
        seedJob_Uniform(catalog, num=initialJobPerBin)

if args.updateschema:
    # archive = redisCatalog.dataStore(**DEFAULT.archiveConfig)
    updateschema(catalog)

if args.reset:
    resetAnalysis(catalog)

if args.initpca:
    load_PCA_Subspace(catalog)