# Driver for the change-point ("chgpt") workflow: collect per-node
# observations (fresh or resumed from a pickle), optionally persist them,
# and optionally build a per-minute aggregate for plotting.
cargs = cli.CommandLine(cli.optsfile('chgpt'))
args = cargs.args

# Minutes per day; the per-node observation vectors are minute-resolution.
oneday = round(constant.day / constant.minute)
window = nd.Window(args.window_obs, args.window_pred, args.window_trgt)

if args.resume:
    # Resume from a previously pickled observation set instead of
    # recomputing everything from the database.
    with open(args.resume, mode='rb') as fp:
        observations = pickle.load(fp)
    (measurements, nodes) = data.cleanse(observations)
else:
    db.genop(args.reporting)
    opts = [window, oneday, args.threshold, np.mean]
    with Pool() as pool:
        observations = pool.starmap(f, nd.nodegen(opts))
        observations = list(filter(None, observations))
    # Explicit check rather than `assert`: asserts are stripped under
    # `python -O`, which would silently let an empty result propagate.
    if not observations:
        raise ValueError('no observations were produced')

if args.pickle:
    with open(args.pickle, mode='wb') as fp:
        pickle.dump(observations, fp)

if args.figures:  # and args.verbose:
    # Mean across nodes for each minute of the day; the trailing zero pads
    # the series (mirrors the commented-out measurements-based variant).
    aggregate = []
    for i in range(oneday):
        vals = [x[i] for x in observations]
        aggregate.append(np.mean(vals))
    aggregate.append(0)
    # aggregate = measurements.mean(axis=0).tolist() + [ 0 ]
    # log.debug(len(measurements), len(aggregate))
log = Logger().log log.info('{0}: setup +'.format(node)) with DatabaseConnection() as conn: source = nd.Node(node, conn) neighbors = [ nd.Node(x, conn) for x in source.neighbors ] log.info('{0}: setup -'.format(node)) classes = [ WindowInfluence ] # [ MinuteInfluence, WindowInfluence ] return [ i(source, neighbors, cargs).run() for i in classes ] with Pool() as pool: cargs = cli.CommandLine(cli.optsfile('main')) results = pool.starmap(f, nd.nodegen(cargs.args)) with NamedTemporaryFile(mode='wb', delete=False) as fp: pickle.dump(results, fp) msg = 'pickle: {0}'.format(fp.name) Logger().log.error(msg) # with open('/tmp/tmpe2x8wi0d', mode='rb') as fp: # results = pickle.load(fp) header = [ 'type', 'source', 'target', 'pearson', 'spearman', 'delay',
# Driver for the prediction workflow: read the run configuration, set up
# database credentials and the results writer, then run either a single
# node or every node across a process pool.
cargs = cli.CommandLine(cli.optsfile('prediction'))  # /etc/opts/prediction

config = ConfigParser()
config.read(cargs.args.config)  # --config
params = config['parameters']
writer = ResultsWriter(config['output'].getboolean('print-header'))

# Establish the database credentials; a missing [database] section means
# "use the defaults".
# BUG FIX: the original expanded **None when the section was absent, which
# raises TypeError ("argument after ** must be a mapping") — expand an
# empty mapping instead so the defaults actually take effect.
dbinfo = config['database'] if 'database' in config else {}
db.EstablishCredentials(**dbinfo)

#
# Processing
#
log.info('phase 2')

if 'node' in params:
    # Single-node run: no pool needed.
    results = run((0, int(params['node']), config))
    writer.write(results)
else:
    with Pool() as pool:
        # chunksize of 1: hand nodes to workers one at a time.
        for i in pool.imap_unordered(run, nodegen(config), 1):
            writer.write(i)

#
# Tear down
#
log.info('phase 3')
# Change-point ("chgpt") driver: gather per-node observations — resumed
# from a pickle or computed fresh over a process pool — then optionally
# pickle them and build a per-minute daily aggregate for figures.
cargs = cli.CommandLine(cli.optsfile('chgpt'))
args = cargs.args

# Number of minutes in a day (observation vectors are minute-resolution).
oneday = round(constant.day / constant.minute)
window = nd.Window(args.window_obs, args.window_pred, args.window_trgt)

if args.resume:
    # Reload a previously saved observation set.
    with open(args.resume, mode='rb') as fp:
        observations = pickle.load(fp)
    (measurements, nodes) = data.cleanse(observations)
else:
    db.genop(args.reporting)
    opts = [window, oneday, args.threshold, np.mean]
    with Pool() as pool:
        observations = pool.starmap(f, nd.nodegen(opts))
        observations = list(filter(None, observations))
    # Validate with an explicit raise instead of `assert`, which is a
    # no-op under `python -O` and would hide an empty result.
    if not observations:
        raise ValueError('no observations were produced')

if args.pickle:
    with open(args.pickle, mode='wb') as fp:
        pickle.dump(observations, fp)

if args.figures:  # and args.verbose:
    # Per-minute mean across all nodes, padded with a trailing zero
    # (mirrors the commented-out measurements-based variant below).
    aggregate = [np.mean([x[i] for x in observations]) for i in range(oneday)]
    aggregate.append(0)
    # aggregate = measurements.mean(axis=0).tolist() + [ 0 ]
    # log.debug(len(measurements), len(aggregate))