# NOTE(review): fragment — the three statements below belong to an enclosing
# function whose `def` line is not visible in this chunk, and the chunk ends
# truncated mid-call. Tokens are byte-identical to the original; only line
# breaks, indentation, and comments were restored.

logger.debug("{} {}: Loading samples".format(name, i))
# Pull the eccentricity/orbit samples for the APOGEE IDs selected by this
# pixel's mask (presumably one sky/HR-diagram pixel per task — TODO confirm).
ez_samples = get_ez_samples(metadata['APOGEE_ID'][pixel_mask])

# Run
run_pixel(name, i, ez_samples, cache_path, plot_path, pool,
          nwalkers=80)  # nwalkers hard-coded for the ensemble sampler


if __name__ == '__main__':
    # Define parser object
    parser = get_parser(description='Hierarchical inference for main sequence',
                        loggers=logger)

    # --procs and --mpi are mutually exclusive: run either with a local
    # process pool of n_procs workers, or under MPI — never both.
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--procs", dest="n_procs", default=1,
                       type=int, help="Number of processes.")
    group.add_argument("--mpi", dest="mpi", default=False,
                       action="store_true", help="Run with MPI.")

    parser.add_argument("-o", "--overwrite",
    # NOTE(review): chunk truncated here — the remainder of this
    # add_argument call and the rest of the script are not visible.
# NOTE(review): fragment — this loop runs inside an enclosing function whose
# `def` line is not visible in this chunk, and the chunk ends truncated
# mid-call. Tokens are byte-identical to the original; only line breaks,
# indentation, and comments were restored.

# Collect worker outputs, skipping task chunks that produced no samples.
for samples in pool.map(_prior_cache_worker, tasks):
    if samples is not None:
        all_samples.append(samples)

# Write all collected samplings into one HDF5 cache file ('w' truncates any
# existing file). Only the first write is non-append (append=i > 0), so
# subsequent tables extend the datasets created by the first.
with h5py.File(c.prior_cache_file, 'w') as f:
    for i, samples in enumerate(all_samples):
        samples.write(f, append=i > 0)

logger.debug("...done generating cache.")


if __name__ == "__main__":
    # Define parser object
    parser = get_parser(description='Generate an HQ run template with the '
                                    'specified name. After running this, you '
                                    'then have to go in to the run directory '
                                    'and edit the configuration.',
                        loggers=logger)

    parser.add_argument("-s", "--seed", dest="seed", default=None, type=int,
                        help="Random number seed")

    args = parser.parse_args()

    # args.Pool / args.Pool_kwargs are presumably attached by get_parser's
    # pool-selection options — TODO confirm against hq.script_helpers.
    with args.Pool(**args.Pool_kwargs) as pool:
        main(args.run_name, pool=pool,
    # NOTE(review): chunk truncated here — the remaining keyword arguments
    # of this main(...) call are not visible.
# NOTE(review): fragment — the statements below run inside an enclosing
# function whose `def` line is not visible in this chunk, and the chunk ends
# truncated mid-call. Tokens are byte-identical to the original; only line
# breaks, indentation, and comments were restored.

# Split the full APOGEE ID list into n_tasks chunks, each carried with the
# shared config object c as extra worker arguments.
tasks = batch_tasks(len(apogee_ids), n_tasks, arr=apogee_ids, args=(c, ))
logger.info(f'Done preparing tasks: split into {len(tasks)} task chunks')

results = []
for r in tqdm(pool.map(worker, tasks), total=len(tasks)):
    results.append(r)

# Each worker returns a list of row-like items; flatten them into one table.
tbl = Table([item for sublist in results for item in sublist])
tbl.write(results_path, overwrite=True)


if __name__ == '__main__':
    from threadpoolctl import threadpool_limits

    # Define parser object
    parser = get_parser(description='Run The Joker on APOGEE data',
                        loggers=[logger, joker_logger])

    parser.add_argument("-s", "--seed", dest="seed", default=None, type=int,
                        help="Random number seed")

    args = parser.parse_args()

    # Limit BLAS to one thread per process so the process pool is not
    # oversubscribed by threaded linear algebra.
    with threadpool_limits(limits=1, user_api='blas'):
        with args.Pool(**args.Pool_kwargs) as pool:
            main(run_name=args.run_name, pool=pool, overwrite=args.overwrite,
    # NOTE(review): chunk truncated here — the remaining keyword arguments
    # of this main(...) call are not visible.
# Write the MCMC metadata table tbl = vstack(sub_tbls) for k in result['units']: tbl[k].unit = result['units'][k] tbl = QTable(tbl) tbl.write(c.metadata_mcmc_path, overwrite=True) # Now write out all of the individual samplings: with h5py.File(c.mcmc_results_path, 'a') as results_f: for apogee_id, samples in all_samples.items(): if apogee_id in results_f: del results_f[apogee_id] g = results_f.create_group(apogee_id) samples.write(g) if __name__ == '__main__': from threadpoolctl import threadpool_limits from hq.script_helpers import get_parser # Define parser object parser = get_parser(description='TODO', loggers=logger) args = parser.parse_args() # with threadpool_limits(limits=1, user_api='blas'): with args.Pool(**args.Pool_kwargs) as pool: main(run_name=args.run_name, pool=pool) sys.exit(0)