def test_halo_lc():
    '''Test loading a halo light cone catalog against stored reference files.
    '''
    from abacusnbody.data.compaso_halo_catalog import CompaSOHaloCatalog

    cat = CompaSOHaloCatalog(
        curdir / 'halo_light_cones/AbacusSummit_base_c000_ph001-abridged/z2.250/',
        fields='all',
        subsamples=True,
    )

    # The catalog must detect that it is a light cone catalog.
    # Idiomatic truthiness check instead of `== True`.
    assert cat.halo_lc

    HALO_LC_CAT = refdir / 'halo_lc_cat.asdf'
    HALO_LC_SUBSAMPLES = refdir / 'halo_lc_subsample.asdf'

    # to regenerate reference
    # ref = cat.halos
    # ref.write(HALO_LC_CAT, format='asdf', all_array_storage='internal',
    #           all_array_compression='blsc')
    # ref = cat.subsamples
    # ref.write(HALO_LC_SUBSAMPLES, format='asdf', all_array_storage='internal',
    #           all_array_compression='blsc')

    # compare the halo table column-by-column against the reference
    ref = Table.read(HALO_LC_CAT)
    halos = cat.halos
    for col in ref.colnames:
        assert check_close(ref[col], halos[col])
    assert halos.meta == ref.meta

    # compare the particle subsamples against the reference
    ref = Table.read(HALO_LC_SUBSAMPLES)
    ss = cat.subsamples
    for col in ref.colnames:
        assert check_close(ref[col], ss[col])
    assert ss.meta == ref.meta
def test_halos_clean(tmp_path):
    '''Test loading a cleaned halo catalog
    '''
    # NOTE(review): docstring previously said "base (uncleaned)" but this test
    # loads with cleaned=True; test_halos_unclean covers the uncleaned case.
    from abacusnbody.data.compaso_halo_catalog import CompaSOHaloCatalog

    cat = CompaSOHaloCatalog(
        EXAMPLE_SIM / 'halos' / 'z0.000',
        subsamples=True,
        fields='all',
        cleaned=True,
    )

    # to regenerate reference
    # ref = cat.halos
    # ref.write(HALOS_OUTPUT_CLEAN, all_array_storage='internal',
    #           all_array_compression='blsc')

    ref = Table.read(HALOS_OUTPUT_CLEAN)
    halos = cat.halos
    for col in ref.colnames:
        assert check_close(ref[col], halos[col])

    # all haloindex values should point to this slab
    assert np.all(
        (halos['haloindex'] / 1e12).astype(int) == cat.header['FullStepNumber']
    )

    # ensure that all deleted halos in ref are marked as merged in EXAMPLE_SIM
    assert np.all(halos['is_merged_to'][ref['N'] == 0] != -1)

    # no deleted halos in ref should have merged particles in EXAMPLE_SIM
    assert np.all(halos['N_merge'][ref['N'] == 0] == 0)

    assert halos.meta == ref.meta
def test_subsamples_clean(tmp_path):
    '''Test loading particle subsamples from a cleaned catalog.
    '''
    from abacusnbody.data.compaso_halo_catalog import CompaSOHaloCatalog

    cat = CompaSOHaloCatalog(
        EXAMPLE_SIM / 'halos' / 'z0.000',
        subsamples=True,
        fields='all',
        cleaned=True,
    )

    # to regenerate reference
    # ref = cat.subsamples
    # import asdf; asdf.compression.set_compression_options(typesize='auto')
    # ref.write(PARTICLES_OUTPUT_CLEAN, format='asdf',
    #           all_array_storage='internal', all_array_compression='blsc')

    reference = Table.read(PARTICLES_OUTPUT_CLEAN)
    subsamples = cat.subsamples

    # every reference column must match the loaded subsamples
    for name in reference.colnames:
        assert check_close(reference[name], subsamples[name])

    # total number of particles in the reference should equal the sum
    # total of npout{A,B} in EXAMPLE_SIM
    expected_nparticles = np.sum(cat.halos['npoutA']) + np.sum(cat.halos['npoutB'])
    assert len(reference) == expected_nparticles

    assert cat.subsamples.meta == reference.meta
def test_subsamples_unclean(tmp_path):
    '''Test loading particle subsamples from an uncleaned catalog.
    '''
    from abacusnbody.data.compaso_halo_catalog import CompaSOHaloCatalog

    halo_dir = EXAMPLE_SIM / 'halos' / 'z0.000'

    # A subsamples only
    cat = CompaSOHaloCatalog(
        halo_dir, subsamples=dict(A=True), fields='all', cleaned=False
    )
    lenA = len(cat.subsamples)
    assert lenA == 2975
    assert cat.subsamples.colnames == ['pos', 'vel']

    # B subsamples only
    cat = CompaSOHaloCatalog(
        halo_dir, subsamples=dict(B=True), fields='all', cleaned=False
    )
    lenB = len(cat.subsamples)
    assert lenB == 7082

    # A and B together should be the sum of the two
    cat = CompaSOHaloCatalog(
        halo_dir, subsamples=True, fields='all', cleaned=False
    )
    assert len(cat.subsamples) == lenA + lenB

    # to regenerate reference
    # ref = cat.subsamples
    # import asdf; asdf.compression.set_compression_options(typesize='auto')
    # ref.write(PARTICLES_OUTPUT_UNCLEAN, format='asdf',
    #           all_array_storage='internal', all_array_compression='blsc')

    reference = Table.read(PARTICLES_OUTPUT_UNCLEAN)
    subsamples = cat.subsamples
    for name in reference.colnames:
        assert check_close(reference[name], subsamples[name])
    assert cat.subsamples.meta == reference.meta
def test_halos_unclean(tmp_path):
    '''Test loading a base (uncleaned) halo catalog
    '''
    from abacusnbody.data.compaso_halo_catalog import CompaSOHaloCatalog

    cat = CompaSOHaloCatalog(
        EXAMPLE_SIM / 'halos' / 'z0.000',
        subsamples=True,
        fields='all',
        cleaned=False,
    )

    # to regenerate reference
    # ref = cat.halos
    # ref.write(HALOS_OUTPUT_UNCLEAN, all_array_storage='internal',
    #           all_array_compression='blsc')

    reference = Table.read(HALOS_OUTPUT_UNCLEAN)
    loaded_halos = cat.halos

    # every reference column must match the loaded catalog
    for name in reference.colnames:
        assert check_close(reference[name], loaded_halos[name])

    assert loaded_halos.meta == reference.meta
def test_hod(tmp_path, reference_mode = False):
    '''Test the HOD pipeline: subsample preparation and galaxy catalog output.

    With ``reference_mode=True`` this regenerates the on-disk reference files
    instead of testing against them.
    '''
    from abacusnbody.hod import prepare_sim
    from abacusnbody.hod.abacus_hod import AbacusHOD

    config = yaml.safe_load(open(EXAMPLE_CONFIG))
    # inform abacus_hod where the simulation files are, relative to the cwd
    config['sim_params']['sim_dir'] = pjoin(TESTDIR, 'halo_light_cones')
    sim_params = config['sim_params']
    HOD_params = config['HOD_params']
    clustering_params = config['clustering_params']

    # reference mode
    if reference_mode:
        print("Generating new reference files...")
        prepare_sim.main(EXAMPLE_CONFIG)

        # additional parameter choices
        want_rsd = HOD_params['want_rsd']
        bin_params = clustering_params['bin_params']

        # create a new abacushod object
        newBall = AbacusHOD(sim_params, HOD_params, clustering_params)
        # write_to_disk=True writes the reference galaxy catalogs
        mock_dict = newBall.run_hod(newBall.tracers, want_rsd, write_to_disk = True, Nthread = 2)

    # test mode
    else:
        simname = config['sim_params']['sim_name'] # "AbacusSummit_base_c000_ph006"
        simdir = config['sim_params']['sim_dir']
        z_mock = config['sim_params']['z_mock']

        # all output dirs should be under tmp_path
        config['sim_params']['output_dir'] = pjoin(tmp_path, 'data_mocks_summit_new') + '/'
        config['sim_params']['subsample_dir'] = pjoin(tmp_path, "data_subs") + '/'
        config['sim_params']['scratch_dir'] = pjoin(tmp_path, "data_gals") + '/'
        # e.g. <subsample_dir>/<simname>/z2.250 (ljust pads the redshift string with zeros)
        savedir = config['sim_params']['subsample_dir'] + simname+"/z"+str(z_mock).ljust(5, '0')

        # check subsample file match
        prepare_sim.main(EXAMPLE_CONFIG, params = config)

        # compare every element of the freshly written halo subsample file
        # against the stored reference HDF5 file
        newhalos = h5py.File(savedir+'/halos_xcom_0_seed600_abacushod_oldfenv_MT_new.h5', 'r')['halos']
        temphalos = h5py.File(EXAMPLE_SUBSAMPLE_HALOS, 'r')['halos']
        for i in range(len(newhalos)):
            for j in range(len(newhalos[i])):
                assert check_close(newhalos[i][j], temphalos[i][j])

        # same comparison for the particle subsample file
        newparticles = h5py.File(savedir+'/particles_xcom_0_seed600_abacushod_oldfenv_MT_new.h5', 'r')['particles']
        tempparticles = h5py.File(EXAMPLE_SUBSAMPLE_PARTS, 'r')['particles']
        for i in range(len(newparticles)):
            for j in range(len(newparticles[i])):
                assert check_close(newparticles[i][j], tempparticles[i][j])

        # additional parameter choices
        want_rsd = HOD_params['want_rsd']
        write_to_disk = HOD_params['write_to_disk']
        bin_params = clustering_params['bin_params']
        rpbins = np.logspace(bin_params['logmin'], bin_params['logmax'], bin_params['nbins'])
        pimax = clustering_params['pimax']
        pi_bin_size = clustering_params['pi_bin_size']

        # create a new abacushod object
        newBall = AbacusHOD(sim_params, HOD_params, clustering_params)

        # throw away run for jit to compile, write to disk
        mock_dict = newBall.run_hod(newBall.tracers, want_rsd, write_to_disk = True, Nthread = 2)

        # compare the LRG galaxy catalog written by run_hod against the reference
        savedir_gal = config['sim_params']['output_dir']\
            +"/"+simname+"/z"+str(z_mock).ljust(5, '0') +"/galaxies_rsd/LRGs.dat"
        data = ascii.read(EXAMPLE_LRGS)
        data1 = ascii.read(savedir_gal)
        for ekey in data.keys():
            assert check_close(data[ekey], data1[ekey])

        # compare the ELG galaxy catalog against the reference
        savedir_gal = config['sim_params']['output_dir']\
            +"/"+simname+"/z"+str(z_mock).ljust(5, '0') +"/galaxies_rsd/ELGs.dat"
        data = ascii.read(EXAMPLE_ELGS)
        data1 = ascii.read(savedir_gal)
        for ekey in data.keys():
            assert check_close(data[ekey], data1[ekey])