def test_check_toeplitz():
    """Check Toeplitz approximation parameters are read from the config.

    With the default config, 'cls' has no Toeplitz block (-1 sentinel
    triple) while 'cov' does; once 'toeplitz' is removed from the config
    both return the sentinel.
    """
    d = get_data()
    cfg = get_config_dict()
    # Default config: Toeplitz approximation configured for 'cov' only
    assert d.check_toeplitz('cls') == (-1, -1, -1)
    assert d.check_toeplitz('cov') == (2750, 1000, 2000)
    remove_yml_file(cfg)

    # Without the 'toeplitz' section, both fall back to the sentinel
    del cfg['toeplitz']
    d = Data(data=cfg)
    assert d.check_toeplitz('cls') == (-1, -1, -1)
    assert d.check_toeplitz('cov') == (-1, -1, -1)
    remove_yml_file(cfg)
def test_read_saved_data():
    """A yml already saved in outdir takes precedence over the passed config."""
    saved = get_data()
    cfg = get_config_dict()
    cfg['bpw_edges'] = [0, 10]
    # Since there's a yml saved in outdir, it will read it instead of config
    reread = Data(data=cfg)
    assert reread.data == saved.data
    remove_yml_file(saved.data)
def test_initizalization():
    """Exercise the Data constructor's input-validation and override flags."""
    input_file = get_input_file()
    cfg = read_yaml_file(input_file)

    # Passing both a file path and a config dictionary raises an error
    with pytest.raises(ValueError):
        Data(input_file, data=cfg)

    # override=True writes the new config to outdir
    cfg['bpw_edges'] = [0, 10]
    d2 = Data(data=cfg, override=True)
    assert os.path.isfile('xcell/tests/cls/data.yml')
    assert d2.data['bpw_edges'] == [0, 10]
    remove_yml_file(cfg)

    # ignore_existing_yml=True uses the passed config, not the saved yml
    d = get_data()
    d2 = Data(data=cfg, ignore_existing_yml=True)
    assert d2.data['bpw_edges'] != d.data['bpw_edges']
    remove_yml_file(cfg)

    # override and ignore_existing_yml are mutually exclusive
    with pytest.raises(ValueError):
        d2 = Data(data=cfg, ignore_existing_yml=True, override=True)
nargs='+', help=
'Skip the following tracers. It can be given as DELS__0 to skip only DELS__0 tracer or DELS to skip all DELS tracers'
)
parser.add_argument(
    '--override_yaml', default=False, action='store_true', help=
    'Override the YAML file if already stored. Be ware that this could cause compatibility problems in your data!'
)
args = parser.parse_args()

##############################################################################
# Build the Data object from the input yml and dispatch to the requested
# computation (cls / cov / to_sacc).
data = Data(data_path=args.INPUT, override=args.override_yaml)

queue = args.queue
njobs = args.njobs
onlogin = args.onlogin

if args.compute == 'cls':
    launch_cls(data, queue, njobs, args.nc, args.mem, args.cls_fiducial,
               onlogin, args.skip)
elif args.compute == 'cov':
    launch_cov(data, queue, njobs, args.nc, args.mem, onlogin, args.skip)
elif args.compute == 'to_sacc':
    # The two sacc-population modes are mutually exclusive
    if args.to_sacc_use_nl and args.to_sacc_use_fiducial:
        raise ValueError(
            'Only one of --to_sacc_use_nl or --to_sacc_use_fiducial can be set'
        )
def get_data(fsky0=0.2, fsky1=0.3, dtype0='galaxy_density',
             dtype1='galaxy_shear'):
    """Build a Data instance from a generated two-tracer config.

    The sky fractions and tracer dtypes are forwarded to get_config.
    """
    return Data(data=get_config(fsky0, fsky1, dtype0, dtype1))
def test_covariance_extra():
    """Check that an externally-provided ('extra') covariance is read back
    correctly, including tracer-pair reordering and zeroed B-mode blocks."""
    # Generate a config file with extra covariance
    config = get_config().copy()
    config['cov'].update({
        'extra': {
            'path': os.path.join(tmpdir, 'dummy_cov.npy'),
            'order': ['Dummy-Dummy', 'Dummy-DummyWL', 'Dummy-DummyCV',
                      'DummyWL-DummyWL', 'DummyWL-DummyCV',
                      'DummyCV-DummyCV']
        }
    })
    config['cls'].update({
        'Dummy-Dummy': {'compute': 'all'},
        'Dummy-DummyWL': {'compute': 'all'},
        'Dummy-DummyCV': {'compute': 'all'},
        'DummyWL-DummyWL': {'compute': 'auto'},
        'DummyWL-DummyCV': {'compute': 'all'},
        'DummyCV-DummyCV': {'compute': 'all'}
    })
    # Add extra difficulty by adding 2 more tracers
    config['tracers']['DummyWL'] = config['tracers']['Dummy__1'].copy()
    config['tracers']['DummyCV'] = config['tracers']['Dummy__0'].copy()
    config['tracers']['DummyCV']['dtype'] = 'cmb_convergence'
    # Overwrite 'data.yml' with new configuration
    data = Data(data=config, override=True)
    datafile = os.path.join(data.data['output'], 'data.yml')
    # Populate a sacc file with cls and covG. This will be used as extra
    # covariance later. Note that the tracers are in different order than in
    # the config file when reading the yml file.
    s = ClSack(datafile, 'cls_cov_dummy.fits', 'cls')
    covmat = s.s.covariance.covmat
    # Set B-modes to 0 as they are set to 0 when reading the extra covariance
    for dt in s.s.get_data_types():
        if 'b' in dt:
            ix = s.s.indices(data_type=dt)
            covmat[ix] = 0
            covmat[:, ix] = 0
    # Prepare the "extra" covariance. So far extra covs with B-modes are not
    # implemented. Keep only spin-0 and E-modes.
    # Not done in previous loop because the indices vary for the sacc file and
    # no longer correspond to those in covmat.
    for dt in s.s.get_data_types():
        if 'b' in dt:
            s.s.remove_selection(data_type=dt)
    # Reorder the covariance as stated in the config
    ix_reorder_d = {k: [] for k in s.data.data['cls'].keys()}
    for trs in s.s.get_tracer_combinations():
        key = s.data.get_tracers_bare_name_pair(*trs)
        ix_reorder_d[key].extend(s.s.indices(tracers=trs))
    ix_reorder = []
    for key in s.data.data['cov']['extra']['order']:
        if key not in ix_reorder_d:
            # Pair stored under the reversed tracer order; flip the key
            key = '-'.join(key.split('-')[::-1])
        ix_reorder.extend(ix_reorder_d[key])
    # Save the reordered covariance
    np.save(config['cov']['extra']['path'],
            s.s.covariance.covmat[ix_reorder][:, ix_reorder])
    # Populate a sacc file with nls and cov extra (read previous cov)
    s2 = ClSack(datafile, 'cls_cov_dummy.fits', 'nl')
    covmat2 = s2.s.covariance.covmat
    # Remove B-modes to test if spin-0 and E-modes are correctly taken into
    # account
    for dt in s2.s.get_data_types():
        if 'b' in dt:
            s2.s.remove_selection(data_type=dt)
    # Check if the covariance is correctly read with no B-modes
    cov_no_B = s.s.covariance.covmat
    cov_no_B2 = s2.s.covariance.covmat
    assert np.max(np.abs((cov_no_B - cov_no_B2) / np.mean(cov_no_B))) < 1e-5
    # Check if the full covariance is correctly generated
    assert np.max(np.abs((covmat2 - covmat) / np.mean(covmat))) < 1e-5
def test_get_tracer_matrix():
    """Check the 'clcov_from_data' flag for every cls_from_data spelling."""
    def _check(cfg, expected):
        # expected(data, t1, t2) -> bool: whether the pair's covariance
        # Cl should be taken from data.
        data = Data(data=cfg)
        matrix = data.get_tracer_matrix()
        for t1, t2 in get_tracer_pair_iterator(data):
            assert bool(matrix[(t1, t2)]['clcov_from_data']) == \
                expected(data, t1, t2)
        remove_yml_file(cfg)

    # Default config: no cls from data
    _check(get_config_dict(), lambda d, t1, t2: False)

    # All cls from data
    cfg = get_config_dict()
    cfg['cov']['cls_from_data'] = 'all'
    _check(cfg, lambda d, t1, t2: True)

    # Group cls from data (all combinations within the group)
    cfg = get_config_dict()
    cfg['cov']['cls_from_data'] = {'DESgc-DESgc': {'compute': 'all'}}
    _check(cfg, lambda d, t1, t2:
           d.get_tracers_bare_name_pair(t1, t2) == 'DESgc-DESgc')

    # Group cls from data (auto-correlations only)
    cfg = get_config_dict()
    cfg['cov']['cls_from_data'] = {'DESgc-DESgc': {'compute': 'auto'}}
    _check(cfg, lambda d, t1, t2:
           d.get_tracers_bare_name_pair(t1, t2) == 'DESgc-DESgc' and t1 == t2)

    # Explicit list of tracer pairs (order-insensitive)
    cfg = get_config_dict()
    cfg['cov']['cls_from_data'] = ['DESgc__0-DESgc__0', 'DESgc__1-DESwl__1']
    from_data = {('DESgc__0', 'DESgc__0'), ('DESgc__1', 'DESwl__1'),
                 ('DESwl__1', 'DESgc__1')}
    _check(cfg, lambda d, t1, t2: (t1, t2) in from_data)
def test_input_from_another_file():
    """Yaml files that include other yaml files within them can be loaded."""
    loaded = Data('xcell/tests/data/conftest.yml')
    # The bias value lives in the included file, so it proves the include
    # was resolved.
    assert loaded.data['tracers']['DESgc__1']['bias'] == 1.76
    remove_yml_file(loaded.data)
def get_data_from_dict():
    """Return a Data instance built from the test config dictionary."""
    return Data(data=get_config_dict())
def get_data():
    """Return a Data instance built from the test input yml file."""
    path = get_input_file()
    return Data(path)