def test_different_component_forms():
    """Check component forms can be different"""
    tiny_age = 1e-10

    mean1 = np.zeros(6)
    covmatrix1 = np.eye(6) * 4
    comp1 = SphereComponent(attributes={
        'mean': mean1,
        'covmatrix': covmatrix1,
        'age': tiny_age,
    })

    mean2 = np.zeros(6) + 10.
    covmatrix2 = np.eye(6) * 9
    comp2 = EllipComponent(attributes={
        'mean': mean2,
        'covmatrix': covmatrix2,
        'age': tiny_age,
    })

    starcounts = [100, 100]
    synth_data = SynthData(pars=[comp1.get_pars(), comp2.get_pars()],
                           starcounts=starcounts,
                           Components=[SphereComponent, EllipComponent])
    synth_data.synthesise_everything()

    assert len(synth_data.table) == np.sum(starcounts)
def test_expectation():
    """
    Super basic: generates stars from two associations and checks
    membership allocation is correct
    """
    age = 1e-5
    ass_pars1 = np.array([0, 0, 0, 0, 0, 0, 5., 2., age])
    comp1 = SphereComponent(ass_pars1)
    ass_pars2 = np.array([100., 0, 0, 20, 0, 0, 5., 2., age])
    comp2 = SphereComponent(ass_pars2)
    starcounts = [100, 100]
    synth_data = SynthData(pars=[ass_pars1, ass_pars2],
                           starcounts=starcounts)
    synth_data.synthesise_everything()
    tabletool.convert_table_astro2cart(synth_data.table)

    true_memb_probs = np.zeros((np.sum(starcounts), 2))
    true_memb_probs[:starcounts[0], 0] = 1.
    true_memb_probs[starcounts[0]:, 1] = 1.

    # star_means, star_covs = tabletool.buildDataFromTable(synth_data.astr_table)
    # all_lnols = em.getAllLnOverlaps(
    #     synth_data.astr_table, [comp1, comp2]
    # )

    fitted_memb_probs = em.expectation(
        tabletool.build_data_dict_from_table(synth_data.table),
        [comp1, comp2]
    )

    assert np.allclose(true_memb_probs, fitted_memb_probs, atol=1e-10)
def test_pythonFuncs():
    """
    TODO: remove the requirements of file, have data stored in file?
    """
    true_comp_mean = np.zeros(6)
    true_comp_dx = 2.
    true_comp_dv = 2.
    true_comp_covmatrix = np.identity(6)
    true_comp_covmatrix[:3, :3] *= true_comp_dx ** 2
    true_comp_covmatrix[3:, 3:] *= true_comp_dv ** 2
    true_comp_age = 1e-10
    true_comp = SphereComponent(attributes={
        'mean': true_comp_mean,
        'covmatrix': true_comp_covmatrix,
        'age': true_comp_age,
    })
    nstars = 100
    synth_data = SynthData(pars=true_comp.get_pars(), starcounts=nstars)
    synth_data.synthesise_everything()
    tabletool.convert_table_astro2cart(synth_data.table)
    star_data = tabletool.build_data_dict_from_table(synth_data.table)

    group_mean = true_comp.get_mean()
    group_cov = true_comp.get_covmatrix()

    # Test overlap with true component
    co1s = []
    co2s = []
    for scov, smn in zip(star_data['covs'], star_data['means']):
        co1s.append(co1(group_cov, group_mean, scov, smn))
        co2s.append(co2(group_cov, group_mean, scov, smn))
    co1s = np.array(co1s)
    co2s = np.array(co2s)
    co3s = np.exp(p_lno(group_cov, group_mean,
                        star_data['covs'], star_data['means']))

    assert np.allclose(co1s, co2s)
    assert np.allclose(co2s, co3s)
    assert np.allclose(co1s, co3s)

    # Test overlap with a neighbouring star (with the aim of testing
    # tiny overlap values). Note that most overlaps go to 0, but the
    # log overlaps retain the information
    co1s = []
    co2s = []
    for scov, smn in zip(star_data['covs'], star_data['means']):
        co1s.append(co1(star_data['covs'][15], star_data['means'][15],
                        scov, smn))
        co2s.append(co2(star_data['covs'][15], star_data['means'][15],
                        scov, smn))
    co1s = np.array(co1s)
    co2s = np.array(co2s)
    lnos = p_lno(star_data['covs'][15], star_data['means'][15],
                 star_data['covs'], star_data['means'])
    co3s = np.exp(lnos)

    assert np.allclose(co1s, co2s)
    assert np.allclose(co2s, co3s)
    assert np.allclose(co1s, co3s)
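# For reference, the quantity that co1, co2 and p_lno should all agree on is
# (presumably) the standard Gaussian overlap integral: the integral of the
# product of two multivariate normals is itself a normal density evaluated at
# the difference of the means. A minimal numpy sketch, with an illustrative
# name that is not part of the module under test:
def _sketch_ln_overlap(mean1, cov1, mean2, cov2):
    """ln of the integral of N(x; mean1, cov1) * N(x; mean2, cov2) dx."""
    dim = len(mean1)
    comb_cov = cov1 + cov2
    diff = np.asarray(mean1) - np.asarray(mean2)
    _, logdet = np.linalg.slogdet(comb_cov)
    return -0.5 * (dim * np.log(2 * np.pi) + logdet
                   + diff.dot(np.linalg.solve(comb_cov, diff)))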
def test_generateInitXYZUVW():
    """Check that the mean of initial xyzuvw of stars matches that of the
    initialising component"""
    starcounts = (int(1e6),)
    sd = SynthData(pars=PARS[:1], starcounts=starcounts,
                   Components=COMPONENTS)
    sd.generate_all_init_cartesian()

    comp = SphereComponent(PARS[0])
    init_xyzuvw = sd.extract_data_as_array([dim + '0' for dim in 'xyzuvw'])
    assert np.allclose(comp.get_mean(), np.mean(init_xyzuvw, axis=0),
                       atol=0.1)
def test_lnprob_func():
    """
    Generates two components and a synthetic data set from each.
    Confirms that each data set's lnprob is larger under its generating
    component than under the other.
    """
    measurement_error = 1e-10
    star_count = 500
    tiny_age = 1e-10
    dim = 6
    comp_covmatrix = np.identity(dim)
    comp_means = {
        'comp1': np.zeros(dim),
        'comp2': 10 * np.ones(dim),
    }

    comps = {}
    data = {}
    for comp_name in comp_means.keys():
        comp = SphereComponent(attributes={
            'mean': comp_means[comp_name],
            'covmatrix': comp_covmatrix,
            'age': tiny_age,
        })
        synth_data = SynthData(pars=[comp.get_pars()],
                               starcounts=star_count,
                               measurement_error=measurement_error)
        synth_data.synthesise_everything()
        tabletool.convert_table_astro2cart(synth_data.table)
        data[comp_name] = tabletool.build_data_dict_from_table(synth_data.table)
        comps[comp_name] = comp

    lnprob_comp1_data1 = likelihood.lnprob_func(pars=comps['comp1'].get_pars(),
                                                data=data['comp1'])
    lnprob_comp2_data1 = likelihood.lnprob_func(pars=comps['comp2'].get_pars(),
                                                data=data['comp1'])
    lnprob_comp1_data2 = likelihood.lnprob_func(pars=comps['comp1'].get_pars(),
                                                data=data['comp2'])
    lnprob_comp2_data2 = likelihood.lnprob_func(pars=comps['comp2'].get_pars(),
                                                data=data['comp2'])

    print(lnprob_comp1_data1)
    print(lnprob_comp2_data1)
    print(lnprob_comp1_data2)
    print(lnprob_comp2_data2)

    assert lnprob_comp1_data1 > lnprob_comp2_data1
    assert lnprob_comp2_data2 > lnprob_comp1_data2

    # Check that the different realisations only differ by ~20%
    assert np.isclose(lnprob_comp1_data1, lnprob_comp2_data2, rtol=2e-1)
    assert np.isclose(lnprob_comp1_data2, lnprob_comp2_data1, rtol=2e-1)
def test_stationary_component():
    """
    Integrated test which fits a single component to a synthetic
    association. Check logs/compfitter.log and temp_plots/*.png for
    progress.

    Takes about 10 mins single thread with the C implementation of
    overlap, or ~40 mins with the python implementation (~20 mins on
    my mac).
    """
    # log_filename = 'logs/compfitter_stationary.log'
    # synth_data_savefile = 'temp_data/compfitter_stationary_synthdata.fits'
    short_burnin_step = 200

    true_comp_mean = np.zeros(6)
    true_comp_dx = 2.
    true_comp_dv = 2.
    true_comp_covmatrix = np.identity(6)
    true_comp_covmatrix[:3, :3] *= true_comp_dx**2
    true_comp_covmatrix[3:, 3:] *= true_comp_dv**2
    true_comp_age = 1e-10
    true_comp = SphereComponent(attributes={
        'mean': true_comp_mean,
        'covmatrix': true_comp_covmatrix,
        'age': true_comp_age,
    })
    nstars = 100
    measurement_error = 1e-10

    best_comp, chain, lnprob = run_fit_helper(
        true_comp=true_comp,
        starcounts=nstars,
        measurement_error=measurement_error,
        run_name='stationary',
        burnin_step=short_burnin_step,
        trace_orbit_func=dummy_trace_orbit_func,
    )

    np.save('temp_data/{}_compfitter_stationary_'
            'true_and_best_comp.npy'.format(PY_VERS),
            [true_comp, best_comp])

    assert np.allclose(true_comp.get_mean(), best_comp.get_mean(),
                       atol=1.0)
    assert np.allclose(true_comp.get_age(), best_comp.get_age(),
                       atol=1.0)
    assert np.allclose(true_comp.get_covmatrix(),
                       best_comp.get_covmatrix(), atol=2.0)
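# The fit above passes `dummy_trace_orbit_func`, a helper defined in this
# test module's header (not shown here), so that no real orbit integration
# is performed for these effectively age-less components. A plausible sketch
# of such a stand-in, under the assumption that it simply hands back the
# starting point (illustrative name, to avoid shadowing the real helper):
def _sketch_dummy_trace_orbit_func(xyzuvw_start, times=None):
    """No-op orbit tracer: return the starting phase-space point unchanged."""
    return xyzuvw_start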
def test_get_lnoverlaps():
    """
    Confirms that star-component overlaps get smaller as stars get
    further away.

    First generates a component `sphere_comp`. Then generates three
    stars. The first one is identical to `sphere_comp` in mean and
    covmatrix. The other two share the same covmatrix yet are separated
    in X. We check that the overlap integral is smaller for the more
    separated stars.
    """
    dim = 6
    mean = np.zeros(dim)
    covmatrix = np.identity(dim)
    age = 1e-10
    sphere_comp = SphereComponent(attributes={
        'mean': mean,
        'covmatrix': covmatrix,
        'age': age,
    })

    dx_offsets = [0., 1., 10.]

    star_comps = []
    for dx_offset in dx_offsets:
        star = SphereComponent(attributes={
            'mean': sphere_comp.get_mean()
                    + np.array([dx_offset, 0., 0., 0., 0., 0.]),
            'covmatrix': sphere_comp.get_covmatrix(),
            'age': sphere_comp.get_age(),
        })
        star_comps.append(star)

    nstars = len(star_comps)
    dummy_table = Table(data=np.arange(nstars).reshape(nstars, 1),
                        names=['name'])
    tabletool.append_cart_cols_to_table(dummy_table)

    for star_comp, row in zip(star_comps, dummy_table):
        tabletool.insert_data_into_row(row,
                                       star_comp.get_mean(),
                                       star_comp.get_covmatrix(),
                                       cartesian=True,
                                       )
    dummy_data = tabletool.build_data_dict_from_table(dummy_table)
    ln_overlaps = likelihood.get_lnoverlaps(sphere_comp, data=dummy_data)

    # Check that ln_overlaps is descending
    assert np.allclose(ln_overlaps, sorted(ln_overlaps)[::-1])
def test_get_best_from_chain():
    # Triplicate sphere pars (as copies) to represent a chain with
    # 2 walkers and 3 steps
    intern_sphere_pars = SphereComponent.internalise(SPHERE_PARS)
    dummy_chain = np.array(
        [[intern_sphere_pars, intern_sphere_pars, intern_sphere_pars],
         [intern_sphere_pars, intern_sphere_pars, intern_sphere_pars]])
    dummy_lnprob = np.zeros(dummy_chain.shape[:2])

    # Incorporate identifying marker at desired index
    true_best_ix = (1, 1)
    dummy_chain[true_best_ix][0] = 10.
    dummy_lnprob[true_best_ix] = 1.

    best_comp = SphereComponent.get_best_from_chain(dummy_chain, dummy_lnprob)
    assert np.allclose(dummy_chain[true_best_ix], best_comp.get_emcee_pars())
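# Several tests in this file reference module-level fixtures (SPHERE_PARS,
# ELLIP_PARS, DX, DV, AGE, PARS, COMPONENTS) defined in the module header,
# which is not shown here. A plausible sketch of the spherical fixture,
# assuming the 9-parameter form [X, Y, Z, U, V, W, dX, dV, age] used
# throughout (illustrative values and names only):
_SKETCH_DX, _SKETCH_DV, _SKETCH_AGE = 2., 3., 1e-5
_SKETCH_SPHERE_PARS = np.array([10., 20., 30., 1., 2., 3.,
                                _SKETCH_DX, _SKETCH_DV, _SKETCH_AGE])
# ELLIP_PARS would additionally carry dY, dZ and correlation terms, with
# pars[6:10] being the log-transformed block (cf. the internalisation test).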
def test_swigImplementation():
    """
    Compares the swigged C implementation against the python one
    in likelihood.py
    """
    true_comp_mean = np.zeros(6)
    true_comp_dx = 2.
    true_comp_dv = 2.
    true_comp_covmatrix = np.identity(6)
    true_comp_covmatrix[:3, :3] *= true_comp_dx**2
    true_comp_covmatrix[3:, 3:] *= true_comp_dv**2
    true_comp_age = 1e-10
    true_comp = SphereComponent(attributes={
        'mean': true_comp_mean,
        'covmatrix': true_comp_covmatrix,
        'age': true_comp_age,
    })
    nstars = 100
    synth_data = SynthData(pars=true_comp.get_pars(), starcounts=nstars)
    synth_data.synthesise_everything()

    tabletool.convert_table_astro2cart(synth_data.table)
    star_data = tabletool.build_data_dict_from_table(synth_data.table)

    p_lnos = p_lno(true_comp.get_covmatrix(), true_comp.get_mean(),
                   star_data['covs'], star_data['means'])
    c_lnos = c_lno(true_comp.get_covmatrix(), true_comp.get_mean(),
                   star_data['covs'], star_data['means'], nstars)

    assert np.allclose(p_lnos, c_lnos)
    assert np.isfinite(p_lnos).all()
    assert np.isfinite(c_lnos).all()
def test_execution_simple_fit():
    """
    Don't test for correctness, but check that everything actually executes
    """
    run_name = 'quickdirty'

    logging.info(60 * '-')
    logging.info(15 * '-' + '{:^30}'.format('TEST: ' + run_name) + 15 * '-')
    logging.info(60 * '-')

    savedir = 'temp_data/{}_expectmax_{}/'.format(PY_VERS, run_name)
    mkpath(savedir)
    data_filename = savedir + '{}_expectmax_{}_data.fits'.format(
        PY_VERS, run_name)
    log_filename = 'temp_data/{}_expectmax_{}/log.log'.format(
        PY_VERS, run_name)

    logging.basicConfig(level=logging.INFO, filemode='w',
                        filename=log_filename)

    uniform_age = 1e-10
    sphere_comp_pars = np.array([
        # X, Y, Z, U, V, W, dX, dV, age,
        [0, 0, 0, 0, 0, 0, 10., 5, uniform_age],
    ])
    starcount = 100

    background_density = 1e-9

    ncomps = sphere_comp_pars.shape[0]

    # true_memb_probs = np.zeros((starcount, ncomps))
    # true_memb_probs[:,0] = 1.

    synth_data = SynthData(
        pars=sphere_comp_pars,
        starcounts=[starcount],
        Components=SphereComponent,
        background_density=background_density,
    )
    synth_data.synthesise_everything()
    tabletool.convert_table_astro2cart(synth_data.table)
    background_count = len(synth_data.table) - starcount

    # insert background densities
    synth_data.table['background_log_overlap'] = \
        len(synth_data.table) * [np.log(background_density)]
    synth_data.table.write(data_filename, overwrite=True)

    origins = [SphereComponent(pars) for pars in sphere_comp_pars]

    best_comps, med_and_spans, memb_probs = \
        expectmax.fit_many_comps(data=synth_data.table,
                                 ncomps=ncomps,
                                 rdir=savedir,
                                 burnin=10,
                                 sampling_steps=10,
                                 trace_orbit_func=dummy_trace_orbit_func,
                                 use_background=True,
                                 ignore_stable_comps=False,
                                 max_em_iterations=200)
def test_lcc_like():
    """
    Takes about 40 mins
    """
    mean_now = np.array([50., -100., 25., 1.1, -7.76, 2.25])
    age = 10.
    mean = trace_cartesian_orbit(mean_now, times=-age)
    dx = 5.
    dv = 2.
    covmatrix = np.identity(6)
    covmatrix[:3, :3] *= dx**2
    covmatrix[3:, 3:] *= dv**2
    true_comp = SphereComponent(attributes={
        'mean': mean,
        'covmatrix': covmatrix,
        'age': age,
    })
    nstars = 100
    tiny_measurement_error = 1e-10
    short_burnin_step = 200

    best_comp, chain, lnprob = run_fit_helper(
        true_comp=true_comp,
        starcounts=nstars,
        measurement_error=tiny_measurement_error,
        burnin_step=short_burnin_step,
        run_name='lcc_like',
    )
    np.save('temp_data/{}_compfitter_lcc_like_'
            'true_and_best_comp.npy'.format(PY_VERS),
            [true_comp, best_comp])

    assert np.allclose(true_comp.get_mean(), best_comp.get_mean(),
                       atol=3.0)
    assert np.allclose(true_comp.get_age(), best_comp.get_age(),
                       atol=1.0)
    assert np.allclose(true_comp.get_covmatrix(),
                       best_comp.get_covmatrix(), atol=5.0)
def test_multiple_synth_components():
    """Check initialising with multiple components works"""
    age = 1e-10
    dx = 5.
    dv = 2.
    ass_pars1 = np.array([10, 20, 30, 40, 50, 60, dx, dv, age])
    comp1 = SphereComponent(ass_pars1)

    ass_pars2 = np.array([0., 0., 0, 0, 0, 0, dx, dv, age])
    comp2 = SphereComponent(ass_pars2)

    starcounts = [100, 100]

    try:
        synth_data = SynthData(pars=[ass_pars1, ass_pars2],
                               starcounts=starcounts[0],
                               Components=SphereComponent)
        raise UserWarning('AssertionError should have been thrown by '
                          'synthdata')
    except AssertionError:
        pass

    synth_data = SynthData(pars=[ass_pars1, ass_pars2],
                           starcounts=starcounts,
                           Components=SphereComponent)
    synth_data.synthesise_everything()

    assert len(synth_data.table) == np.sum(starcounts)
    means = tabletool.build_data_dict_from_table(
        synth_data.table,
        main_colnames=[el + '0' for el in 'xyzuvw'],
        only_means=True,
    )
    assert np.allclose(comp2.get_mean(), means[starcounts[0]:].mean(axis=0),
                       atol=2.)
    assert np.allclose(comp1.get_mean(), means[:starcounts[0]].mean(axis=0),
                       atol=2.)
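# Aside: with pytest as the runner, the try/except guard above could be
# written with the raises context manager (assuming pytest is importable in
# this test suite; the names below are the test's own locals):
# with pytest.raises(AssertionError):
#     SynthData(pars=[ass_pars1, ass_pars2], starcounts=starcounts[0],
#               Components=SphereComponent)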
def test_lnprior():
    dim = 6
    mean = np.zeros(dim)
    covmatrix = np.identity(dim)
    age = 10.
    sphere_comp = SphereComponent(attributes={
        'mean': mean,
        'covmatrix': covmatrix,
        'age': age,
    })
    memb_probs = np.ones(10)
    assert np.isfinite(likelihood.lnprior(sphere_comp, memb_probs))

    # Now increase age to something ridiculous
    sphere_comp.update_attribute(attributes={
        'age': 1e10,
    })
    assert np.isinf(likelihood.lnprior(sphere_comp, memb_probs))

    # Try an EllipComponent with a non-symmetrical covariance matrix
    covmatrix[0, 1] = 1.01
    # covmatrix[1, 0] = 100
    ellip_comp = EllipComponent(attributes={
        'mean': mean,
        'covmatrix': covmatrix,
        'age': age,
    })
    assert np.isinf(likelihood.lnprior(ellip_comp, memb_probs))

    # Try an EllipComponent with a very broken correlation value
    covmatrix[0, 1] = 1.01
    covmatrix[1, 0] = 1.01
    ellip_comp = EllipComponent(attributes={
        'mean': mean,
        'covmatrix': covmatrix,
        'age': age,
    })
    assert np.isinf(likelihood.lnprior(ellip_comp, memb_probs))
def test_externalise_and_internalise_pars():
    """Check that pars are successfully converted between internal form
    (used by emcee) and external form (interacted with by the user)"""
    # Check SphereComponent
    internal_sphere_pars = np.copy(SPHERE_PARS)
    internal_sphere_pars[6:8] = np.log(internal_sphere_pars[6:8])

    sphere_comp = SphereComponent(emcee_pars=internal_sphere_pars)
    external_sphere_pars = sphere_comp.get_pars()
    assert np.allclose(SPHERE_PARS, external_sphere_pars)

    re_internal_sphere_pars = sphere_comp.internalise(external_sphere_pars)
    assert np.allclose(internal_sphere_pars, re_internal_sphere_pars)

    # Check EllipComponent
    internal_ellip_pars = np.copy(ELLIP_PARS)
    internal_ellip_pars[6:10] = np.log(internal_ellip_pars[6:10])

    ellip_comp = EllipComponent(emcee_pars=internal_ellip_pars)
    external_ellip_pars = ellip_comp.get_pars()
    assert np.allclose(ELLIP_PARS, external_ellip_pars)

    re_internal_ellip_pars = ellip_comp.internalise(external_ellip_pars)
    assert np.allclose(internal_ellip_pars, re_internal_ellip_pars)
def test_maximisation_gradient_descent_with_multiprocessing_tech():
    """
    Added by MZ 2020-07-13

    Test if maximisation works when using gradient descent and
    multiprocessing.

    NOTE: this does not check whether maximisation returns appropriate
    results; it only tests that the code runs without errors. This is
    mainly to test multiprocessing.
    """
    age = 1e-5
    ass_pars1 = np.array([0, 0, 0, 0, 0, 0, 5., 2., age])
    comp1 = SphereComponent(ass_pars1)
    starcounts = [100, ]
    synth_data = SynthData(pars=[ass_pars1, ],
                           starcounts=starcounts)
    synth_data.synthesise_everything()
    tabletool.convert_table_astro2cart(synth_data.table)
    true_memb_probs = np.zeros((np.sum(starcounts), 1))
    true_memb_probs[:starcounts[0], 0] = 1.
    #~ true_memb_probs[starcounts[0]:, 1] = 1.

    ncomps = len(starcounts)
    noise = np.random.rand(ass_pars1.shape[0]) * 5
    all_init_pars = [ass_pars1 + noise]

    new_comps, all_samples, _, all_init_pos, success_mask = \
        expectmax.maximisation(synth_data.table, ncomps,
                               true_memb_probs, 100, 'iter00',
                               all_init_pars,
                               optimisation_method='Nelder-Mead',
                               nprocess_ncomp=True,
                               )
import matplotlib.pyplot as plt
import numpy as np
import sys
sys.path.insert(0, '..')
from chronostar.component import SphereComponent
from chronostar import tabletool
from chronostar import likelihood
from chronostar import expectmax

component_file = '../results/all_nonbg_scocen_comps.npy'
membership_file = '../results/all_scocen_total_membership.npy'
joined_table = '../data/scocen/joined_scocen_no_duplicates.fit'

star_pars = tabletool.build_data_dict_from_table(joined_table,
                                                 historical=True)
all_comps = SphereComponent.load_raw_components(component_file)
init_z = np.load(membership_file)

# pop manually determined duplicates
if True:
    all_comps.pop(9)
    all_comps.pop(6)
    init_z = init_z[(np.array([0, 1, 2, 3, 4, 5, 7, 8]), )]

print(len(all_comps))
print(len(init_z))

init_z.shape = (1, -1)

memberships = expectmax.expectation(star_pars, all_comps,
                                    old_memb_probs=init_z)
def test_fit_stability_mixed_comps():
    """
    Have a fit with some iterations that have a mix of stable and
    unstable comps.

    TODO: Maybe give 2 similar comps tiny age but overlapping origins
    """
    run_name = 'mixed_stability'

    logging.info(60 * '-')
    logging.info(15 * '-' + '{:^30}'.format('TEST: ' + run_name) + 15 * '-')
    logging.info(60 * '-')

    savedir = 'temp_data/{}_expectmax_{}/'.format(PY_VERS, run_name)
    mkpath(savedir)
    data_filename = savedir + '{}_expectmax_{}_data.fits'.format(
        PY_VERS, run_name)
    log_filename = 'temp_data/{}_expectmax_{}/log.log'.format(
        PY_VERS, run_name)

    logging.basicConfig(level=logging.INFO, filemode='w',
                        filename=log_filename)

    shared_cd_mean = np.zeros(6)
    tiny_age = 0.1
    medium_age = 10.

    # origin_1 = traceorbit.trace_cartesian_orbit(shared_cd_mean, times=-medium_age)
    # origin_2 = traceorbit.trace_cartesian_orbit(shared_cd_mean, times=-2*medium_age)
    #
    # cd_mean_3 = np.array([-200, 200, 0, 0, 50, 0.])
    # origin_3 = traceorbit.trace_cartesian_orbit(cd_mean_3, times=-tiny_age)
    #
    # sphere_comp_pars = np.array([
    #     # X, Y, Z, U, V, W, dX, dV, age,
    #     np.hstack((origin_1, 10., 5., medium_age)),    # Next two comps share a current day origin
    #     np.hstack((origin_2, 10., 5., 2*medium_age)),  # so hopefully will need several iterations
    #                                                    # to disentangle
    #     np.hstack((origin_3, 10., 5., tiny_age)),      # a distinct comp that is stable quickly
    # ])

    uniform_age = 1e-10
    sphere_comp_pars = np.array([
        #  X,   Y, Z, U,  V, W, dX, dV, age,
        [50,   0, 0, 0, 50, 0, 10., 5, uniform_age],  # Very distant (and stable) comp
        [ 0, -20, 0, 0, -5, 0, 10., 5, uniform_age],  # Overlapping comp 1
        [ 0,  20, 0, 0,  5, 0, 10., 5, uniform_age],  # Overlapping comp 2
    ])
    starcounts = [50, 100, 200]
    ncomps = sphere_comp_pars.shape[0]

    # initialise z appropriately
    true_memb_probs = np.zeros((np.sum(starcounts), ncomps))
    start = 0
    for i in range(ncomps):
        true_memb_probs[start:start + starcounts[i], i] = 1.0
        start += starcounts[i]

    # Initialise some random membership probabilities
    # which will serve as our starting guess
    init_memb_probs = np.random.rand(np.sum(starcounts), ncomps)
    # To aid a component in quickly becoming stable, initialise the
    # memberships correctly for stars belonging to this component
    init_memb_probs[:starcounts[0]] = 0.
    init_memb_probs[:starcounts[0], 0] = 1.
    init_memb_probs[starcounts[0]:, 0] = 0.
    # Normalise such that each row sums to 1
    init_memb_probs = (init_memb_probs.T / init_memb_probs.sum(axis=1)).T

    synth_data = SynthData(
        pars=sphere_comp_pars,
        starcounts=starcounts,
        Components=SphereComponent,
    )
    synth_data.synthesise_everything()
    tabletool.convert_table_astro2cart(synth_data.table,
                                       write_table=True,
                                       filename=data_filename)

    origins = [SphereComponent(pars) for pars in sphere_comp_pars]
    SphereComponent.store_raw_components(savedir + 'origins.npy', origins)

    best_comps, med_and_spans, memb_probs = \
        expectmax.fit_many_comps(data=synth_data.table,
                                 ncomps=ncomps,
                                 rdir=savedir,
                                 init_memb_probs=init_memb_probs,
                                 trace_orbit_func=dummy_trace_orbit_func,
                                 ignore_stable_comps=True)

    perm = expectmax.get_best_permutation(memb_probs, true_memb_probs)
    logging.info('Best permutation is: {}'.format(perm))

    # Calculate the membership difference; we divide by 2 since
    # incorrectly allocated stars are double counted
    total_diff = 0.5 * np.sum(np.abs(true_memb_probs - memb_probs[:, perm]))

    # Assert that the misallocated membership is less than 10%
    assert total_diff < 0.1 * np.sum(starcounts)

    for origin, best_comp in zip(origins, np.array(best_comps)[perm, ]):
        assert (isinstance(origin, SphereComponent) and
                isinstance(best_comp, SphereComponent))
        o_pars = origin.get_pars()
        b_pars = best_comp.get_pars()

        logging.info("origin pars:   {}".format(o_pars))
        logging.info("best fit pars: {}".format(b_pars))
        assert np.allclose(origin.get_mean(), best_comp.get_mean(),
                           atol=5.)
        assert np.allclose(origin.get_sphere_dx(),
                           best_comp.get_sphere_dx(), atol=2.)
        assert np.allclose(origin.get_sphere_dv(),
                           best_comp.get_sphere_dv(), atol=2.)
        assert np.allclose(origin.get_age(), best_comp.get_age(),
                           atol=1.)
def test_fit_many_comps():
    """
    Synthesise a file with negligible error, retrieve initial
    parameters

    Takes a while... maybe this belongs in integration unit_tests
    """
    run_name = 'stationary'

    logging.info(60 * '-')
    logging.info(15 * '-' + '{:^30}'.format('TEST: ' + run_name) + 15 * '-')
    logging.info(60 * '-')

    savedir = 'temp_data/{}_expectmax_{}/'.format(PY_VERS, run_name)
    mkpath(savedir)
    data_filename = savedir + '{}_expectmax_{}_data.fits'.format(
        PY_VERS, run_name)
    log_filename = 'temp_data/{}_expectmax_{}/log.log'.format(
        PY_VERS, run_name)

    logging.basicConfig(level=logging.INFO, filemode='w',
                        filename=log_filename)

    uniform_age = 1e-10
    sphere_comp_pars = np.array([
        #   X,   Y,   Z, U, V, W,  dX, dV, age,
        [-50, -50, -50, 0, 0, 0, 10.,  5, uniform_age],
        [ 50,  50,  50, 0, 0, 0, 10.,  5, uniform_age],
    ])
    starcounts = [20, 50]
    ncomps = sphere_comp_pars.shape[0]

    # initialise z appropriately
    true_memb_probs = np.zeros((np.sum(starcounts), ncomps))
    start = 0
    for i in range(ncomps):
        true_memb_probs[start:start + starcounts[i], i] = 1.0
        start += starcounts[i]

    # Initialise some random membership probabilities,
    # normalising such that each row sums to 1
    init_memb_probs = np.random.rand(np.sum(starcounts), ncomps)
    init_memb_probs = (init_memb_probs.T / init_memb_probs.sum(axis=1)).T

    synth_data = SynthData(
        pars=sphere_comp_pars,
        starcounts=starcounts,
        Components=SphereComponent,
    )
    synth_data.synthesise_everything()
    tabletool.convert_table_astro2cart(synth_data.table,
                                       write_table=True,
                                       filename=data_filename)

    origins = [SphereComponent(pars) for pars in sphere_comp_pars]

    best_comps, med_and_spans, memb_probs = \
        expectmax.fit_many_comps(data=synth_data.table,
                                 ncomps=ncomps,
                                 rdir=savedir,
                                 init_memb_probs=init_memb_probs,
                                 trace_orbit_func=dummy_trace_orbit_func,
                                 ignore_stable_comps=False)

    perm = expectmax.get_best_permutation(memb_probs, true_memb_probs)
    logging.info('Best permutation is: {}'.format(perm))

    assert np.allclose(true_memb_probs, memb_probs[:, perm])

    for origin, best_comp in zip(origins, np.array(best_comps)[perm, ]):
        assert (isinstance(origin, SphereComponent) and
                isinstance(best_comp, SphereComponent))
        o_pars = origin.get_pars()
        b_pars = best_comp.get_pars()

        logging.info("origin pars:   {}".format(o_pars))
        logging.info("best fit pars: {}".format(b_pars))
        assert np.allclose(origin.get_mean(), best_comp.get_mean(),
                           atol=5.)
        assert np.allclose(origin.get_sphere_dx(),
                           best_comp.get_sphere_dx(), atol=2.)
        assert np.allclose(origin.get_sphere_dv(),
                           best_comp.get_sphere_dv(), atol=2.)
        assert np.allclose(origin.get_age(), best_comp.get_age(),
                           atol=1.)
def test_fit_one_comp_with_background():
    """
    Synthesise a file with negligible error, retrieve initial
    parameters

    Takes a while...
    """
    run_name = 'background'

    logging.info(60 * '-')
    logging.info(15 * '-' + '{:^30}'.format('TEST: ' + run_name) + 15 * '-')
    logging.info(60 * '-')

    savedir = 'temp_data/{}_expectmax_{}/'.format(PY_VERS, run_name)
    mkpath(savedir)
    data_filename = savedir + '{}_expectmax_{}_data.fits'.format(
        PY_VERS, run_name)
    log_filename = 'temp_data/{}_expectmax_{}/log.log'.format(
        PY_VERS, run_name)

    logging.basicConfig(level=logging.INFO, filemode='w',
                        filename=log_filename)

    uniform_age = 1e-10
    sphere_comp_pars = np.array([
        # X, Y, Z, U, V, W, dX, dV, age,
        [0, 0, 0, 0, 0, 0, 10., 5, uniform_age],
    ])
    starcount = 200

    background_density = 1e-9

    ncomps = sphere_comp_pars.shape[0]

    # true_memb_probs = np.zeros((starcount, ncomps))
    # true_memb_probs[:,0] = 1.

    synth_data = SynthData(
        pars=sphere_comp_pars,
        starcounts=[starcount],
        Components=SphereComponent,
        background_density=background_density,
    )
    synth_data.synthesise_everything()
    tabletool.convert_table_astro2cart(synth_data.table)
    background_count = len(synth_data.table) - starcount
    logging.info('Generated {} background stars'.format(background_count))

    # insert background densities
    synth_data.table['background_log_overlap'] = \
        len(synth_data.table) * [np.log(background_density)]
    synth_data.table.write(data_filename, overwrite=True)

    origins = [SphereComponent(pars) for pars in sphere_comp_pars]

    best_comps, med_and_spans, memb_probs = \
        expectmax.fit_many_comps(data=synth_data.table,
                                 ncomps=ncomps,
                                 rdir=savedir,
                                 burnin=500,
                                 sampling_steps=5000,
                                 trace_orbit_func=dummy_trace_orbit_func,
                                 use_background=True,
                                 ignore_stable_comps=False,
                                 max_em_iterations=200)

    # return best_comps, med_and_spans, memb_probs

    # Check parameters are close
    assert np.allclose(sphere_comp_pars, best_comps[0].get_pars(),
                       atol=1.5)

    # Check most assoc members are correctly classified
    recovery_count_threshold = 0.95 * starcount
    recovery_count_actual = np.sum(memb_probs[:starcount, 0] > 0.5)
    assert recovery_count_threshold < recovery_count_actual

    # Check most background stars are correctly classified:
    # the number of bg stars classified as members should be less
    # than 5% of all background stars
    contamination_count_threshold = 0.05 * len(memb_probs[starcount:])
    contamination_count_actual = np.sum(memb_probs[starcount:, 0] > 0.5)
    assert contamination_count_threshold > contamination_count_actual

    # Check reported membership probabilities are consistent with
    # the recovery rate (within 5%)
    mean_membership_confidence = np.mean(memb_probs[:starcount, 0])
    assert np.isclose(recovery_count_actual / starcount,
                      mean_membership_confidence, atol=0.05)
import matplotlib.pyplot as plt
from chronostar.component import SphereComponent
from chronostar.traceorbit import trace_cartesian_orbit
import numpy as np

mean_now = np.array([0., 0., 30., 5., 5., 5.])
init_dx = 5.
init_dv = 1.
age = 100.
mean_then = trace_cartesian_orbit(mean_now, times=-age)
pars1 = np.hstack((mean_then, init_dx, init_dv, age))
comp1 = SphereComponent(pars1)
print(comp1.get_pars())

labels = 'XYZUVW'
units = 3 * ['pc'] + 3 * ['km/s']
for dim1, dim2 in [(0, 3), (1, 4), (2, 5)]:
    plt.clf()
    comp1.plot(dim1=dim1, dim2=dim2, comp_now=True, comp_then=True,
               comp_orbit=True)
    plt.xlabel('{} [{}]'.format(labels[dim1], units[dim1]))
    plt.ylabel('{} [{}]'.format(labels[dim2], units[dim2]))
    plt.savefig('../plots/simple_plot_{}{}.pdf'.format(labels[dim1],
                                                       labels[dim2]))
# Initialise all stars in dataset to be full members of first component
using_bg = config.config.get('include_background_distribution', False)
init_memb_probs = np.zeros((len(data_dict['means']), 1 + using_bg))
init_memb_probs[:, 0] = 1.

store_burnin_chains = config.advanced.get('store_burnin_chains', False)
if store_burnin_chains:
    log_message(msg='Storing burnin chains', symbol='-')

# Try and recover any results from previous run
try:
    prev_med_and_spans = np.load(run_dir + 'final/'
                                 + final_med_and_spans_file)
    prev_memb_probs = np.load(run_dir + 'final/' + final_memb_probs_file)
    try:
        prev_comps = Component.load_raw_components(
            str(run_dir + 'final/' + final_comps_file))
    # Final comps are there, they just can't be read by current module
    # so quickly fit them based on fixed prev membership probabilities
    except AttributeError:
        logging.info('Component class has been modified, reconstructing '
                     'from chain')
        prev_comps = ncomps * [None]
        for i in range(ncomps):
            final_cdir = run_dir + 'final/comp{}/'.format(i)
            chain = np.load(final_cdir + 'final_chain.npy')
            lnprob = np.load(final_cdir + 'final_lnprob.npy')
            npars = len(Component.PARAMETER_FORMAT)
            best_ix = np.argmax(lnprob)
            best_pars = chain.reshape(-1, npars)[best_ix]
            prev_comps[i] = Component(emcee_pars=best_pars)
        Component.store_raw_components(
            str(run_dir + 'final/' + final_comps_file), prev_comps)
'''
Noticed issue with calculating a component's current day mean.
Behaviour of trace_cartesian_orbit varies based on whether a single
age is provided vs a range of times
'''
import numpy as np
import sys
sys.path.insert(0, '..')

from chronostar.component import SphereComponent
from chronostar.traceorbit import trace_cartesian_orbit

bugged_comp = SphereComponent.load_raw_components('bugged_component.npy')[0]
y_pos_now = bugged_comp.get_mean_now()[1]
print('Mean then: {}'.format(bugged_comp.get_mean()))
print('Internally calculated mean_now: {}'.format(bugged_comp.get_mean_now()))

mean_then = bugged_comp.get_mean()
age = bugged_comp.get_age()
y_pos_linear = mean_then[1] + mean_then[4] * age
print('Linear Y motion: {}'.format(y_pos_linear))
print('Difference of {}'.format(y_pos_now - y_pos_linear))

ts = np.linspace(0, bugged_comp.get_age(), 50)
for method in [
        'odeint',
        'symplec4_c',
        'rk4_c',
        'dopr54_c',
    cd_med_and_spans[label] = np.load(chaindir + cd_file_stem)
    print('loaded for {}'.format(label))
except IOError:
    print('calculating for {}'.format(label))
    # Convert chain of sampling origin to corresponding chain of current day
    flat_final_chain = np.load(chaindir + 'final_chain.npy').reshape(-1, 9)
    nsamples = len(flat_final_chain)

    # initialise empty array
    current_day_chain = np.zeros((nsamples,
                                  len(FreeComponent.PARAMETER_FORMAT)))

    # One by one, get equivalent pars of current day
    for ix, sample in enumerate(flat_final_chain):
        if ix % 100 == 0:
            print('{} of {} done'.format(ix, len(flat_final_chain)))
        comp = SphereComponent(emcee_pars=sample)
        cd_mean, cd_cov = comp.get_currentday_projection()
        cd_comp = FreeComponent(attributes={'mean': cd_mean,
                                            'covmatrix': cd_cov,
                                            'age': 0.})
        current_day_chain[ix] = cd_comp.get_pars()

    cd_med_and_spans[label] = compfitter.calc_med_and_span(current_day_chain)
    np.save(chaindir + cd_file_stem, cd_med_and_spans[label])

# cd_med_and_spans[label] = np.load(chaindir + 'cd_med_and_span.npy')
# cd_med_and_spans[label][6:8] = np.exp(cd_med_and_spans[label][6:8])
origin_med_and_spans[label] = \
    np.load(rdir + 'final_med_errs.npy')[comp_ix[label]]
origin_med_and_spans[label][6:8] = np.exp(origin_med_and_spans[label][6:8])
zs = np.load(rdir + 'final_membership.npy')
""" import numpy as np import sys sys.path.insert(0, '..') from chronostar.component import SphereComponent from chronostar import tabletool from chronostar import expectmax from astropy.table import Table, vstack, join # Create components #c = np.load('all_nonbg_scocen_comps.npy') # including LCC #comps = [SphereComponent(pars=x) for x in c] comps = SphereComponent.load_raw_components('all_nonbg_scocen_comps.npy') print('components', c.shape) print('Are there duplicate components?') #datafile = 'data_table_cartesian_with_bg_ols.fits' datafile = 'data_table_cartesian_including_tims_stars_with_bg_ols.fits' # Read Gaia data including both stars with known and missing radial velocities #datafile = 'data_table_cartesian_100k.fits' data_table = tabletool.read(datafile) # This table is masked. Unmask: data_table = data_table.filled() print('DATA READ', len(data_table)) historical = 'c_XU' in data_table.colnames
        return recent_lims
    else:
        try:
            return [np.min((current_lims[0], recent_lims[0])),
                    np.max((current_lims[1], recent_lims[1]))]
        except:
            import pdb; pdb.set_trace()


labels = 'XYZUVW'
units = 3 * ['pc'] + 3 * ['km/s']

plt_dir = '../plots/scocen_formation/'
all_comps_file = '../results/all_nonbg_scocen_comps.npy'

# Reading in and tidying components
all_comps = SphereComponent.load_raw_components(all_comps_file)

# # Pop off some manually identified duplicates
# all_comps.pop(9)
# all_comps.pop(6)

print('ages of components: {}'.format([c.get_age() for c in all_comps]))
max_age = np.max([c.get_age() for c in all_comps])

ntimes = 100
times = np.linspace(max_age, 0, ntimes)

#for time in times:
time_ix = ntimes - ntimes // 2
logging.basicConfig(filename=rdir + 'log.log', level=logging.INFO)
log_message('Beginning Chronostar run', symbol='_', surround=True)
log_message('Setting up', symbol='.', surround=True)
assert os.access(rdir, os.W_OK)

# ------------------------------------------------------------
# -----  SETTING UP ALL DATA PREP  ---------------------------
# ------------------------------------------------------------

# Set up some filename constants (INPUT files) FOR COMPARISON and expectmax
final_comps_file_with_rv = '../data/no_rv_paper/beta_pic_sphere_component.npy'
final_memb_probs_file_with_rv = '../data/no_rv_paper/beta_memb_probs.npy'
bp_comp_with_rv = SphereComponent.load_components(final_comps_file_with_rv)
bp_probs_with_rv = np.load(final_memb_probs_file_with_rv)
print(bp_probs_with_rv)

# Set up some filename constants (OUTPUT files)
final_comps_file = 'final_comps.npy'
final_med_and_spans_file = 'final_med_and_spans.npy'
final_memb_probs_file = 'final_membership.npy'

# TODO: data_loadfile: mask out rows with nans

# First see if a data savefile path has been provided, and if
# so, then just assume this script has already been performed
# and the data prep has already been done
if (config.config['data_savefile'] != '' and
        os.path.isfile(config.config['data_savefile'])):
def test_spherecomponent_initialisation():
    sphere_comp = SphereComponent(pars=SPHERE_PARS)
    assert np.allclose(SPHERE_PARS[:6], sphere_comp._mean)
    assert np.allclose(AGE, sphere_comp._age)
    assert np.isclose(DX, sphere_comp.get_sphere_dx())
    assert np.isclose(DV, sphere_comp.get_sphere_dv())
def test_2comps_and_background():
    """
    Synthesise a file with negligible error, retrieve initial
    parameters

    Takes a while... maybe this belongs in integration unit_tests

    Performance of test is a bit tricky to calibrate. Since we are
    skipping any temporal evolution for speed reasons, we model two
    isotropic Gaussians. Now if these Gaussians are too far apart,
    NaiveFit will gravitate to one of the Gaussians during the 1
    component fit, and then struggle to discover the second Gaussian.

    If the Gaussians are too close, then both will be characterised by
    the 1 component fit, and the BIC will decide two Gaussian
    components are overkill.

    I think I've addressed this by having the two groups have a large
    number of stars.
    """
    using_bg = True

    run_name = '2comps_and_background'

    logging.info(60 * '-')
    logging.info(15 * '-' + '{:^30}'.format('TEST: ' + run_name) + 15 * '-')
    logging.info(60 * '-')

    savedir = 'temp_data/{}_naive_{}/'.format(PY_VERS, run_name)
    mkpath(savedir)
    data_filename = savedir + '{}_naive_{}_data.fits'.format(PY_VERS,
                                                             run_name)
    log_filename = 'temp_data/{}_naive_{}/log.log'.format(PY_VERS, run_name)

    logging.basicConfig(level=logging.INFO, filemode='w',
                        filename=log_filename)

    ### INITIALISE SYNTHETIC DATA ###

    # DON'T CHANGE THE AGE! BECAUSE THIS TEST DOESN'T USE ANY ORBIT
    # INTEGRATION!!!
    # Note: if peaks are too far apart, it will be difficult for
    # chronostar to identify the 2nd when moving from a 1-component
    # to a 2-component fit.
    uniform_age = 1e-10
    sphere_comp_pars = np.array([
        #  X, Y, Z, U, V, W, dX, dV, age,
        [ 0, 0, 0, 0, 0, 0, 10., 5, uniform_age],
        [30, 0, 0, 0, 5, 0, 10., 5, uniform_age],
    ])
    starcounts = [100, 150]
    ncomps = sphere_comp_pars.shape[0]
    nstars = np.sum(starcounts)

    background_density = 1e-9

    # initialise z appropriately
    true_memb_probs = np.zeros((np.sum(starcounts), ncomps))
    start = 0
    for i in range(ncomps):
        true_memb_probs[start:start + starcounts[i], i] = 1.0
        start += starcounts[i]

    try:
        # Check if the synth data has already been constructed
        data_dict = tabletool.build_data_dict_from_table(data_filename)
    except:
        synth_data = SynthData(
            pars=sphere_comp_pars,
            starcounts=starcounts,
            Components=SphereComponent,
            background_density=background_density,
        )
        synth_data.synthesise_everything()
        tabletool.convert_table_astro2cart(synth_data.table,
                                           write_table=True,
                                           filename=data_filename)
        background_count = len(synth_data.table) - np.sum(starcounts)
        # insert background densities
        synth_data.table['background_log_overlap'] = \
            len(synth_data.table) * [np.log(background_density)]
        synth_data.table.write(data_filename, overwrite=True)

    origins = [SphereComponent(pars) for pars in sphere_comp_pars]

    ### SET UP PARAMETER FILE ###
    fit_pars = {
        'results_dir': savedir,
        'data_table': data_filename,
        'trace_orbit_func': 'dummy_trace_orbit_func',
        'return_results': True,
        'par_log_file': savedir + 'fit_pars.log',
        'overwrite_prev_run': True,
        # 'nthreads': 18,
        'nthreads': 3,
    }

    ### INITIALISE AND RUN A NAIVE FIT ###
    naivefit = NaiveFit(fit_pars=fit_pars)
    result, score = naivefit.run_fit()

    best_comps = result['comps']
    memb_probs = result['memb_probs']

    # Check membership has ncomps + 1 (bg) columns
    n_fitted_comps = memb_probs.shape[-1] - 1
    assert ncomps == n_fitted_comps

    ### CHECK RESULT ###
    # No guarantee of order, so check if result is permutated;
    # also we drop the bg memberships for permutation reasons
    perm = expectmax.get_best_permutation(memb_probs[:nstars, :ncomps],
                                          true_memb_probs)
    memb_probs = memb_probs[:nstars]

    logging.info('Best permutation is: {}'.format(perm))

    n_misclassified_stars = np.sum(
        np.abs(true_memb_probs - np.round(memb_probs[:, perm])))

    # Check fewer than 15% of association stars are misclassified
    try:
        assert n_misclassified_stars / nstars * 100 < 15
    except AssertionError:
        import pdb
        pdb.set_trace()

    for origin, best_comp in zip(origins, np.array(best_comps)[perm, ]):
        assert (isinstance(origin, SphereComponent) and
                isinstance(best_comp, SphereComponent))
        o_pars = origin.get_pars()
        b_pars = best_comp.get_pars()

        logging.info("origin pars:   {}".format(o_pars))
        logging.info("best fit pars: {}".format(b_pars))
        assert np.allclose(origin.get_mean(), best_comp.get_mean(),
                           atol=5.)
        assert np.allclose(origin.get_sphere_dx(),
                           best_comp.get_sphere_dx(), atol=2.5)
        assert np.allclose(origin.get_sphere_dv(),
                           best_comp.get_sphere_dv(), atol=2.5)
        assert np.allclose(origin.get_age(), best_comp.get_age(),
                           atol=1.)
historical = 'c_XU' in data_table.colnames

############################################################################
############ COMPONENT OVERLAPS ############################################
############################################################################
print('Create data dict')

# Create data dict
data_dict = tabletool.build_data_dict_from_table(
    data_table,
    get_background_overlaps=True,
    historical=historical,
)

# Create components
comps = SphereComponent.load_raw_components(comps_filename)

# COMPONENT OVERLAPS
overlaps = expectmax.get_all_lnoverlaps(data_dict, comps)
print('overlaps.shape', overlaps.shape, len(comps))

# MEMBERSHIP PROBABILITIES
membership_probabilities = np.array(
    [expectmax.calc_membership_probs(ol) for ol in overlaps])

# Create a table
for i in range(membership_probabilities.shape[1] - 1):
    data_table['membership_%d' % (i + 1)] = membership_probabilities[:, i]
data_table['membership_bg'] = membership_probabilities[:, -1]

# Print data
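# For reference, calc_membership_probs presumably turns one star's
# ln-overlaps into normalised probabilities; a minimal sketch of that
# presumed behaviour (a max-stabilised softmax, illustrative name only):
def _sketch_calc_membership_probs(star_lnols):
    """Normalise a single star's ln-overlaps into probabilities."""
    shifted = np.asarray(star_lnols) - np.max(star_lnols)
    ols = np.exp(shifted)
    return ols / ols.sum()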
                                h=1. * 10.**expon,
                                args=(comp.get_age(), )))
    return expons[np.where([
        np.allclose(cov_mat, ref_cov_now, rtol=rtol, atol=atol)
        for cov_mat in covs_now
    ])]


mean = np.zeros(6)  # centre at LSR
mean = np.array([20., -80., 25., -1.9, 11.76, 2.25])
dx = 10.
dv = 2.
age = 30.
pars = np.hstack((mean, dx, dv, age))
comp = SphereComponent(pars=pars)

results = {}

import matplotlib.pyplot as plt
for label, rtol in zip(['e-4', 'e-3', 'e-2', 'e-1'],
                       [1e-4, 1e-3, 1e-2, 1e-1]):
    all_stable_expons = []

    lo_age = 0
    hi_age = 100
    lo_expon = -10
    hi_expon = 2
    for age in range(lo_age, hi_age):
        pars = np.hstack((mean, dx, dv, age))
        stable_expons = get_stable_expons(SphereComponent(pars),
                                          rtol=rtol,
# Fit the first component
log_message(msg='FITTING {} COMPONENT'.format(ncomps),
            symbol='*', surround=True)
run_dir = rdir + '{}/'.format(ncomps)

# Initialise all stars in dataset to be full members of first component
init_memb_probs = np.zeros((len(data_dict['means']), 2))
init_memb_probs[:, 0] = 1.

# Try and recover any results from previous run
try:
    prev_med_and_spans = np.load(run_dir + 'final/'
                                 + final_med_and_spans_file)
    prev_memb_probs = np.load(run_dir + 'final/' + final_memb_probs_file)
    try:
        prev_comps = Component.load_components(
            str(run_dir + 'final/' + final_comps_file))
    # Final comps are there, they just can't be read by current module
    # so quickly fit them based on fixed prev membership probabilities
    except AttributeError:
        logging.info('Component class has been modified, reconstructing '
                     'from chain')
        prev_comps = ncomps * [None]
        for i in range(ncomps):
            final_cdir = run_dir + 'final/comp{}/'.format(i)
            chain = np.load(final_cdir + 'final_chain.npy')
            lnprob = np.load(final_cdir + 'final_lnprob.npy')
            npars = len(Component.PARAMETER_FORMAT)
            best_ix = np.argmax(lnprob)
            best_pars = chain.reshape(-1, npars)[best_ix]
            prev_comps[i] = Component(emcee_pars=best_pars)
        np.save(str(run_dir + 'final/' + final_comps_file), prev_comps)