Example #1
def test_pythonFuncs():
    xyzuvw_file = "../data/fed_stars_20_xyzuvw.fits"
    xyzuvw_dict = gf.loadXYZUVW(xyzuvw_file)

    star_means = xyzuvw_dict['xyzuvw']
    star_covs = xyzuvw_dict['xyzuvw_cov']
    nstars = star_means.shape[0]

    group_mean = np.mean(star_means, axis=0)
    group_cov = np.cov(star_means.T)

    co1s = []
    co2s = []
    for i, (scov, smn) in enumerate(zip(star_covs, star_means)):
        print(i)
        co1s.append(co1(group_cov, group_mean, scov, smn))
        co2s.append(co2(group_cov, group_mean, scov, smn))
        ol.get_lnoverlaps(group_cov, group_mean,
                          np.array([scov]),
                          np.array([smn]), 1)
    co1s = np.array(co1s)
    co2s = np.array(co2s)
    co3s = np.exp(sclno(group_cov, group_mean, star_covs, star_means, nstars))
    assert np.allclose(co1s, co2s)
    assert np.allclose(co2s, co3s)
    assert np.allclose(co1s, co3s)

    # note that most overlaps go to 0, but the log overlaps retain the
    # information (see the standalone log-space sketch after this function)
    co1s = []
    co2s = []
    for i, (scov, smn) in enumerate(zip(star_covs, star_means)):
        co1s.append(co1(star_covs[15], star_means[15], scov, smn))
        co2s.append(co2(star_covs[15], star_means[15], scov, smn))
    co1s = np.array(co1s)
    co2s = np.array(co2s)
    lnos = sclno(star_covs[15], star_means[15], star_covs, star_means, 1)
    co3s = np.exp(lnos)
    assert np.allclose(co1s, co2s)
    assert np.allclose(co2s, co3s)
    assert np.allclose(co1s, co3s)
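
# The comment in the loop above is the key numerical point: direct Gaussian
# overlaps underflow to 0.0 in float64 once stars are well separated, while
# their logarithms stay finite. The standalone sketch below uses plain NumPy
# only (none of chronostar's co1/co2/sclno helpers or the C module ol) and
# relies on the standard identity that the overlap integral of two Gaussians
# is a Gaussian density evaluated at the difference of their means.
import numpy as np


def ln_gaussian_overlap(mn1, cov1, mn2, cov2):
    """ln of the overlap integral of N(mn1, cov1) and N(mn2, cov2)."""
    dim = len(mn1)
    comb_cov = cov1 + cov2
    diff = mn1 - mn2
    sign, ln_det = np.linalg.slogdet(comb_cov)
    assert sign > 0, "combined covariance must be positive definite"
    maha_sq = diff @ np.linalg.solve(comb_cov, diff)
    return -0.5 * (dim * np.log(2 * np.pi) + ln_det + maha_sq)


# two well-separated 6D phase-space Gaussians
mn_a, mn_b = np.zeros(6), np.full(6, 60.)
cov_a = cov_b = np.eye(6)
lnol = ln_gaussian_overlap(mn_a, cov_a, mn_b, cov_b)
print(lnol)          # finite and very negative (roughly -5.4e3)
print(np.exp(lnol))  # underflows to exactly 0.0
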
# coding: utf-8
get_ipython().magic(u'run ipython_primer.py')
import numpy as np
import chronostar.compfitter as gf
xyzuvw_dict = gf.load("../data/gaia_dr2_ok_plx_xyzuvw.fits.gz.fits")
xyzuvw_dict = gf.loadXYZUVW("../data/gaia_dr2_ok_plx_xyzuvw.fits.gz.fits")
xyzuvw_dict.keys()
xyzuvw_dict['xyzuvw'].shape
xyzuvw_dict['xyzuvw_cov'].shape
get_ipython().magic(u'cat log_gaia_converter.log')
get_ipython().magic(u'ls ')
get_ipython().magic(u'rm nohup.out')
get_ipython().magic(u'rm log_gaia_converter.log')
get_ipython().magic(u'ls ')
6000 * 5
6000 * 5 / 60
6000 * 5 / 60 / 60
8 * 60
            #xyzuvw_file = res_dir + scen + "_xyzuvw_init.npy"
            xyzuvw_file = res_dir + scen + "_perf_xyzuvw.npy"
            init_xyzuvw = np.load(xyzuvw_file)
            astr_table = chronostar.synthdata.measureXYZUVW(init_xyzuvw, prec_val[prec])
            cv.convertMeasurementsToCartesian(t=astr_table,
                                              savefile=fits_file)



file_stems = [scen + "_" + prec for scen in scenarios for prec in precs]

#fits_files = [res_dir + "xyzuvw_now_" + stem + ".fits" for stem in file_stems]
assert len(fits_files) == len(file_stems)

for i in range(len(fits_files)):
    xyzuvw_dict = gf.loadXYZUVW(fits_files[i])
    nstars = xyzuvw_dict['xyzuvw'].shape[0]
    origin_file = res_dir + file_stems[i][:-5] + '_origins.npy'
    group = np.load(origin_file).item()
    true_age = group.age
    ntimes = int(2 * true_age + 1)
    times = -np.linspace(0, 2 * true_age, ntimes)
    tb = torb.trace_many_cartesian_orbit(xyzuvw_dict['xyzuvw'], times=times,
                                         single_age=False,
                                         savefile=res_dir + 'tb_{}.npy'.format(
                                             int(true_age)))

    # for each timestep, get mean of association and distance from mean
    dists = np.zeros((nstars, ntimes))
    stds = np.zeros(ntimes)
    for tix in range(ntimes):
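
# The per-timestep loop above is cut off at this point. For reference, here is a
# minimal sketch of what a "distance from the association mean at each timestep"
# computation could look like. It assumes `tb` is an (nstars, ntimes, 6) array of
# traced-back phase-space positions, which is a guess about
# trace_many_cartesian_orbit's output rather than something this fragment shows.
import numpy as np


def per_timestep_spread(tb):
    """Distance of each star from the association mean at every timestep,
    plus the standard deviation of those distances per timestep."""
    nstars, ntimes, _ = tb.shape
    dists = np.zeros((nstars, ntimes))
    stds = np.zeros(ntimes)
    for tix in range(ntimes):
        mean_now = tb[:, tix, :].mean(axis=0)
        dists[:, tix] = np.linalg.norm(tb[:, tix, :] - mean_now, axis=1)
        stds[tix] = np.std(dists[:, tix])
    return dists, stds
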
"""
Generate some BPMG plots for a talk.
"""
from __future__ import division, print_function

import sys
sys.path.insert(0, "..")

import chronostar.retired.hexplotter as hp
import chronostar.compfitter as gf

xyzuvw_file = "../data/gaia_dr2_bp_xyzuvw.fits"
rdir = "../results/em_fit/gaia_dr2_bp/iter11/"

star_pars = gf.loadXYZUVW(xyzuvw_file)
#final_z = np.load(rdir + "final_groups.npy")
#final_med_errs = np.load(rdir + "final_med_errs.npy")
#final_groups = np.load(rdir + "final_groups.npy")

hp.dataGathererEM(2, 10, rdir, rdir, xyzuvw_file=xyzuvw_file)


if not os.path.exists(rdir_stem):  # on server
    rdir_stem = "/data/mash/tcrun/em_fit/"
    
for ncomps in range(1,4):
    try:
        rdir = rdir_stem + assoc_name + "_{}/".format(ncomps)
        ddir = "../data/"


        # final_z_file = rdir + "final/final_membership.npy"
        final_z_file = rdir + "memberships.npy"
        final_groups_file = rdir + "final_groups.npy"
        bg_hists_file = rdir + "bg_hists.npy"
        data_file = ddir + assoc_name + "_xyzuvw.fits"
        star_pars = gf.loadXYZUVW(data_file)

        z = np.load(final_z_file)
        # import pdb; pdb.set_trace()
        groups = np.load(final_groups_file)
        bg_hists = np.load(bg_hists_file)

        bg_ln_ols = em.backgroundLogOverlaps(star_pars['xyzuvw'], bg_hists)

        overall_lnlike, z = em.get_overall_lnlikelihood(star_pars, groups,
                                                        bg_ln_ols, return_memb_probs=True)
        print("overall_lnlike with {} comps is: {:.5}".format(ncomps, overall_lnlike))
        bic = calcBIC(star_pars, ncomps, overall_lnlike)
        print("BIC is: {}".format(bic))
        print("With {:.2} stars accounted for by background"\
              .format(np.sum(z[:,-1])))
rdir_stem = "../results/em_fit/"

if not os.path.exists(rdir_stem):  # on server
    rdir_stem = "/data/mash/tcrun/em_fit/"

for ncomps in range(1, 4):
    try:
        rdir = rdir_stem + assoc_name + "_{}/".format(ncomps)
        ddir = "../data/"

        # final_z_file = rdir + "final/final_membership.npy"
        final_z_file = rdir + "memberships.npy"
        final_groups_file = rdir + "final_groups.npy"
        bg_hists_file = rdir + "bg_hists.npy"
        data_file = ddir + assoc_name + "_xyzuvw.fits"
        star_pars = gf.loadXYZUVW(data_file)

        z = np.load(final_z_file)
        # import pdb; pdb.set_trace()
        groups = np.load(final_groups_file)
        bg_hists = np.load(bg_hists_file)

        bg_ln_ols = em.backgroundLogOverlaps(star_pars['xyzuvw'], bg_hists)

        overall_lnlike, z = em.get_overall_lnlikelihood(star_pars,
                                                        groups,
                                                        bg_ln_ols,
                                                        return_memb_probs=True)
        print("overall_lnlike with {} comps is: {:.5}".format(
            ncomps, overall_lnlike))
        bic = calcBIC(star_pars, ncomps, overall_lnlike)
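
# For orientation: calcBIC presumably applies some form of the Bayesian
# Information Criterion, BIC = k*ln(n) - 2*ln(L), which is what makes fits with
# different ncomps comparable (lower is better). Below is a minimal sketch of
# that standard formula; the per-component parameter count is an illustrative
# assumption, not chronostar's actual bookkeeping.
import numpy as np


def calc_bic_sketch(nstars, ncomps, lnlike, npars_per_comp=9):
    """Standard BIC: penalise -2*ln(L) by k*ln(n) for k free parameters."""
    nfree = ncomps * npars_per_comp + (ncomps - 1)  # component params + mixing weights
    return nfree * np.log(nstars) - 2 * lnlike
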
# coding: utf-8
get_ipython().magic(u'run ipython_primer.py')
import numpy as np
import chronostar.compfitter as gf
xyzuvw_dict = gf.load("../data/gaia_dr2_ok_plx_xyzuvw.fits.gz.fits")
xyzuvw_dict = gf.loadXYZUVW("../data/gaia_dr2_ok_plx_xyzuvw.fits.gz.fits")
xyzuvw_dict.keys()
xyzuvw_dict['xyzuvw'].shape
xyzuvw_dict['xyzuvw_cov'].shape
get_ipython().magic(u'cat log_gaia_converter.log')
get_ipython().magic(u'ls ')
get_ipython().magic(u'rm nohup.out')
get_ipython().magic(u'rm log_gaia_converter.log')
get_ipython().magic(u'ls ')
6000*5
6000*5/60
6000*5/60/60
8*60
                                        weight)
        lnalpha_priors.append(lnalpha_prior)
        print("nstars: {:6.3f} | age: {:6.3f} | dX: {:6.3f} | dV: {:6.3f} |"
              "lnalpha_pr: {:6.3f}"\
              .format(weight, group_obj.age, group_obj.dx, group_obj.dv,
                      lnalpha_prior))
    #print(lnalpha_priors)

# for deeper insight, let's investigate the ratio of overlap between
# the flat bg field and the crappy 1.5 Myr component (3_comp[1]) and
# see if the difference will be corrected for by the large prior

rdir = "../results/em_fit/cf-15/"
star_pars_file = "../data/bpmg_cand_w_gaia_dr2_astrometry_comb_binars_xyzuvw.fits"

star_pars = gf.loadXYZUVW(star_pars_file)
bg_hists = np.load(rdir + "bg_hists.npy")
final_z = np.load(rdir + "final/final_membership.npy")
final_groups = np.load(rdir + "final_groups.npy")

for i in range(len(three_group_pars_ex)):
    spec_comp_stars_mask = np.where(final_z[:, i] > .5)
    spec_comp_star_pars = {
        'xyzuvw': star_pars['xyzuvw'][spec_comp_stars_mask],
        'xyzuvw_cov': star_pars['xyzuvw_cov'][spec_comp_stars_mask]
    }
    spec_comp_group = chronostar.component.Component(three_group_pars_ex[1],
                                                     internal=False)

    spec_ln_bg_ols = em.backgroundLogOverlaps(spec_comp_star_pars['xyzuvw'],
                                              bg_hists,
                                              correction_factor=1.)
import logging
import sys

sys.path.insert(0, '..')

import chronostar.retired2.converter as cv
import chronostar.compfitter as gf

if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO, stream=sys.stdout)
    temp_dir = 'temp_data/'
    astro_table_file = temp_dir + 'astro_table.txt'
    temp_xyzuvw_save_file = temp_dir + 'xyzuvw_now.fits'

    xyzuvw_dict_orig = cv.convertMeasurementsToCartesian(
        loadfile=astro_table_file, savefile=temp_xyzuvw_save_file
    )

    xyzuvw_dict_load = gf.loadXYZUVW(temp_xyzuvw_save_file)
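
    # Hedged follow-up check: confirm the FITS round trip preserves the Cartesian
    # data. This assumes convertMeasurementsToCartesian returns a dict exposing
    # the same 'xyzuvw' and 'xyzuvw_cov' keys that loadXYZUVW yields, which this
    # script does not itself confirm.
    import numpy as np
    assert np.allclose(xyzuvw_dict_orig['xyzuvw'],
                       xyzuvw_dict_load['xyzuvw'])
    assert np.allclose(xyzuvw_dict_orig['xyzuvw_cov'],
                       xyzuvw_dict_load['xyzuvw_cov'])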
Example #10
"""
Generate some BPMG plots for a talk.
"""
from __future__ import division, print_function

import sys

sys.path.insert(0, "..")

import chronostar.retired.hexplotter as hp
import chronostar.compfitter as gf

xyzuvw_file = "../data/gaia_dr2_bp_xyzuvw.fits"
rdir = "../results/em_fit/gaia_dr2_bp/iter11/"

star_pars = gf.loadXYZUVW(xyzuvw_file)
#final_z = np.load(rdir + "final_groups.npy")
#final_med_errs = np.load(rdir + "final_med_errs.npy")
#final_groups = np.load(rdir + "final_groups.npy")

hp.dataGathererEM(2, 10, rdir, rdir, xyzuvw_file=xyzuvw_file)
Example #11
                pass
        except IOError:
            #xyzuvw_file = res_dir + scen + "_xyzuvw_init.npy"
            xyzuvw_file = res_dir + scen + "_perf_xyzuvw.npy"
            init_xyzuvw = np.load(xyzuvw_file)
            astr_table = chronostar.synthdata.measureXYZUVW(
                init_xyzuvw, prec_val[prec])
            cv.convertMeasurementsToCartesian(t=astr_table, savefile=fits_file)

file_stems = [scen + "_" + prec for scen in scenarios for prec in precs]

#fits_files = [res_dir + "xyzuvw_now_" + stem + ".fits" for stem in file_stems]
assert len(fits_files) == len(file_stems)

for i in range(len(fits_files)):
    xyzuvw_dict = gf.loadXYZUVW(fits_files[i])
    nstars = xyzuvw_dict['xyzuvw'].shape[0]
    origin_file = res_dir + file_stems[i][:-5] + '_origins.npy'
    group = np.load(origin_file).item()
    true_age = group.age
    ntimes = int(2 * true_age + 1)
    times = -np.linspace(0, 2 * true_age, ntimes)
    tb = torb.trace_many_cartesian_orbit(xyzuvw_dict['xyzuvw'], times=times,
                                         single_age=False,
                                         savefile=res_dir + 'tb_{}.npy'.format(
                                             int(true_age)))

    # for each timestep, get mean of association and distance from mean
    dists = np.zeros((nstars, ntimes))
    stds = np.zeros(ntimes)
    for tix in range(ntimes):
import numpy as np
import sys
sys.path.insert(0, '..')

import integration_tests.traceback_plotter as tp
import chronostar.compfitter as gf

rdir = '../results/em_fit/gaia_dr2_bp/'
sdir = rdir
xyzuvw_now_file = '../data/gaia_dr2_bp_xyzuvw.fits'
final_z_file = rdir + 'final/final_membership.npy'

maxtime = 30
ntimes = maxtime + 1
times = np.linspace(0, -maxtime, ntimes)
final_z = np.load(final_z_file)

xyzuvw_dict = gf.loadXYZUVW(xyzuvw_now_file)
tp.plotSeparation(xyzuvw_dict['xyzuvw'][np.where(final_z[:, 0])],
                  times,
                  prec='real-data')

mc_xyzuvws = np.zeros((0, 6))
nsamples = 10
# mask the covariances with the same membership cut so means and covs stay paired
for (mn, cov) in zip(xyzuvw_dict['xyzuvw'][np.where(final_z[:, 0])],
                     xyzuvw_dict['xyzuvw_cov'][np.where(final_z[:, 0])]):
    samples = np.random.multivariate_normal(mn, cov, size=nsamples)
    mc_xyzuvws = np.vstack((mc_xyzuvws, samples))

tp.plotSeparation(mc_xyzuvws, times, prec='real-data' + '_emcee')
        lnalpha_priors.append(lnalpha_prior)
        print("nstars: {:6.3f} | age: {:6.3f} | dX: {:6.3f} | dV: {:6.3f} |"
              "lnalpha_pr: {:6.3f}"\
              .format(weight, group_obj.age, group_obj.dx, group_obj.dv,
                      lnalpha_prior))
    #print(lnalpha_priors)


# for deeper insight, let's investigate the ratio of overlap between
# the flat bg field and the crappy 1.5 Myr component (3_comp[1]) and
# see if the difference will be corrected for by the large prior

rdir = "../results/em_fit/cf-15/"
star_pars_file = "../data/bpmg_cand_w_gaia_dr2_astrometry_comb_binars_xyzuvw.fits"

star_pars = gf.loadXYZUVW(star_pars_file)
bg_hists = np.load(rdir + "bg_hists.npy")
final_z = np.load(rdir + "final/final_membership.npy")
final_groups = np.load(rdir + "final_groups.npy")

for i in range(len(three_group_pars_ex)):
    spec_comp_stars_mask = np.where(final_z[:, i] > .5)
    spec_comp_star_pars = {
        'xyzuvw': star_pars['xyzuvw'][spec_comp_stars_mask],
        'xyzuvw_cov': star_pars['xyzuvw_cov'][spec_comp_stars_mask]
    }
    spec_comp_group = chronostar.component.Component(three_group_pars_ex[1],
                                                     internal=False)

    spec_ln_bg_ols = em.backgroundLogOverlaps(spec_comp_star_pars['xyzuvw'],
                                              bg_hists,
                                              correction_factor=1.)
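
# Hedged sketch of the "ratio of overlap" diagnostic described in the comment
# above: compare each star's ln-overlap with the flat background against its
# ln-overlap with the suspect component, optionally shifted by a log-prior.
# comp_ln_ols is a hypothetical input here; the chronostar call that would
# produce it is not shown in this fragment.
import numpy as np


def ln_overlap_ratio(bg_ln_ols, comp_ln_ols, ln_prior=0.):
    """Positive entries favour the background; a positive ln_prior boosts the
    component and so pushes entries negative."""
    return np.asarray(bg_ln_ols) - (np.asarray(comp_ln_ols) + ln_prior)

# e.g. np.sum(ln_overlap_ratio(spec_ln_bg_ols, comp_ln_ols) > 0) counts the stars
# the background would claim from the component before any weights are applied.
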
import logging
import sys

sys.path.insert(0, '..')

import chronostar.retired2.converter as cv
import chronostar.compfitter as gf

if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO, stream=sys.stdout)
    temp_dir = 'temp_data/'
    astro_table_file = temp_dir + 'astro_table.txt'
    temp_xyzuvw_save_file = temp_dir + 'xyzuvw_now.fits'

    xyzuvw_dict_orig = cv.convertMeasurementsToCartesian(
        loadfile=astro_table_file, savefile=temp_xyzuvw_save_file)

    xyzuvw_dict_load = gf.loadXYZUVW(temp_xyzuvw_save_file)
Example #15
    co2s = np.array(co2s)
    lnos = sclno(star_covs[15], star_means[15], star_covs, star_means, 1)
    co3s = np.exp(lnos)
    assert np.allclose(co1s, co2s)
    assert np.allclose(co2s, co3s)
    assert np.allclose(co1s, co3s)


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
    DEBUG_SINGULAR = False
    DEBUG_UNDERFLOW = True
    co1s = []
    co2s = []
    xyzuvw_file = "../data/fed_stars_20_xyzuvw.fits"
    xyzuvw_dict = gf.loadXYZUVW(xyzuvw_file)

    star_means = xyzuvw_dict['xyzuvw']
    star_covs = xyzuvw_dict['xyzuvw_cov']
    nstars = star_means.shape[0]

    gmn = np.mean(star_means, axis=0)
    gcov = np.cov(star_means.T)

    if DEBUG_SINGULAR:
        scov = star_covs[20]
        smn  = star_means[20]
        py_lnol = sclno(gcov, gmn, np.array([scov]), np.array([smn]), 1)
        ol.get_lnoverlaps(gcov, gmn,
                          np.array([scov]),
                          np.array([smn]), 1)