def calculate_V_minus_K(s, i, dartmouth=False):
    """
    Using the isochrones package, calculate the V-K color of a star using
    data from the KIC. Takes a pandas dataframe and the index of the star in
    the dataframe. Returns V-K for that star.
    """
    if dartmouth:
        grid = g_dar
    else:
        grid = mist

    mod = StarModel(grid,
                    Teff=(s.teff.values[i], s.teff_err1.values[i]),
                    logg=(s.logg.values[i], s.logg_err1.values[i]),
                    feh=(s.feh.values[i], s.feh_err1.values[i]),
                    J=s.jmag.values[i], K=s.kmag.values[i],
                    H=s.hmag.values[i], Kp=s.kepmag.values[i],
                    parallax=(s.parallax.values[i],
                              s.parallax_error.values[i]),
                    use_emcee=True)
    mod.fit()

    if dartmouth:
        return np.median(mod.samples.V_mag) - np.median(mod.samples.Ks_mag)
    else:
        return np.median(mod.samples.V_mag) - np.median(mod.samples.K_mag)
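# Hypothetical usage sketch of calculate_V_minus_K. The CSV name is made up;
# the dataframe must carry the KIC-style columns accessed above (teff,
# teff_err1, logg, logg_err1, feh, feh_err1, jmag, hmag, kmag, kepmag,
# parallax, parallax_error), and the module-level `mist` (and `g_dar`) grids
# are assumed to be set up as in the function.
# import pandas as pd
# kic = pd.read_csv("kic_sample.csv")
# v_minus_k = calculate_V_minus_K(kic, 0)                    # MIST grid
# v_minus_k_dar = calculate_V_minus_K(kic, 0, dartmouth=True)  # Dartmouth grid
# print(v_minus_k)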
def test_lnprob_higher_likelihood_real():
    """
    The same test as above but for simulated data.
    """
    df = pd.read_csv("../paper/code/data/simulated_data.csv")

    teff_err = 25       # Kelvin
    logg_err = .05      # dex
    feh_err = .05       # dex
    jmag_err = .01      # mags
    hmag_err = .01      # mags
    kmag_err = .01      # mags
    parallax_err = .05  # milliarcseconds
    prot_err = 1        # Days
    BV_err = .01        # mags

    i = 0
    iso_params = pd.DataFrame(dict({"teff": (df.teff[i], teff_err),
                                    "logg": (df.logg[i], logg_err),
                                    "feh": (df.feh[i], feh_err),
                                    "jmag": (df.jmag[i], jmag_err),
                                    "hmag": (df.hmag[i], hmag_err),
                                    "kmag": (df.kmag[i], kmag_err),
                                    "parallax": (df.parallax[i],
                                                 parallax_err)}))

    # Set up the StarModel isochrones object.
    mod = StarModel(mist, **iso_params)

    # lnprob arguments
    args = [mod, df.prot[i], 1, None, None, False, False]
    good_lnparams = [df.eep.values[i], df.age.values[i], df.feh.values[i],
                     np.log(df.d_kpc.values[i]*1e3), df.Av.values[i]]
    good_lnprob = lnprob(good_lnparams, *args)
    good_vs_bad(good_lnprob, good_lnparams, args, 10)
def test_for_nans():
    """
    Something is causing the lhf to return NaN. Get to the bottom of it!
    """
    df = pd.read_csv("../paper/code/data/simulated_data.csv")

    for i in range(len(df)):
        print(i, "of", len(df))
        iso_params = pd.DataFrame(dict({
            "teff": (df.teff[i], 10),
            "logg": (df.logg[i], .05),
            "feh": (df.feh[i], .001),
            "parallax": (df.parallax[i], .01)}))  # mas

        mod = StarModel(mist, **iso_params)

        N = 10000
        eeps = np.random.uniform(-100, 2000, N)
        lnages = np.random.uniform(0, 11, N)
        fehs = np.random.uniform(-5, 5, N)
        Ds = np.log(np.random.uniform(0, 10000, N))
        Avs = np.random.uniform(-.2, 1.2, N)
        # periods = 10**np.random.uniform(-1, 3, N)

        probs, priors = [np.empty(N) for _ in range(2)]
        for j in trange(N):
            lnparams = [eeps[j], lnages[j], fehs[j], Ds[j], Avs[j]]
            args = [mod, df.prot[i], 1., None, None, False, False]
            probs[j] = lnprob(lnparams, *args)[0]
            priors[j] = lnprob(lnparams, *args)[1]

        print(len(probs), len(probs[np.isnan(probs)]))
        assert sum(np.isnan(probs)) == 0
def test_on_hot_star():
    df = pd.read_csv("paper/code/data/simulated_data_noisy.csv")
    i = 21
    iso_params = dict({
        "teff": (df.teff.values[i], df.teff_err.values[i]),
        "logg": (df.logg.values[i], df.logg_err.values[i]),
        "feh": (df.feh.values[i], df.feh_err.values[i]),
        "J": (df.jmag.values[i], df.J_err.values[i]),
        "H": (df.hmag.values[i], df.H_err.values[i]),
        "K": (df.kmag.values[i], df.K_err.values[i]),
        "B": (df.B.values[i], df.B_err.values[i]),
        "V": (df.V.values[i], df.V_err.values[i]),
        "G": (df.G.values[i], df.G_err.values[i]),
        "BP": (df.BP.values[i], df.BP_err.values[i]),
        "RP": (df.RP.values[i], df.RP_err.values[i]),
        "parallax": (df.parallax.values[i], df.parallax_err.values[i]),
        "maxAV": .1})

    lnparams = [329.58, np.log10(650 * 1e6), 0., np.log(177), .035]
    mod = StarModel(mist, **iso_params)
    args = [mod, df.prot.values[i], df.prot_err.values[i], False, False,
            True, "angus15"]
    prob, prior = lnprob(lnparams, *args)
    print(prob, prior)
    assert np.isfinite(prob)
    assert np.isfinite(prior)
def age(name):
    outfn = os.path.join(OUTPUTDIR, '{}_isochrones.hdf'.format(name))
    model = StarModel.load_hdf(outfn)
    age = model.samples['age']
    age = 10**age / 1e9
    p15, p50, p85 = age.quantile([0.15, 0.5, 0.85])
    return "${%.1f}^{%+.1f}_{%+.1f}$" % (p50, p85 - p50, p15 - p50)
def obs_to_starmodel(obs, iso=None):
    """ Given an Observation, return a StarModel object """
    if iso is None:
        iso = Dartmouth_Isochrone()

    mags = obs_to_mags(obs)
    parallax = (obs.tgas_source.parallax, obs.tgas_source.parallax_error)

    model = StarModel(iso, use_emcee=True, parallax=parallax, **mags)
    model.set_bounds(mass=(0.01, 20),
                     feh=(-1, 1),
                     distance=(0, 300),
                     AV=(0, 1))
    return model
def _ini1(ic):
    """ Single star """
    mod = StarModel.from_ini(ic, folder=os.path.join(FOLDER, 'star1'))
    assert mod.n_params == 5
    assert mod.obs.systems == [0]
    assert mod.obs.Nstars == {0: 1}

    p = [1.0, 9.4, 0.0, 100, 0.2]
    assert np.isfinite(mod.lnlike(p))
def _ini2(ic):
    """ A wide, well-resolved binary """
    mod = StarModel.from_ini(ic, folder=os.path.join(FOLDER, 'star2'))
    assert mod.n_params == 6
    assert mod.obs.systems == [0]
    assert mod.obs.Nstars == {0: 2}

    p = [1.0, 0.5, 9.4, 0.0, 100, 0.2]
    assert np.isfinite(mod.lnlike(p))
def _check_saving(mod):
    filename = os.path.join(chainsdir,
                            '{}.h5'.format(np.random.randint(1000000)))
    mod.save_hdf(filename)

    newmod = StarModel.load_hdf(filename)

    assert np.allclose(mod.samples, newmod.samples)
    assert mod.ic.bands == newmod.ic.bands

    os.remove(filename)
def _ini3(ic):
    """ A close resolved triple (unresolved in KIC, TwoMASS),
    modeled as a physically associated triple
    """
    mod = StarModel.from_ini(ic, folder=os.path.join(FOLDER, 'star3'))
    assert mod.n_params == 7
    assert mod.obs.systems == [0]
    assert mod.obs.Nstars == {0: 3}

    p = [1.0, 0.8, 0.5, 9.4, 0.0, 100, 0.2]
    assert np.isfinite(mod.lnlike(p))
def augment_chain(chain, isochrones_file, lpar_list):
    model = StarModel.load_hdf(isochrones_file)
    chain_star = model.samples
    chain_star['agegyr'] = 10**chain_star['age'] / 1e9
    chain_star = chain_star['Teff mass radius agegyr distance'.split()]
    chain_star['rhostar'] = 1.410 * chain_star.mass * chain_star.radius**-3

    nchain = len(chain)
    chain_star = chain_star.sample(n=nchain, replace=True)
    chain_star.index = chain.index
    chain = pd.concat([chain_star, chain], axis=1)

    if list(chain.columns).count('dvdt') > 0:
        chain['dvdt'] *= 365  # Fitting is done in m/s/day, we want m/s/yr

    i_planet = 1
    for lpar in lpar_list:
        e = chain['e%i' % i_planet]
        k = chain['k%i' % i_planet]
        RpRstar = np.random.normal(loc=lpar.RpRstar, scale=lpar.RpRstar_err,
                                   size=nchain) * 1e-2
        P = np.random.normal(loc=lpar.P, scale=lpar.P_err, size=nchain)
        T0 = np.random.normal(loc=lpar.T0, scale=lpar.T0_err, size=nchain)
        Rp = chain.radius * 109.045 * RpRstar
        Mpsini = radvel.orbit.Msini(k, P, chain['mass'], e,
                                    Msini_units='earth')
        rhop = radvel.orbit.density(Mpsini, Rp)
        Lstar = radvel.orbit.Lstar(chain['radius'], chain['Teff'])
        a = ((c.G * chain['mass'] * u.Msun * (P * u.d)**2
              / 4 / np.pi**2)**(1 / 3.)).to(u.AU)
        a = a.to(u.AU).value
        Sinc = radvel.orbit.Sinc(Lstar, a)
        Teq = radvel.orbit.Teq(Sinc)

        chain['P%i' % i_planet] = P
        chain['T0%i' % i_planet] = T0
        chain['RpRstar%i' % i_planet] = RpRstar
        chain['Rp%i' % i_planet] = Rp
        chain['Mpsini%i' % i_planet] = Mpsini
        chain['rhop%i' % i_planet] = rhop
        chain['Teq%i' % i_planet] = Teq
        chain['Lstar%i' % i_planet] = Lstar
        chain['a%i' % i_planet] = a
        chain['Sinc%i' % i_planet] = Sinc
        i_planet += 1

    print_fmt_chain(fmt_chain(chain))
    return chain
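# Hypothetical example of the per-planet inputs augment_chain expects. Each
# entry only needs the attributes accessed above: RpRstar (in percent, given
# the 1e-2 factor), P (days), T0, and their uncertainties. The namedtuple,
# the numbers, and the file name are all illustrative, not from the original.
# from collections import namedtuple
# LightcurvePars = namedtuple(
#     "LightcurvePars", "RpRstar RpRstar_err P P_err T0 T0_err")
# lpar_list = [LightcurvePars(RpRstar=2.1, RpRstar_err=0.05,
#                             P=20.8851, P_err=0.0003,
#                             T0=2072.7948, T0_err=0.0007)]
# chain = augment_chain(rv_chain, "star_isochrones.h5", lpar_list)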
def _check_saving(mod):
    filename = os.path.join(chainsdir,
                            "{}.h5".format(np.random.randint(1000000)))
    mod.save_hdf(filename)
    assert len(tb.file._open_files.get_handlers_by_name(filename)) == 0

    newmod = StarModel.load_hdf(filename)
    assert len(tb.file._open_files.get_handlers_by_name(filename)) == 0

    assert_frame_equal(mod.samples, newmod.samples)
    assert mod.ic.bands == newmod.ic.bands

    os.remove(filename)
def _ini3_2(ic):
    """ A close resolved triple (unresolved in KIC, TwoMASS),
    modeled as a physically associated binary plus non-associated single
    """
    mod = StarModel.from_ini(ic, folder=os.path.join(FOLDER, 'star3'),
                             index=[0, 0, 1])
    assert mod.n_params == 11
    assert mod.obs.systems == [0, 1]
    assert mod.obs.Nstars == {0: 2, 1: 1}

    p = [1.0, 0.8, 9.4, 0.0, 100, 0.2, 1.0, 9.7, 0.0, 200, 0.5]
    assert np.isfinite(mod.lnlike(p))
def test_lnprob_higher_likelihood_sun():
    """
    Make sure the likelihood goes down when the parameters are a worse fit.
    Test on Solar values.
    """
    iso_params = pd.DataFrame(dict({"teff": (5777, 10),
                                    "logg": (4.44, .05),
                                    "feh": (0., .001),
                                    "parallax": (1., .01),  # mas
                                    "B": (15, 0.02)}))

    # Set up the StarModel isochrones object.
    mod = StarModel(mist, **iso_params)

    # The lnprob arguments.
    args = [mod, 26., 1., None, None, False, False]
    good_lnparams = [346, np.log10(4.56*1e9), 0., np.log(1000), 0.]
    good_lnprob = lnprob(good_lnparams, *args)
    good_vs_bad(good_lnprob, good_lnparams, args, 10)
def test_praesepe_angus_model():
    df = pd.read_csv("paper/code/data/praesepe.csv")
    df = df.iloc[0]
    iso_params = {"G": (df["G"], df["G_err"]),
                  "BP": (df["bp"], df["BP_err"]),
                  "RP": (df["rp"], df["RP_err"]),
                  "parallax": (df["parallax"], df["parallax_err"]),
                  "maxAV": .1}

    inits = [330, np.log10(650 * 1e6), 0., np.log(177), 0.035]
    mod = StarModel(mist, **iso_params)
    args = [mod, df["prot"], df["prot"] * .01, False, False, True, "angus15"]
    prob, prior = lnprob(inits, *args)
    assert np.isfinite(prob)
    assert np.isfinite(prior)
def test_lnprob_higher_likelihood_sun():
    """
    Make sure the likelihood goes down when the parameters are a worse fit.
    Test on Solar values.
    """
    iso_params = {"teff": (5777, 10),
                  "logg": (4.44, .05),
                  "feh": (0., .001),
                  "parallax": (1., .01),  # milliarcseconds
                  "B": (15.48, 0.02)}

    # Set up the StarModel isochrones object.
    mod = StarModel(mist, **iso_params)

    # The lnprob arguments.
    args = [mod, 26., 1., False, False, True, "angus15"]
    good_lnparams = [346, np.log10(4.56 * 1e9), 0., np.log(1000), 0.]
    good_lnprob = lnprob(good_lnparams, *args)
    good_vs_bad(good_lnprob, good_lnparams, args, 10)
def isochrones(name):
    dar = Dartmouth_Isochrone()
    Teff, logg, feh, vsini, vmag = parameters(name)
    model = StarModel(dar, Teff=Teff, feh=feh, logg=logg, V=vmag)
    outfn = os.path.join(OUTPUTDIR, '{}_isochrones.hdf'.format(name))

    print("performing isochrones analysis on {}".format(name))
    model.fit(overwrite=True)

    columns = 'Teff logg feh mass radius age distance'.split()
    print("isochrones analysis done for {}".format(name))
    print(model.samples[columns].quantile([0.15, 0.5, 0.85]).T.to_string())
    model.save_hdf(outfn)
def colour2physical(kid, plot=False):
    # fetch photometry
    client = kplr.API()
    star = client.star(kid)
    mags = {'H': (star.kic_hmag, 0.08),
            'J': (star.kic_jmag, 0.08),
            'K': (star.kic_kmag, 0.08),
            'g': (star.kic_gmag, 0.08),
            'r': (star.kic_rmag, 0.08)}

    # find the mass and radius using isochrones
    dar = Dartmouth_Isochrone()
    smod_phot = StarModel(dar, **mags)
    result = smod_phot.fit_mcmc()

    # save samples and triangle plot
    if plot:
        smod_phot.triangle_plots("test", format="pdf")
    smod_phot.save_hdf("samples_%s.h5" % str(int(kid)))

    max_like = smod_phot.maxlike()
    print(max_like)
    np.savetxt("maxlike.txt", max_like)
def fit(self, inits=[355, 9.659, 0., 1000., .01], nwalkers=24, max_n=100000,
        thin_by=100, burnin=0, iso_only=False, gyro_only=False):
    """Run MCMC on a star.

    Explore the posterior probability density function of the stellar
    parameters using MCMC (via emcee).

    Args:
        inits (Optional[array-like]): A list of initial values to use for
            EEP, age (in log10[yrs]), feh, distance (in pc) and Av. The
            defaults are Solar values at 1000 pc with .01 extinction.
        nwalkers (Optional[int]): The number of walkers to use with emcee.
            The default is 24.
        max_n (Optional[int]): The maximum number of samples to obtain
            (although not necessarily to save -- see thin_by). The default
            is 100000.
        thin_by (Optional[int]): Only one in every thin_by samples will be
            saved. The default is 100. Set = 1 to save every sample (note
            that this substantially slows down the MCMC process because of
            the additional I/O time).
        burnin (Optional[int]): The number of SAVED samples to throw away
            when accessing the results. This number cannot exceed the
            number of saved samples (which is max_n/thin_by). Default = 0.
        iso_only (Optional[bool]): If true only the isochronal likelihood
            function will be used.
        gyro_only (Optional[bool]): If true only the gyrochronal likelihood
            function will be used. Cannot be true if iso_only is true.
    """
    self.max_n = max_n
    self.nwalkers = nwalkers
    self.thin_by = thin_by

    if iso_only:
        assert gyro_only == False, "You cannot set both iso_only and " \
            "gyro_only to be True."

    if gyro_only:
        assert self.bv is not None and self.mass is not None, \
            "If gyro_only is set to True, you must provide a B-V colour " \
            "and a mass."

    if burnin > max_n / thin_by:
        burnin = int(max_n / thin_by / 3)
        print("Automatically setting burn in to {}".format(burnin))

    p_init = [inits[0], inits[1], inits[2], np.log(inits[3]), inits[4]]
    self.p_init = p_init

    np.random.seed(42)

    # Create the directory if it doesn't already exist.
    if not os.path.exists(self.savedir):
        os.makedirs(self.savedir)

    # Set up the backend.
    # Don't forget to clear it in case the file already exists.
    fn = "{0}/{1}.h5".format(self.savedir, self.filename)
    backend = emcee.backends.HDFBackend(fn)
    ndim = 5
    backend.reset(nwalkers, ndim)
    self.backend = backend

    # Set up the StarModel object needed to calculate the likelihood.
    mod = StarModel(mist, **self.iso_params)  # StarModel isochrones obj

    # lnprob arguments
    args = [mod, self.prot, self.prot_err, self.bv, self.mass, iso_only,
            gyro_only]
    self.args = args

    # Run the MCMC
    # sampler = run_mcmc(args, p_init, backend, nwalkers=nwalkers,
    #                    max_n=max_n, thin_by=thin_by)
    sampler = self.run_mcmc()
    self.sampler = sampler
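# Hypothetical usage of fit(). The enclosing class is called `Star` here
# purely for illustration; its constructor signature is an assumption. It is
# only assumed to store iso_params, prot, prot_err, bv, mass, savedir and
# filename on self, which is what the method above reads, and run_mcmc() is
# assumed to return an emcee sampler.
# iso_params = {"teff": (5777, 10), "logg": (4.44, .05), "feh": (0., .001),
#               "parallax": (1., .01)}
# star = Star(iso_params, prot=26., prot_err=1.,
#             savedir="results", filename="sun")
# star.fit(max_n=10000, thin_by=10)
# chain = star.sampler.get_chain()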
def main():
    # TODO: bad, hard-coded...
    # base_path = '/Volumes/ProjectData/gaia-comoving-followup/'
    base_path = '../../data/'
    db_path = path.join(base_path, 'db.sqlite')
    engine = db_connect(db_path)
    session = Session()

    chain_path = path.abspath('./isochrone_chains')
    os.makedirs(chain_path, exist_ok=True)

    # Check out the bottom of "Color-magnitude diagram.ipynb":
    interesting_group_ids = [1500, 1229, 1515]

    all_photometry = OrderedDict([
        ('1500-8455', OrderedDict([
            ('J', (6.8379998, 0.021)), ('H', (6.4640002, 0.017000001)),
            ('K', (6.3369999, 0.017999999)), ('W1', (6.2950001, 0.093000002)),
            ('W2', (6.2490001, 0.026000001)), ('W3', (6.3330002, 0.015)),
            ('B', (9.5950003, 0.022)), ('V', (8.5120001, 0.014))])),
        ('1500-1804', OrderedDict([
            ('J', (6.9039998, 0.041000001)), ('H', (6.8559999, 0.027000001)),
            ('K', (6.7989998, 0.017000001)), ('W1', (6.803, 0.064999998)),
            ('W2', (6.7600002, 0.018999999)), ('W3', (6.8270001, 0.016000001)),
            ('B', (7.4980001, 0.015)), ('V', (7.289, 0.011))])),
        ('1229-1366', OrderedDict([
            ('J', (6.7290001, 0.024)), ('H', (6.2449999, 0.02)),
            ('K', (6.1529999, 0.023)), ('W1', (6.1799998, 0.096000001)),
            ('W2', (6.04, 0.035)), ('W3', (6.132, 0.016000001)),
            ('B', (9.5539999, 0.021)), ('V', (8.4619999, 0.014))])),
        ('1229-7470', OrderedDict([
            ('J', (9.1709995, 0.024)), ('H', (8.7959995, 0.026000001)),
            ('K', (8.7299995, 0.022)), ('W1', (8.6669998, 0.023)),
            ('W2', (8.7189999, 0.02)), ('W3', (8.6680002, 0.025)),
            ('B', (11.428, 0.054000001)), ('V', (10.614, 0.039999999))])),
        ('1515-3584', OrderedDict([
            ('J', (5.363999843597412, 0.024000000208616257)),
            ('H', (4.965000152587891, 0.035999998450279236)),
            ('K', (4.815999984741211, 0.032999999821186066)),
            ('W1', (4.758, 0.215)), ('W2', (4.565, 0.115)),
            ('W3', (4.771, 0.015)),
            ('B', (8.347999572753906, 0.01600000075995922)),
            ('V', (7.182000160217285, 0.009999999776482582))])),
        ('1515-1834', OrderedDict([
            ('J', (8.855999946594238, 0.024000000208616257)),
            ('H', (8.29699993133545, 0.020999999716877937)),
            ('K', (8.178999900817871, 0.017999999225139618)),
            ('W1', (8.117, 0.022)), ('W2', (8.15, 0.019)),
            ('W3', (8.065, 0.02)),
            ('B', (12.309000015258789, 0.11999999731779099)),
            ('V', (11.069999694824219, 0.054999999701976776))]))
    ])

    for k in all_photometry:
        samples_file = path.join(chain_path, '{0}.hdf5'.format(k))

        if path.exists(samples_file):
            logger.info("skipping {0} - samples exist at {1}"
                        .format(k, samples_file))
            continue

        phot = all_photometry[k]
        obs = session.query(Observation).filter(Observation.object == k).one()
        plx = (obs.tgas_source.parallax, obs.tgas_source.parallax_error)

        # fit an isochrone
        model = StarModel(iso, use_emcee=True, parallax=plx, **phot)
        model.set_bounds(mass=(0.01, 20),
                         feh=(-1, 1),
                         distance=(0, 300),
                         AV=(0, 1))

        # initial conditions for emcee walkers
        nwalkers = 128

        p0 = []
        m0, age0, feh0 = model.ic.random_points(nwalkers, minmass=0.01,
                                                maxmass=10., minfeh=-1,
                                                maxfeh=1)
        _, max_distance = model.bounds('distance')
        _, max_AV = model.bounds('AV')
        d0 = 10**(np.random.uniform(0, np.log10(max_distance), size=nwalkers))
        AV0 = np.random.uniform(0, max_AV, size=nwalkers)
        p0 += [m0]
        p0 += [age0, feh0, d0, AV0]

        p0 = np.array(p0).T
        npars = p0.shape[1]

        # run emcee
        ninit = 256
        nburn = 1024
        niter = 4096

        logger.debug('Running emcee - initial sampling...')
        sampler = emcee.EnsembleSampler(nwalkers, npars, model.lnpost)

        # pos, prob, state = sampler.run_mcmc(p0, ninit)
        for pos, prob, state in tqdm(sampler.sample(p0, iterations=ninit),
                                     total=ninit):
            pass

        # cull the weak walkers
        best_ix = sampler.flatlnprobability.argmax()
        best_p0 = (sampler.flatchain[best_ix][None] +
                   np.random.normal(0, 1E-5, size=(nwalkers, npars)))

        sampler.reset()
        logger.debug('burn-in...')
        for pos, prob, state in tqdm(sampler.sample(best_p0, iterations=nburn),
                                     total=nburn):
            pass
        # pos,_,_ = sampler.run_mcmc(best_p0, nburn)

        sampler.reset()
        logger.debug('sampling...')
        # _ = sampler.run_mcmc(pos, niter)
        for pos, prob, state in tqdm(sampler.sample(pos, iterations=niter),
                                     total=niter):
            pass

        model._sampler = sampler
        model._make_samples(0.08)

        model.samples.to_hdf(samples_file, key='samples')
        # np.save('isochrone_chains/chain.npy', sampler.chain)
        logger.debug('...done and saved!')
def _check_spec(ic):
    mod = StarModel(ic, Teff=(5700, 100), logg=(4.5, 0.1), feh=(0.0, 0.2))
    assert np.isfinite(mod.lnlike([1.0, 9.6, 0.1, 200, 0.2]))
def from_ini(cls, folder='.', ini_file='fpp.ini',
             recalc=False, refit_trap=False,
             star_ini_file='star.ini',
             ichrone=DARTMOUTH,
             **kwargs):
    """
    To enable simple usage, initializes a FPPCalculation from a .ini file

    By default, a file called ``fpp.ini`` will be looked for in the
    current folder.  Also present must be a ``star.ini`` file that
    contains the observed properties of the target star.

    ``fpp.ini`` must be of the following form::

        name = k2oi
        ra = 11:30:14.510
        dec = +07:35:18.21

        period = 32.988 #days
        rprs = 0.0534 #Rp/Rstar

        photfile = lc_k2oi.csv

        [constraints]
        maxrad = 10 #exclusion radius [arcsec]
        secthresh = 0.001 #maximum allowed secondary signal depth

        #This variable defines contrast curves
        #ccfiles = Keck_J.cc, Lick_J.cc

    Photfile must be a text file with columns ``(days_from_midtransit,
    flux, flux_err)``.  Both whitespace- and comma-delimited will be tried,
    using ``np.loadtxt``.  Photfile need not be there if there is a pickled
    :class:`TransitSignal` saved in the same directory as ``ini_file``,
    named ``trsig.pkl`` (or another name as defined by ``trsig`` keyword
    in ``.ini`` file).

    ``star.ini`` should look something like the following::

        B = 15.005, 0.06
        V = 13.496, 0.05
        g = 14.223, 0.05
        r = 12.858, 0.04
        i = 11.661, 0.08
        J = 9.763, 0.03
        H = 9.135, 0.03
        K = 8.899, 0.02
        W1 = 8.769, 0.023
        W2 = 8.668, 0.02
        W3 = 8.552, 0.025
        Kepler = 12.473

        #Teff = 3503, 80
        #feh = 0.09, 0.09
        #logg = 4.89, 0.1

    Any star properties can be defined; if errors are included then they
    will be used in the :class:`isochrones.StarModel` MCMC fit.
    Spectroscopic parameters (``Teff, feh, logg``) are optional.  If
    included, then they will also be included in the
    :class:`isochrones.StarModel` fit.  A magnitude for the band in which
    the transit signal is observed (e.g., ``Kepler``) is required, though
    it need not have an associated uncertainty.

    :param folder:
        Folder to find configuration files.

    :param ini_file:
        Input configuration file.

    :param star_ini_file:
        Input config file for :class:`isochrones.StarModel` fits.

    :param recalc:
        Whether to re-calculate :class:`PopulationSet`, if a ``popset.h5``
        file is already present.

    :param **kwargs:
        Keyword arguments passed to :class:`PopulationSet`.

    Creates:

        * ``trsig.pkl``: the pickled :class:`vespa.TransitSignal` object.
        * ``starfield.h5``: the TRILEGAL field star simulation
        * ``starmodel.h5``: the :class:`isochrones.StarModel` fit
        * ``popset.h5``: the :class:`vespa.PopulationSet` object
          representing the model population simulations.
    """
    if not os.path.isabs(ini_file):
        config = ConfigObj(os.path.join(folder, ini_file))
    else:
        config = ConfigObj(ini_file)

    folder = os.path.abspath(folder)

    # required items
    name = config['name']
    ra, dec = config['ra'], config['dec']
    period = float(config['period'])
    rprs = float(config['rprs'])

    # load starmodels if there; if not, create them.
    if 'starmodel_basename' not in config:
        starmodel_basename = 'dartmouth_starmodel'
    else:
        starmodel_basename = config['starmodel_basename']

    single_starmodel_file = os.path.join(
        folder, '{}_single.h5'.format(starmodel_basename))
    binary_starmodel_file = os.path.join(
        folder, '{}_binary.h5'.format(starmodel_basename))
    triple_starmodel_file = os.path.join(
        folder, '{}_triple.h5'.format(starmodel_basename))

    # Single
    try:
        single_starmodel = StarModel.load_hdf(single_starmodel_file)
        logging.info('Single StarModel loaded from {}'.format(
            single_starmodel_file))
    except:
        single_starmodel = StarModel.from_ini(ichrone, folder,
                                              ini_file=star_ini_file)
        logging.info('Fitting single StarModel to {}...'.format(
            single_starmodel.properties))
        single_starmodel.fit()
        single_starmodel.save_hdf(single_starmodel_file)
        triangle_base = os.path.join(
            folder, '{}_triangle_single'.format(starmodel_basename))
        single_starmodel.triangle_plots(triangle_base)
        logging.info('StarModel fit done.')

    # Binary
    try:
        binary_starmodel = BinaryStarModel.load_hdf(binary_starmodel_file)
        logging.info('BinaryStarModel loaded from {}'.format(
            binary_starmodel_file))
    except:
        binary_starmodel = BinaryStarModel.from_ini(ichrone, folder,
                                                    ini_file=star_ini_file)
        logging.info('Fitting BinaryStarModel to {}...'.format(
            binary_starmodel.properties))
        binary_starmodel.fit()
        binary_starmodel.save_hdf(binary_starmodel_file)
        triangle_base = os.path.join(
            folder, '{}_triangle_binary'.format(starmodel_basename))
        binary_starmodel.triangle_plots(triangle_base)
        logging.info('BinaryStarModel fit done.')

    # Triple
    try:
        triple_starmodel = TripleStarModel.load_hdf(triple_starmodel_file)
        logging.info('TripleStarModel loaded from {}'.format(
            triple_starmodel_file))
    except:
        triple_starmodel = TripleStarModel.from_ini(ichrone, folder,
                                                    ini_file=star_ini_file)
        logging.info('Fitting TripleStarModel to {}...'.format(
            triple_starmodel.properties))
        triple_starmodel.fit()
        triple_starmodel.save_hdf(triple_starmodel_file)
        triangle_base = os.path.join(
            folder, '{}_triangle_triple'.format(starmodel_basename))
        triple_starmodel.triangle_plots(triangle_base)
        logging.info('TripleStarModel fit done.')

    if 'popset' in config:
        popset_file = config['popset']
        if not os.path.isabs(popset_file):
            popset_file = os.path.join(folder, popset_file)
    else:
        popset_file = os.path.join(folder, 'popset.h5')

    if 'starfield' in config:
        trilegal_file = config['starfield']
        if not os.path.isabs(trilegal_file):
            trilegal_file = os.path.join(folder, trilegal_file)
    else:
        trilegal_file = os.path.join(folder, 'starfield.h5')

    if 'trsig' in config:
        trsig_file = config['trsig']
        if not os.path.isabs(trsig_file):
            trsig_file = os.path.join(folder, trsig_file)
    else:
        trsig_file = os.path.join(folder, 'trsig.pkl')

    # create TransitSignal
    if os.path.exists(trsig_file):
        logging.info('Loading transit signal from {}...'.format(trsig_file))
        trsig = pickle.load(open(trsig_file, 'rb'))
    else:
        if 'photfile' not in config:
            raise AttributeError('If transit pickle file (trsig.pkl) ' +
                                 'not present, "photfile" must be ' +
                                 'defined.')
        if not os.path.isabs(config['photfile']):
            photfile = os.path.join(folder, config['photfile'])
        else:
            photfile = config['photfile']

        logging.info('Reading transit signal photometry ' +
                     'from {}...'.format(photfile))
        try:
            ts, fs, dfs = np.loadtxt(photfile, unpack=True)
        except:
            ts, fs, dfs = np.loadtxt(photfile, delimiter=',', unpack=True)

        trsig = TransitSignal(ts, fs, dfs, P=period, name=name)
        logging.info('Fitting transitsignal with MCMC...')
        trsig.MCMC()
        trsig.save(trsig_file)

    # create PopulationSet
    try:
        if recalc:
            if os.path.exists(popset_file):
                os.remove(popset_file)
            raise RuntimeError  # just to get to except block
        try:
            popset = PopulationSet.load_hdf(popset_file)
        except HDF5ExtError:
            os.remove(popset_file)
            logging.warning('{} file corrupted; removing.'.format(popset_file))
            raise RuntimeError  # to get to except block
        for m in DEFAULT_MODELS:
            popset[m]  # should there be a better way to check this? (yes)
        if refit_trap:
            os.remove(popset_file)
            for pop in popset.poplist:
                logging.info('Re-fitting trapezoids for {}...'.format(
                    pop.model))
                pop.fit_trapezoids()
                pop.save_hdf(popset_file, pop.modelshort, append=True)
            popset = PopulationSet.load_hdf(popset_file)
        logging.info('PopulationSet loaded from {}'.format(popset_file))
    except:
        if recalc:
            do_only = DEFAULT_MODELS
        else:
            try:
                popset = PopulationSet.load_hdf(popset_file)
                do_only = []
                for m in DEFAULT_MODELS:
                    try:
                        popset[m]
                    except:
                        do_only.append(m)
            except:
                do_only = DEFAULT_MODELS
            if os.path.exists(popset_file):
                logging.warning('{} exists, but regenerating Population '
                                'Set ({})...'.format(popset_file, do_only),
                                exc_info=True)
        popset = PopulationSet(period=period, mags=single_starmodel.mags,
                               ra=ra, dec=dec,
                               trilegal_filename=trilegal_file,
                               starmodel=single_starmodel,
                               binary_starmodel=binary_starmodel,
                               triple_starmodel=triple_starmodel,
                               rprs=rprs, do_only=do_only,
                               savefile=popset_file, **kwargs)

    fpp = cls(trsig, popset, folder=folder)

    #############
    # Apply constraints

    # Exclusion radius
    maxrad = float(config['constraints']['maxrad'])
    fpp.set_maxrad(maxrad)
    if 'secthresh' in config['constraints']:
        secthresh = float(config['constraints']['secthresh'])
        if not np.isnan(secthresh):
            fpp.apply_secthresh(secthresh)

    # Odd-even constraint
    diff = 3 * np.max(trsig.depthfit[1])
    fpp.constrain_oddeven(diff)

    # apply contrast curve constraints if present
    if 'ccfiles' in config['constraints']:
        ccfiles = list(config['constraints']['ccfiles'])
        for ccfile in ccfiles:
            if not os.path.isabs(ccfile):
                ccfile = os.path.join(folder, ccfile)
            m = re.search(r'(\w+)_(\w+)\.cc', os.path.basename(ccfile))
            if not m:
                logging.warning('Invalid CC filename ({}); '
                                'skipping.'.format(ccfile))
                continue
            else:
                band = m.group(2)
                inst = m.group(1)
                name = '{} {}-band'.format(inst, band)
                cc = ContrastCurveFromFile(ccfile, band, name=name)
                fpp.apply_cc(cc)

    # apply "velocity contrast curve" if present
    if 'vcc' in config['constraints']:
        dv = float(config['constraints']['vcc'][0])
        dmag = float(config['constraints']['vcc'][1])
        vcc = VelocityContrastCurve(dv, dmag)
        fpp.apply_vcc(vcc)

    return fpp
def _check_spec(ic):
    mod = StarModel(ic, Teff=(5700, 100), logg=(4.5, 0.1), feh=(0.0, 0.2))
    eep = ic.get_eep(1., 9.6, 0.1, accurate=True)
    assert np.isfinite(mod.lnlike([eep, 9.6, 0.1, 200, 0.2]))
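# For context, a sketch of the conversion this check relies on: the MIST grid
# maps (mass, log10(age), [Fe/H]) to an equivalent evolutionary point (EEP),
# which is the first element of the EEP-parameterized likelihood vector
# [eep, log10(age), feh, distance (pc), AV]. The numbers are illustrative.
# from isochrones.mist import MIST_Isochrone
# mist = MIST_Isochrone()
# eep = mist.get_eep(1.0, 9.6, 0.1, accurate=True)
# mod = StarModel(mist, Teff=(5700, 100), logg=(4.5, 0.1), feh=(0.0, 0.2))
# print(mod.lnlike([eep, 9.6, 0.1, 200, 0.2]))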
def test_fitting():
    mod_mist = _check_fitting(StarModel(MIST_Isochrone, **props))
    _check_saving(mod_mist)
def fit_star(star, verbose=False):
    output_filename = "{0}.h5".format(star.kepid)
    logging.info("Output filename: {0}".format(output_filename))
    if os.path.exists(output_filename):
        return
    time.sleep(30)

    strt = time.time()

    # The KIC parameters
    mean_log_mass = np.log(star.mass)
    sigma_log_mass = (np.log(star.mass + star.mass_err1) -
                      np.log(star.mass + star.mass_err2))  # double the kic value
    mean_feh = star.feh
    sigma_feh = star.feh_err1 - star.feh_err2  # double the kic value

    min_distance, max_distance = 0.0, 3000.0

    # Other bands
    other_bands = dict()
    if np.isfinite(star.tgas_w1gmag):
        other_bands = dict(
            W1=(star.tgas_w1gmag, star.tgas_w1gmag_error),
            W2=(star.tgas_w2gmag, star.tgas_w2gmag_error),
            W3=(star.tgas_w3gmag, star.tgas_w3gmag_error),
        )
    if np.isfinite(star.tgas_Vmag):
        other_bands["V"] = (star.tgas_Vmag, star.tgas_e_Vmag)
    if np.isfinite(star.tgas_Bmag):
        other_bands["B"] = (star.tgas_Bmag, star.tgas_e_Bmag)
    if np.isfinite(star.tgas_gpmag):
        other_bands["g"] = (star.tgas_gpmag, star.tgas_e_gpmag)
    if np.isfinite(star.tgas_rpmag):
        other_bands["r"] = (star.tgas_rpmag, star.tgas_e_rpmag)
    if np.isfinite(star.tgas_ipmag):
        other_bands["i"] = (star.tgas_ipmag, star.tgas_e_ipmag)

    # Build the model
    mist = MIST_Isochrone()
    mod = StarModel(mist,
                    J=(star.jmag, star.jmag_err),
                    H=(star.hmag, star.hmag_err),
                    K=(star.kmag, star.kmag_err),
                    parallax=(star.tgas_parallax, star.tgas_parallax_error),
                    **other_bands)

    # Initialize
    nwalkers = 500
    ndim = 5
    lnpost_init = -np.inf + np.zeros(nwalkers)
    coords_init = np.empty((nwalkers, ndim))
    m = ~np.isfinite(lnpost_init)
    while np.any(m):
        K = m.sum()

        # Mass
        coords_init[m, 0] = np.exp(mean_log_mass +
                                   sigma_log_mass * np.random.randn(K))

        # Age
        u = np.random.rand(K)
        coords_init[m, 1] = np.log(
            (np.exp(mist.maxage) - np.exp(mist.minage)) * u +
            np.exp(mist.minage))

        # Fe/H
        coords_init[m, 2] = mean_feh + sigma_feh * np.random.randn(K)

        # Distance
        u = np.random.rand(K)
        coords_init[m, 3] = (u * (max_distance**3 - min_distance**3) +
                             min_distance**3)**(1. / 3)

        # Av
        coords_init[m, 4] = np.random.rand(K)

        lnpost_init[m] = np.array(list(map(mod.lnpost, coords_init[m])))
        m = ~np.isfinite(lnpost_init)

    class ICModel(emcee3.Model):

        def compute_log_prior(self, state):
            state.log_prior = mod.lnprior(state.coords)
            return state

        def compute_log_likelihood(self, state):
            state.log_likelihood = mod.lnlike(state.coords)
            return state

    sampler = emcee3.Sampler(emcee3.moves.KDEMove())
    ensemble = emcee3.Ensemble(ICModel(), coords_init)

    chunksize = 200
    targetn = 3
    for iteration in range(100):
        if verbose:
            print("Iteration {0}...".format(iteration + 1))
        sampler.run(ensemble, chunksize, progress=verbose)
        mu = np.mean(sampler.get_coords(), axis=1)
        try:
            tau = emcee3.autocorr.integrated_time(mu, c=1)
        except emcee3.autocorr.AutocorrError:
            continue
        tau_max = tau.max()
        neff = (iteration + 1) * chunksize / tau_max - 2.0
        if verbose:
            print("Maximum autocorrelation time: {0}".format(tau_max))
            print("N_eff: {0}\n".format(neff * nwalkers))
        if neff > targetn:
            break

    burnin = int(2 * tau_max)
    ntot = 5000
    if verbose:
        print("Discarding {0} samples for burn-in".format(burnin))
        print("Randomly choosing {0} samples".format(ntot))
    samples = sampler.get_coords(flat=True, discard=burnin)
    total_samples = len(samples)
    inds = np.random.choice(np.arange(len(samples)), size=ntot, replace=False)
    samples = samples[inds]

    fit_parameters = np.empty(len(samples), dtype=[
        ("mass", float), ("log10_age", float), ("feh", float),
        ("distance", float), ("av", float),
    ])
    computed_parameters = np.empty(len(samples), dtype=[
        ("radius", float), ("teff", float), ("logg", float),
    ])

    if verbose:
        prog = tqdm.tqdm
    else:
        prog = lambda f, *args, **kwargs: f

    for i, p in prog(enumerate(samples), total=len(samples)):
        ic = mod.ic(*p)
        fit_parameters[i] = p
        computed_parameters[i] = (ic["radius"], ic["Teff"], ic["logg"])

    total_time = time.time() - strt
    logging.info("emcee3 took {0} sec".format(total_time))

    with h5py.File(output_filename, "w") as f:
        f.attrs["kepid"] = int(star.kepid)
        f.attrs["neff"] = neff * nwalkers
        f.attrs["runtime"] = total_time
        f.create_dataset("fit_parameters", data=fit_parameters)
        f.create_dataset("computed_parameters", data=computed_parameters)

    # Plot
    fig = corner.corner(samples)
    fig.savefig("corner-{0}.png".format(star.kepid))
    plt.close(fig)
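# Hypothetical driver for fit_star, assuming a pandas table whose columns
# match the attributes accessed above (kepid, mass, mass_err1, mass_err2,
# feh, feh_err1, feh_err2, jmag, hmag, kmag and the tgas_* photometry and
# parallax columns). The CSV name is made up.
# import pandas as pd
# stars = pd.read_csv("kic_tgas_sample.csv")
# for star in stars.itertuples(index=False):
#     fit_star(star, verbose=True)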
def get_mod(self, ic, folder):
    return StarModel.from_ini(ic, folder=folder, index=self.index)
import corner
import numpy as np
from scipy.stats import norm as nm
from isochrones import StarModel
from isochrones.mist import MIST_Isochrone

# params from Gaia archive, accessed Dec 2018
Teff = (4444.2, 160)
logg = (3.0, 0.5)
feh = (0.0, 0.25)
parallax = (31.92, 0.05)

# age from BJ's Specmatch-Gaia results
age = [9.9, 0.1]  # log10(age in yr)

mist = MIST_Isochrone()
model = StarModel(mist, Teff=Teff, logg=logg, feh=feh, parallax=parallax,
                  age=age, use_emcee=True)
model.fit(niter=1000)

# add in prior on age (rejection-sample the posterior against a Gaussian)
age_post = model.samples.age_0
age_prior_probs = nm(age[0], age[1]).pdf(age_post)
compare = np.random.uniform(size=len(age_post))
new_samples = model.samples.loc[age_prior_probs > compare]
new_samples = new_samples[['mass_0_0', 'radius_0_0', 'feh_0', 'age_0',
                           'distance_0']]

# make figure
corner.corner(new_samples,
              labels=['Mass [Msol]', 'Radius [Rsol]', 'Fe/H',
                      'log10(age [Gyr])', 'distance [pc]'])
def get_isochrone_params(stars, modeldir, overwrite=False):
    """Fill out parameter table with values obtained from isochrone package

    Args:
        stars (pd.DataFrame): parameter table
        modeldir (str): directory to save fitted models
        overwrite (bool, optional): whether to use existing models or
            overwrite

    Returns:
        stars (pd.DataFrame): updated parameter table
    """
    dar = Dartmouth_Isochrone()

    num_stars = len(stars)
    current_star = 1

    for idx, row in stars.iterrows():
        print("Getting isochrone parameters for star {0} of {1}"
              .format(current_star, num_stars))
        current_star += 1

        # get known stellar properties
        lib_props = {}
        for p in library.Library.STAR_PROPS:
            if not np.isnan(row[p]):
                # (value, uncertainty)
                lib_props[p] = (row[p], row['u_' + p])

        # if all properties are known, we don't need to use the model
        if len(lib_props) == 6:
            continue

        if modeldir is None:
            model = StarModel(dar, **lib_props)
            model.fit(overwrite=True, verbose=False)
        else:
            # check if fitting has already been done
            modelfile = os.path.join(modeldir,
                                     "{0}_model.h5".format(row['cps_name']))
            if os.path.exists(modelfile) and not overwrite:
                model = StarModel.load_hdf(modelfile)
            # otherwise perform the fit
            else:
                model = StarModel(dar, **lib_props)
                model.fit(overwrite=True, verbose=False)
                model.save_hdf(modelfile)

        N_SIGMA = 2
        MAX_PERCENTILE = 0.95
        MIN_PERCENTILE = 0.05

        # fill out unknown parameters
        for p in library.Library.STAR_PROPS:
            value = model.samples[p].quantile(0.5)
            upper_bound = model.samples[p].quantile(MAX_PERCENTILE)
            lower_bound = model.samples[p].quantile(MIN_PERCENTILE)

            # If a property is already known, check for model consistency
            if p in lib_props:
                # check if 2-sigma bounds fail to overlap
                if (row[p] + N_SIGMA * row['u_' + p]) < lower_bound \
                        or (row[p] - N_SIGMA * row['u_' + p]) > upper_bound:
                    warningstr = "Warning: Inconsistent {0} for star {1}\n"\
                        .format(p, row['cps_name'])
                    warningstr += "\tLibrary values: {0:.2f} +/- {1:.2f}\n"\
                        .format(row[p], row['u_' + p])
                    warningstr += "\tModel values: {0:.2f}, ".format(value)
                    warningstr += "{0:d}-sigma = ({1:.2f}, {2:.2f})\n".format(
                        N_SIGMA, lower_bound, upper_bound)
                    print(warningstr)

                    # Save error messages to file
                    errpath = os.path.join(modeldir, 'errors.txt')
                    with open(errpath, 'a') as f:
                        f.write(warningstr)
            # Insert the unknown values if we don't already know them
            else:
                stars.loc[idx, p] = np.around(value, 2)
                stars.loc[idx, 'u_' + p] = \
                    np.around((upper_bound - lower_bound) / 2, 2)

    return stars
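# Hypothetical call, assuming `stars` is a DataFrame carrying the properties
# in library.Library.STAR_PROPS plus matching 'u_<prop>' uncertainty columns
# and a 'cps_name' column; the directory name is made up.
# stars = get_isochrone_params(stars, modeldir="isochrone_models",
#                              overwrite=False)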
def main(index, overwrite=False):
    # First make sure paths exist:
    # os.makedirs('../cache', exist_ok=True)
    # os.makedirs('../plots/isochrones', exist_ok=True)

    # Load the DECam photometry
    decam = load_data('../data/decam_apw.fits')

    iso = MIST_Isochrone(['PanSTARRS_g', 'PanSTARRS_i', 'SkyMapper_u'])

    row = decam[index]
    name = 'lmcla-{0}-'.format(row['index'])
    model_file = '../cache/starmodels-{0}.hdf5'.format(str(row['index']))
    if path.exists(model_file) and not overwrite:
        print('skipping {0} - already exists'.format(name))
        sys.exit(0)

    # This is our "anchor star": it was identified as being near the turnoff,
    # bright, and positionally consistent with being in the LA cluster:
    j1, = np.where(decam['index'] == 24365)[0]
    j2 = index

    if j1 == j2:
        print('skipping anchor-anchor pair')
        sys.exit(0)

    # To fit pairs as resolved binaries, we have to construct the observation
    # tree manually:
    tree = ObservationTree()
    for b in ['PanSTARRS_g', 'PanSTARRS_i', 'SkyMapper_u']:
        survey, band = b.split('_')

        if band == 'u' and decam[band.capitalize() + 'MAG'][j2] > 21:
            extra_s = 0.2
        else:
            extra_s = 0.005

        o = Observation(survey, b, 1.)
        s0 = Source(decam[band.capitalize() + 'MAG'][j1],
                    np.sqrt(0.005**2 +
                            decam[band.capitalize() + 'ERR'][j1]**2))
        s1 = Source(decam[band.capitalize() + 'MAG'][j2],
                    np.sqrt(extra_s**2 +
                            decam[band.capitalize() + 'ERR'][j2]**2),
                    separation=100.)
        o.add_source(s0)
        o.add_source(s1)
        tree.add_observation(o)

    model = StarModel(ic=iso, obs=tree, N=[1, 2])
    # model = StarModel(ic=iso, obs=tree)

    print('setting priors')
    dist_bounds = [1 * 1e3, 100 * 1e3]  # pc
    # model._priors['distance'] = FlatPrior(dist_bounds)
    model._priors['distance'] = GaussianPrior(30 * 1e3, 10 * 1e3,
                                              bounds=dist_bounds)
    model.set_bounds(distance=dist_bounds)  # 1 to 100 kpc

    feh_bounds = (-2., 0.5)
    model.set_bounds(feh=feh_bounds)
    # model._priors['feh'] = FlatPrior(feh_bounds)
    model._priors['feh'] = GaussianPrior(-1.1, 0.5, bounds=feh_bounds)

    AV_bounds = (0, 1)
    model.set_bounds(AV=AV_bounds)
    model._priors['AV'] = PowerLawPrior(-1.1, (1e-3, 1))
    # model._priors['AV'] = GaussianPrior(0.2, 0.1, bounds=AV_bounds)

    age_bounds = (7, 9.5)
    model.set_bounds(age=age_bounds)
    # model._priors['age'] = GaussianPrior(8, 0.5, bounds=age_bounds)
    model._priors['age'] = FlatPrior(age_bounds)

    print('sampling star {0}'.format(row['index']))
    model.fit_multinest(basename=name, refit=overwrite, overwrite=overwrite,
                        n_live_points=2048)
    # model.fit_mcmc(nwalkers=nwalkers,
    #                p0=np.array([350., 8., -0.5, 30000., 0.1]),
    #                nburn=1024, niter=2048)
    model.save_hdf(model_file)

    fig = model.corner_physical()
    fig.savefig('../plots/isochrones/{0}-physical.png'.format(row['index']),
                dpi=200)
    plt.close(fig)

    fig = model.corner_observed()
    fig.savefig('../plots/isochrones/{0}-observed.png'.format(row['index']),
                dpi=200)
    plt.close(fig)

    # model._samples = model.samples[::1024]
    # model.save_hdf(sm_model_file)

    sys.exit(0)
def from_ini(cls, folder, ini_file='fpp.ini', ichrone='mist',
             recalc=False, refit_trap=False,
             **kwargs):
    """
    To enable simple usage, initializes a FPPCalculation from a .ini file

    By default, a file called ``fpp.ini`` will be looked for in the
    current folder.  Also present must be a ``star.ini`` file that
    contains the observed properties of the target star.

    ``fpp.ini`` must be of the following form::

        name = k2oi
        ra = 11:30:14.510
        dec = +07:35:18.21

        period = 32.988 #days
        rprs = 0.0534 #Rp/Rstar

        photfile = lc_k2oi.csv

        [constraints]
        maxrad = 10 #exclusion radius [arcsec]
        secthresh = 0.001 #maximum allowed secondary signal depth

        #This variable defines contrast curves
        #ccfiles = Keck_J.cc, Lick_J.cc

    Photfile must be a text file with columns ``(days_from_midtransit,
    flux, flux_err)``.  Both whitespace- and comma-delimited will be tried,
    using ``np.loadtxt``.  Photfile need not be there if there is a pickled
    :class:`TransitSignal` saved in the same directory as ``ini_file``,
    named ``trsig.pkl`` (or another name as defined by ``trsig`` keyword
    in ``.ini`` file).

    ``star.ini`` should look something like the following::

        B = 15.005, 0.06
        V = 13.496, 0.05
        g = 14.223, 0.05
        r = 12.858, 0.04
        i = 11.661, 0.08
        J = 9.763, 0.03
        H = 9.135, 0.03
        K = 8.899, 0.02
        W1 = 8.769, 0.023
        W2 = 8.668, 0.02
        W3 = 8.552, 0.025
        Kepler = 12.473

        #Teff = 3503, 80
        #feh = 0.09, 0.09
        #logg = 4.89, 0.1

    Any star properties can be defined; if errors are included then they
    will be used in the :class:`isochrones.StarModel` MCMC fit.
    Spectroscopic parameters (``Teff, feh, logg``) are optional.  If
    included, then they will also be included in the
    :class:`isochrones.StarModel` fit.  A magnitude for the band in which
    the transit signal is observed (e.g., ``Kepler``) is required, though
    it need not have an associated uncertainty.

    :param folder:
        Folder to find configuration files.

    :param ini_file:
        Input configuration file.

    :param star_ini_file:
        Input config file for :class:`isochrones.StarModel` fits.

    :param recalc:
        Whether to re-calculate :class:`PopulationSet`, if a ``popset.h5``
        file is already present.

    :param **kwargs:
        Keyword arguments passed to :class:`PopulationSet`.

    Creates:

        * ``trsig.pkl``: the pickled :class:`vespa.TransitSignal` object.
        * ``starfield.h5``: the TRILEGAL field star simulation
        * ``starmodel.h5``: the :class:`isochrones.StarModel` fit
        * ``popset.h5``: the :class:`vespa.PopulationSet` object
          representing the model population simulations.

    Raises
    ------
    RuntimeError :
        If single, double, and triple starmodels are not computed, then
        raises with admonition to run `starfit --all`.

    AttributeError :
        If `trsig.pkl` not present in folder, and `photfile` is not
        defined in config file.
    """
    # Check if all starmodel fits are done.
    # If not, tell user to run 'starfit --all'
    config = ConfigObj(os.path.join(folder, ini_file))

    # Load required entries from ini_file
    try:
        name = config['name']
        ra, dec = config['ra'], config['dec']
        period = float(config['period'])
        rprs = float(config['rprs'])
    except KeyError as err:
        raise KeyError(
            'Missing required element of ini file: {}'.format(err))

    try:
        cadence = float(config['cadence'])
    except KeyError:
        logging.warning(
            'Cadence not provided in fpp.ini; defaulting to Kepler cadence.')
        logging.warning(
            'If this is not a Kepler target, please set cadence (in days).')
        cadence = 1626.0 / 86400  # Default to Kepler cadence

    def fullpath(filename):
        if os.path.isabs(filename):
            return filename
        else:
            return os.path.join(folder, filename)

    # Non-required entries with default values
    popset_file = fullpath(config.get('popset', 'popset.h5'))
    starfield_file = fullpath(config.get('starfield', 'starfield.h5'))
    trsig_file = fullpath(config.get('trsig', 'trsig.pkl'))

    # Check for StarModel fits
    starmodel_basename = config.get('starmodel_basename',
                                    '{}_starmodel'.format(ichrone))
    single_starmodel_file = os.path.join(
        folder, '{}_single.h5'.format(starmodel_basename))
    binary_starmodel_file = os.path.join(
        folder, '{}_binary.h5'.format(starmodel_basename))
    triple_starmodel_file = os.path.join(
        folder, '{}_triple.h5'.format(starmodel_basename))

    try:
        single_starmodel = StarModel.load_hdf(single_starmodel_file)
        binary_starmodel = StarModel.load_hdf(binary_starmodel_file)
        triple_starmodel = StarModel.load_hdf(triple_starmodel_file)
    except Exception as e:
        print(e)
        raise RuntimeError('Cannot load StarModels. ' +
                           'Please run `starfit --all {}`.'.format(folder))

    # Create (or load) TransitSignal
    if os.path.exists(trsig_file):
        logging.info('Loading transit signal from {}...'.format(trsig_file))
        with open(trsig_file, 'rb') as f:
            trsig = pickle.load(f)
    else:
        try:
            photfile = fullpath(config['photfile'])
        except KeyError:
            raise AttributeError('If transit pickle file (trsig.pkl) ' +
                                 'not present, "photfile" must be ' +
                                 'defined.')

        trsig = TransitSignal.from_ascii(photfile, P=period, name=name)
        if not trsig.hasMCMC or refit_trap:
            logging.info('Fitting transitsignal with MCMC...')
            trsig.MCMC()
            trsig.save(trsig_file)

    # Create (or load) PopulationSet
    do_only = DEFAULT_MODELS
    if os.path.exists(popset_file):
        if recalc:
            os.remove(popset_file)
        else:
            with pd.HDFStore(popset_file) as store:
                do_only = [m for m in DEFAULT_MODELS if m not in store]

            # Check that properties of saved population match requested
            try:
                popset = PopulationSet.load_hdf(popset_file)
                for pop in popset.poplist:
                    if pop.cadence != cadence:
                        raise ValueError(
                            'Requested cadence ({}) '.format(cadence) +
                            'does not match stored ({})! '.format(
                                pop.cadence) +
                            'Set recalc=True.')
            except:
                raise

    if do_only:
        logging.info(
            'Generating {} models for PopulationSet...'.format(do_only))
    else:
        logging.info(
            'Populations ({}) already generated.'.format(DEFAULT_MODELS))

    popset = PopulationSet(period=period, cadence=cadence,
                           mags=single_starmodel.mags,
                           ra=ra, dec=dec,
                           trilegal_filename=starfield_file,  # Maybe change parameter name?
                           starmodel=single_starmodel,
                           binary_starmodel=binary_starmodel,
                           triple_starmodel=triple_starmodel,
                           rprs=rprs, do_only=do_only,
                           savefile=popset_file, **kwargs)

    fpp = cls(trsig, popset, folder=folder)

    #############
    # Apply constraints

    # Exclusion radius
    maxrad = float(config['constraints']['maxrad'])
    fpp.set_maxrad(maxrad)
    if 'secthresh' in config['constraints']:
        secthresh = float(config['constraints']['secthresh'])
        if not np.isnan(secthresh):
            fpp.apply_secthresh(secthresh)

    # Odd-even constraint
    diff = 3 * np.max(trsig.depthfit[1])
    fpp.constrain_oddeven(diff)

    # apply contrast curve constraints if present
    if 'ccfiles' in config['constraints']:
        ccfiles = config['constraints']['ccfiles']
        if isinstance(ccfiles, string_types):
            ccfiles = [ccfiles]
        for ccfile in ccfiles:
            if not os.path.isabs(ccfile):
                ccfile = os.path.join(folder, ccfile)
            m = re.search(r'(\w+)_(\w+)\.cc', os.path.basename(ccfile))
            if not m:
                logging.warning(
                    'Invalid CC filename ({}); '.format(ccfile) +
                    'skipping.')
                continue
            else:
                band = m.group(2)
                inst = m.group(1)
                name = '{} {}-band'.format(inst, band)
                cc = ContrastCurveFromFile(ccfile, band, name=name)
                fpp.apply_cc(cc)

    # apply "velocity contrast curve" if present
    if 'vcc' in config['constraints']:
        dv = float(config['constraints']['vcc'][0])
        dmag = float(config['constraints']['vcc'][1])
        vcc = VelocityContrastCurve(dv, dmag)
        fpp.apply_vcc(vcc)

    return fpp
def test_likelihood_rotation_giant():
    """
    Make sure that the lhf can cope with zeros, NaNs and None values for the
    rotation period. Also, check that there is a drop in likelihood at the
    eep = 454 boundary.
    """
    iso_params = pd.DataFrame(dict({
        "teff": (5777, 10),
        "logg": (4.44, .05),
        "feh": (0., .001),
        "parallax": (1., .01)}))  # mas

    # Set up the StarModel isochrones object.
    mod = StarModel(mist, **iso_params)
    lnparams = [355, np.log10(4.56 * 1e9), 0., np.log(1000), 0.]

    # The lnprob arguments.
    args = [mod, None, None, .65, 1., False, False]
    none_lnprob = lnprob(lnparams, *args)

    args = [mod, np.nan, np.nan, .65, 1., False, False]
    nan_lnprob = lnprob(lnparams, *args)

    args = [mod, 0., 0., .65, 1., False, False]
    zero_lnprob = lnprob(lnparams, *args)

    args = [mod, 26., 1., .65, 1., True, False]
    iso_lnprob = lnprob(lnparams, *args)

    args = [mod, 26., 1., .65, 1., False, True]
    gyro_lnprob = lnprob(lnparams, *args)

    # check that gyro is switched off for all of these.
    assert none_lnprob == nan_lnprob
    assert nan_lnprob == zero_lnprob

    # check that gyro on gives different lnprob
    assert gyro_lnprob != iso_lnprob

    # Likelihood should be greater for dwarfs because the gyro lnlike is a
    # broad Gaussian for giants.
    giant_params = [455, np.log10(4.56 * 1e9), 0., np.log(1000), 0.]
    dwarf_params = [453, np.log10(4.56 * 1e9), 0., np.log(1000), 0.]
    args = [mod, 26., 1., None, None, False, False]
    giant_lnprob = lnprob(giant_params, *args)
    dwarf_lnprob = lnprob(dwarf_params, *args)
    assert giant_lnprob[0] < dwarf_lnprob[0]

    # Likelihood should be greater for cool stars because the gyro lnlike is
    # a broad Gaussian for hot stars.
    heep, hage, hfeh = 405, np.log10(2.295 * 1e9), 0.
    ceep, cage, cfeh = 355, np.log10(4.56 * 1e9), 0.
    hot_params = [heep, hage, hfeh, np.log(1000), 0.]
    cool_params = [ceep, cage, cfeh, np.log(1000), 0.]

    cool_prot = gyro_model_rossby(
        cage, calc_bv(cool_params),
        mist.interp_value([ceep, cage, cfeh], ["mass"]))
    hot_prot = gyro_model_rossby(
        hage, calc_bv(hot_params),
        mist.interp_value([heep, hage, hfeh], ["mass"]))

    cool_args = [mod, cool_prot, 1., None, None, False, False]
    hot_args = [mod, hot_prot, 1., None, None, False, False]
    hot_lnprob = lnprob(hot_params, *hot_args)
    cool_lnprob = lnprob(cool_params, *cool_args)
    assert hot_lnprob[0] < cool_lnprob[0], "cool star likelihood should be" \
        " higher than hot star likelihood"