Example #1
    def setup_class(self):
        self.DataSpectrum = DataSpectrum.open(
            "../data/WASP14/WASP-14_2009-06-15_04h13m57s_cb.spec.flux",
            orders=np.array([22]))
        self.Instrument = TRES()
        self.HDF5Interface = HDF5Interface(
            "../libraries/PHOENIX_submaster.hdf5")

        stellar_Starting = {
            "temp": 6000,
            "logg": 4.05,
            "Z": -0.4,
            "vsini": 10.5,
            "vz": 15.5,
            "logOmega": -19.665
        }
        stellar_tuple = C.dictkeys_to_tuple(stellar_Starting)

        cheb_tuple = ("c1", "c2", "c3")
        cov_tuple = ("sigAmp", "logAmp", "l")
        region_tuple = ("h", "loga", "mu", "sigma")

        self.Model = Model(self.DataSpectrum,
                           self.Instrument,
                           self.HDF5Interface,
                           stellar_tuple=stellar_tuple,
                           cheb_tuple=cheb_tuple,
                           cov_tuple=cov_tuple,
                           region_tuple=region_tuple,
                           outdir="")
Example #2
def main():
    #Use argparse to determine if we've specified a config file
    import argparse
    parser = argparse.ArgumentParser(prog="plotly_model.py", description="Plot the model and residuals using plot.ly")
    parser.add_argument("json", help="*.json file describing the model.")
    parser.add_argument("params", help="*.yaml file specifying run parameters.")
    # parser.add_argument("-o", "--output", help="*.html file for output")
    args = parser.parse_args()

    import json
    import yaml

    if args.json: #
        #assert that we actually specified a *.json file
        if ".json" not in args.json:
            import sys
            sys.exit("Must specify a *.json file.")

    if args.params: #
        #assert that we actually specified a *.yaml file
        if ".yaml" in args.params:
            yaml_file = args.params
            f = open(args.params)
            config = yaml.load(f)
            f.close()

        else:
            import sys
            sys.exit("Must specify a *.yaml file.")

    from StellarSpectra.model import Model
    from StellarSpectra.spectrum import DataSpectrum
    from StellarSpectra.grid_tools import TRES, HDF5Interface

    #Figure out what the relative path is to base
    import StellarSpectra
    base = StellarSpectra.__file__[:-26]

    myDataSpectrum = DataSpectrum.open(base + config['data'], orders=config['orders'])
    myInstrument = TRES()
    myHDF5Interface = HDF5Interface(base + config['HDF5_path'])

    myModel = Model.from_json(args.json, myDataSpectrum, myInstrument, myHDF5Interface)

    for model in myModel.OrderModels:

        #Get the data
        wl, fl = model.get_data()

        #Get the model flux
        flm = model.get_spectrum()

        #Get residuals
        residuals = model.get_residuals()

        name = "Order {}".format(model.order)

        url = plotly_order(name, wl, fl, flm, residuals)
        print(url)
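
A note on the config handling above: PyYAML 5.1 and later warn when yaml.load is called without an explicit Loader. A minimal, safer equivalent for reading the run-parameter file (a sketch, not part of the original script):

import yaml

with open(args.params) as f:
    config = yaml.safe_load(f)  # safe_load restricts parsing to plain Python types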
Example #3
    def setup_class(self):
        #It is necessary to use a piece of data created on the supercomputer so we can test interpolation in 4D
        self.hdf5interface = HDF5Interface("libraries/PHOENIX_submaster.hdf5")
        #libraries/PHOENIX_submaster.hdf5 should have the following bounds
        #{"temp":(6000, 7000), "logg":(3.5,5.5), "Z":(-1.0,0.0), "alpha":(0.0,0.4)}
        from StellarSpectra.spectrum import DataSpectrum
        self.DataSpectrum = DataSpectrum.open("/home/ian/Grad/Research/Disks/StellarSpectra/tests/WASP14/WASP-14_2009-06-15_04h13m57s_cb.spec.flux", orders=np.array([21, 22, 23]))

        #TODO: test DataSpectrum with a different number of orders, and see how it is truncated.

        self.interpolator = ModelInterpolator(self.hdf5interface, self.DataSpectrum, cache_max=20, cache_dump=10)
Example #4
    def setup_class(self):
        self.DataSpectrum = DataSpectrum.open("../data/WASP14/WASP-14_2009-06-15_04h13m57s_cb.spec.flux", orders=np.array([22]))
        self.Instrument = TRES()
        self.HDF5Interface = HDF5Interface("../libraries/PHOENIX_submaster.hdf5")

        stellar_Starting = {"temp":6000, "logg":4.05, "Z":-0.4, "vsini":10.5, "vz":15.5, "logOmega":-19.665}
        stellar_tuple = C.dictkeys_to_tuple(stellar_Starting)

        cheb_tuple = ("c1", "c2", "c3")
        cov_tuple = ("sigAmp", "logAmp", "l")
        region_tuple = ("h", "loga", "mu", "sigma")

        self.Model = Model(self.DataSpectrum, self.Instrument, self.HDF5Interface, stellar_tuple=stellar_tuple, cheb_tuple=cheb_tuple,
                           cov_tuple=cov_tuple, region_tuple=region_tuple, outdir="")
Example #5
    def setup_class(self):
        #It is necessary to use a piece of data created on the supercomputer so we can test interpolation in 4D
        self.hdf5interface = HDF5Interface("libraries/PHOENIX_submaster.hdf5")
        #libraries/PHOENIX_submaster.hdf5 should have the following bounds
        #{"temp":(6000, 7000), "logg":(3.5,5.5), "Z":(-1.0,0.0), "alpha":(0.0,0.4)}
        from StellarSpectra.spectrum import DataSpectrum
        self.DataSpectrum = DataSpectrum.open(
            "/home/ian/Grad/Research/Disks/StellarSpectra/tests/WASP14/WASP-14_2009-06-15_04h13m57s_cb.spec.flux",
            orders=np.array([21, 22, 23]))

        #TODO: test DataSpectrum with a different number of orders, and see how it is truncated.

        self.interpolator = ModelInterpolator(self.hdf5interface,
                                              self.DataSpectrum,
                                              cache_max=20,
                                              cache_dump=10)
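
A quick sanity check against the grid bounds quoted in the comment above (a sketch; the bounds dict is copied from that comment and the test point is arbitrary):

bounds = {"temp": (6000, 7000), "logg": (3.5, 5.5), "Z": (-1.0, 0.0), "alpha": (0.0, 0.4)}
point = {"temp": 6500, "logg": 4.0, "Z": -0.5, "alpha": 0.2}
#every requested value must fall inside the corresponding (low, high) interval
assert all(lo <= point[key] <= hi for key, (lo, hi) in bounds.items())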
Example #6
def main():
    from StellarSpectra.spectrum import DataSpectrum
    from StellarSpectra.grid_tools import TRES

    myDataSpectrum = DataSpectrum.open("../../data/WASP14/WASP14-2009-06-14.hdf5", orders=np.array([22]))
    myInstrument = TRES()

    myComp = AccuracyComparison(
        myDataSpectrum,
        myInstrument,
        "../../libraries/PHOENIX_submaster.hdf5",
        "../../libraries/PHOENIX_objgrid6000.hdf5",
        {"temp": 6000, "logg": 4.5, "Z": -0.5},
        {"temp": 10, "logg": 0.05, "Z": 0.05},
    )

    myComp.plot_quality()
Example #7
def main():
    from StellarSpectra.spectrum import DataSpectrum
    from StellarSpectra.grid_tools import TRES

    myDataSpectrum = DataSpectrum.open(
        "../../data/WASP14/WASP14-2009-06-14.hdf5", orders=np.array([22]))
    myInstrument = TRES()

    myComp = AccuracyComparison(myDataSpectrum, myInstrument,
                                "../../libraries/PHOENIX_submaster.hdf5",
                                "../../libraries/PHOENIX_objgrid6000.hdf5", {
                                    "temp": 6000,
                                    "logg": 4.5,
                                    "Z": -0.5
                                }, {
                                    "temp": 10,
                                    "logg": 0.05,
                                    "Z": 0.05
                                })

    myComp.plot_quality()
Example #8
import StellarSpectra.constants as C
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.covariance import CovarianceMatrix
import logging

#Set up the logger
logging.basicConfig(
    format="%(asctime)s - %(levelname)s - %(name)s -  %(message)s",
    filename="log.log",
    level=logging.DEBUG,
    filemode="w",
    datefmt='%m/%d/%Y %I:%M:%S %p')

#interface = HDF5Interface("../libraries/PHOENIX_F.hdf5")
interface = HDF5Interface("../libraries/PHOENIX_TRES_F.hdf5")
dataspec = DataSpectrum.open("../data/WASP14/WASP14-2009-06-14.hdf5",
                             orders=np.array([22]))

interpolator = Interpolator(interface, dataspec, trilinear=True)

params = {"temp": 6010, "logg": 4.1, "Z": -0.3}
fl, errspec = interpolator(params)

#A good test here would be to create errspec as an arange() so that inside of cov.h we know how we are indexing this
#N = np.prod(errspec.shape)
#testspec = np.arange(N, dtype="f8")
#print(len(fl))
#testspec.shape = errspec.shape
#print(testspec)

#Create a CovarianceMatrix object
cov = CovarianceMatrix(dataspec, 0, 20, debug=True)
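
The commented-out test above, written out as runnable code (a sketch; it only assumes errspec is the NumPy array returned by the interpolator):

import numpy as np

N = np.prod(errspec.shape)
testspec = np.arange(N, dtype="f8")  # unique values make the indexing inside cov.h easy to trace
testspec.shape = errspec.shape
print(testspec)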
Example #9
def line_classes_strip():

    spec = DataSpectrum.open("data/WASP14/WASP-14_2009-06-15_04h13m57s_cb.spec.flux", orders=[22,23])
    fl23, fl24 = spec.fls
    sig23, sig24 = spec.sigmas

    #Model?
    wlsz23 = np.load("residuals/wlsz23.npy")[0]
    f23 = np.load("residuals/fl23.npy")[0]
    wlsz24 = np.load("residuals/wlsz24.npy")[0]
    f24 = np.load("residuals/fl24.npy")[0]

    #import matplotlib
    #font = {'size' : 8}

    #matplotlib.rc('font', **font)
    #matplotlib.rc('labelsize', **font)
    r23 = (fl23 - f23)/sig23
    r24 = (fl24 - f24)/sig24
    fl23 /= 2e-13
    fl24 /= 2e-13
    f23 /= 2e-13
    f24 /= 2e-13

    #Class 0
    fig, ax = plt.subplots(nrows=2, figsize=(3,4), sharex=True)
    ax[0].plot(wlsz23, fl23, "b", label="data")
    ax[0].plot(wlsz23, f23, "r", label="model")
    ax[0].legend(loc="lower left")
    ax[0].set_title("Class 0")
    ax[1].plot(wlsz23, r23, "g")
    ax[1].set_xlim(5136.4, 5140.4)
    ax[1].set_ylim(-4,4)
    labels = ax[1].get_xticklabels()
    for label in labels:
        label.set_rotation(60)
    ax[0].set_ylabel(r"$\propto f_\lambda$")
    ax[1].set_ylabel(r"Residuals$/\sigma_P$")
    ax[1].xaxis.set_major_formatter(FSF("%.0f"))
    ax[1].xaxis.set_major_locator(MultipleLocator(1.))
    for i in [0, 1]:
        ax[i].tick_params(axis='both', which='major', labelsize=10)
    fig.subplots_adjust(left=0.19, hspace=0.1, right=0.96, top=0.92)
    fig.savefig("plots/class0.png")
    plt.close(fig)

    fig, ax = plt.subplots(nrows=2, ncols=2, figsize=(4,4.))
    #Class I
    ax[0,0].plot(wlsz23, fl23, "b")
    ax[0,0].plot(wlsz23, f23, "r")
    ax[0,0].set_title("Class I")
    ax[0,0].set_ylim(0.4, 1.1)

    ax[1,0].plot(wlsz23, r23, "g")
    ax[0,0].set_xlim(5188, 5189.5)
    ax[1,0].set_xlim(5188, 5189.5)
    #ax[1,1].set_ylim(-4,4)

    #Class II
    ax[0,1].plot(wlsz24, fl24, "b", label='data')
    ax[0,1].plot(wlsz24, f24, "r", label='model')
    ax[0,1].set_ylim(0.3, 1.1)
    ax[0,1].legend(loc="lower center", prop={'size':10})
    ax[0,1].set_title("Class II")

    ax[1,1].plot(wlsz24, r24, "g")
    ax[0,1].set_xlim(5258, 5260)
    ax[1,1].set_xlim(5258, 5260)
    ax[0,0].set_ylabel(r"$\propto f_\lambda$")
    ax[1,0].set_ylabel(r"Residuals$/\sigma_P$")

    for i in [0,1]:
        for j in [0,1]:
            ax[i,j].xaxis.set_major_formatter(FSF("%.1f"))
            ax[i,j].xaxis.set_major_locator(MultipleLocator(0.5))
            ax[i,j].tick_params(axis='both', which='major', labelsize=10)
        ax[0,i].xaxis.set_ticklabels([])

    for j in range(2):
        labels = ax[1,j].get_xticklabels()
        for label in labels:
            label.set_rotation(60)

    fig.subplots_adjust(left=0.15, hspace=0.1, wspace=0.23, right=0.96, top=0.92, bottom=0.15)
    fig.savefig("plots/classI_II.png")
    plt.close(fig)

    #Class III
    fig, ax = plt.subplots(nrows=2, figsize=(3,4), sharex=True)
    ax[0].plot(wlsz24, fl24, "b", label="data")
    ax[0].plot(wlsz24, f24, "r", label="model")
    ax[0].legend(loc="lower left")
    ax[0].set_title("Class III")
    ax[1].plot(wlsz24, r24, "g")
    ax[1].set_xlim(5260, 5271)
    ax[1].set_ylim(-15, 15)
    ax[0].set_ylabel(r"$\propto f_\lambda$")
    ax[1].set_ylabel(r"Residuals$/\sigma_P$")
    ax[1].xaxis.set_major_formatter(FSF("%.0f"))
    ax[1].xaxis.set_major_locator(MultipleLocator(2.))
    for i in [0, 1]:
        ax[i].tick_params(axis='both', which='major', labelsize=10)
    labels = ax[1].get_xticklabels()
    for label in labels:
        label.set_rotation(60)

    fig.subplots_adjust(left=0.19, hspace=0.1, right=0.96, top=0.92)
    fig.savefig("plots/classIII.png")
    plt.close(fig)
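
The plotting code above relies on imports outside the excerpt; presumably something like the following, where FSF is assumed to be an alias for matplotlib's FormatStrFormatter:

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import FormatStrFormatter as FSF
from matplotlib.ticker import MultipleLocator
from StellarSpectra.spectrum import DataSpectrum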
Example #10
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.covariance import CovarianceMatrix
import logging

# Set up the logger
logging.basicConfig(
    format="%(asctime)s - %(levelname)s - %(name)s -  %(message)s",
    filename="log.log",
    level=logging.DEBUG,
    filemode="w",
    datefmt="%m/%d/%Y %I:%M:%S %p",
)

# interface = HDF5Interface("../libraries/PHOENIX_F.hdf5")
interface = HDF5Interface("../libraries/PHOENIX_TRES_F.hdf5")
dataspec = DataSpectrum.open("../data/WASP14/WASP14-2009-06-14.hdf5", orders=np.array([22]))

interpolator = Interpolator(interface, dataspec, trilinear=True)

params = {"temp": 6010, "logg": 4.1, "Z": -0.3}
fl, errspec = interpolator(params)

# A good test here would be to create errspec as an arange() so that inside of cov.h we know how we are indexing this
# N = np.prod(errspec.shape)
# testspec = np.arange(N, dtype="f8")
# print(len(fl))
# testspec.shape = errspec.shape
# print(testspec)

# Create a CovarianceMatrix object
cov = CovarianceMatrix(dataspec, 0, 20, debug=True)
Example #11
import numpy as np
from StellarSpectra.model import Model
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import SPEX, HDF5Interface
from StellarSpectra import utils

myDataSpectrum = DataSpectrum.open("../../data/Gl51/Gl51RA.hdf5", orders=np.array([0]))
myInstrument = SPEX()
myHDF5Interface = HDF5Interface("../../libraries/PHOENIX_SPEX_M.hdf5")

#Load a model using the JSON file
#Taken from:
# /home/ian/Grad/Research/Disks/StellarSpectra/output/Gl51/PHOENIX/RA/region/logg/4_8sig/
myModel = Model.from_json("Gl51_model0_final.json", myDataSpectrum, myInstrument, myHDF5Interface)

myOrderModel = myModel.OrderModels[0]
model_flux = myOrderModel.get_spectrum()

spec = myModel.get_data()
wl = spec.wls[0]
fl = spec.fls[0]

model_fl = myOrderModel.get_spectrum()
residuals = fl - model_fl

mask = spec.masks[0]
cov = myModel.OrderModels[0].get_Cov().todense()

np.save("Gl51_covariance_matrix.npy", cov)
import sys
sys.exit()
Example #12
def main():
    #Use argparse to determine if we've specified a config file
    import argparse
    parser = argparse.ArgumentParser(
        prog="plotly_model.py",
        description="Plot the model and residuals using plot.ly")
    parser.add_argument("json", help="*.json file describing the model.")
    parser.add_argument("params",
                        help="*.yaml file specifying run parameters.")
    # parser.add_argument("-o", "--output", help="*.html file for output")
    args = parser.parse_args()

    import json
    import yaml

    if args.json:  #
        #assert that we actually specified a *.json file
        if ".json" not in args.json:
            import sys
            sys.exit("Must specify a *.json file.")

    if args.params:  #
        #assert that we actually specified a *.yaml file
        if ".yaml" in args.params:
            yaml_file = args.params
            f = open(args.params)
            config = yaml.load(f)
            f.close()

        else:
            import sys
            sys.exit("Must specify a *.yaml file.")

    from StellarSpectra.model import Model
    from StellarSpectra.spectrum import DataSpectrum
    from StellarSpectra.grid_tools import TRES, HDF5Interface

    #Figure out what the relative path is to base
    import StellarSpectra
    base = StellarSpectra.__file__[:-26]

    myDataSpectrum = DataSpectrum.open(base + config['data'],
                                       orders=config['orders'])
    myInstrument = TRES()
    myHDF5Interface = HDF5Interface(base + config['HDF5_path'])

    myModel = Model.from_json(args.json, myDataSpectrum, myInstrument,
                              myHDF5Interface)

    for model in myModel.OrderModels:

        #Get the data
        wl, fl = model.get_data()

        #Get the model flux
        flm = model.get_spectrum()

        #Get residuals
        residuals = model.get_residuals()

        name = "Order {}".format(model.order)

        url = plotly_order(name, wl, fl, flm, residuals)
        print(url)
Example #13
#Open the yaml file in this directory
yaml_file = args.run + "/input.yaml"
f = open(yaml_file)
config = yaml.load(f)
f.close()

#Use the model_final.json to figure out how many orders there are
from StellarSpectra.model import Model
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import TRES, HDF5Interface

#Figure out what the relative path is to base
import StellarSpectra
base = StellarSpectra.__file__[:-26]

myDataSpectrum = DataSpectrum.open(base + config['data'],
                                   orders=config['orders'])
myInstrument = TRES()
myHDF5Interface = HDF5Interface(base + config['HDF5_path'])

myModel = Model.from_json(args.run + "/model_final.json", myDataSpectrum,
                          myInstrument, myHDF5Interface)
orders = [orderModel.order for orderModel in myModel.OrderModels]

flot_plots = {22: "Hi"}

#If the Jinja templater is going to work, it needs a list of orders. It also needs a list of how many regions
# are in each order; for each order, there is a dictionary of global parameters.

#Set the categories as the decomposition of the run directory, excluding output and the "run00" directory.
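
A sketch of the per-order structure such a template would need, assuming each OrderModel exposes get_regions_dict() as in the flot_model.py examples on this page (hypothetical glue code, not from the original script):

#Hypothetical: count the regions in each order for the template context
orders = [orderModel.order for orderModel in myModel.OrderModels]
regions_per_order = {orderModel.order: len(orderModel.get_regions_dict())
                     for orderModel in myModel.OrderModels}
template_context = {"orders": orders, "regions_per_order": regions_per_order}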
Example #14
        sys.exit("Must specify a *.json file.")

if args.params:  #
    #assert that we actually specified a *.yaml file
    if ".yaml" in args.params:
        yaml_file = args.params
        f = open(args.params)
        config = yaml.load(f)
        f.close()

    else:
        import sys
        sys.exit("Must specify a *.yaml file.")

myDataSpectrum = DataSpectrum.open(config['data_dir'], orders=config['orders'])
myInstrument = TRES()
myHDF5Interface = HDF5Interface(config['HDF5_path'])

stellar_Starting = config['stellar_params']
stellar_tuple = C.dictkeys_to_tuple(stellar_Starting)

cheb_Starting = config['cheb_params']
cheb_tuple = ("logc0", "c1", "c2", "c3")

cov_Starting = config['cov_params']
cov_tuple = C.dictkeys_to_cov_global_tuple(cov_Starting)

region_tuple = ("h", "loga", "mu", "sigma")
region_MH_cov = np.array([0.05, 0.04, 0.02, 0.02])**2 * np.identity(
    len(region_tuple))
Example #15
yaml_file = args.run + "/input.yaml"
f = open(yaml_file)
config = yaml.load(f)
f.close()

# Use the model_final.json to figure out how many orders there are
from StellarSpectra.model import Model
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import TRES, HDF5Interface

# Figure out what the relative path is to base
import StellarSpectra

base = StellarSpectra.__file__[:-26]

myDataSpectrum = DataSpectrum.open(base + config["data"], orders=config["orders"])
myInstrument = TRES()
myHDF5Interface = HDF5Interface(base + config["HDF5_path"])

myModel = Model.from_json(args.run + "/model_final.json", myDataSpectrum, myInstrument, myHDF5Interface)
orders = [orderModel.order for orderModel in myModel.OrderModels]

flot_plots = {22: "Hi"}

# If the Jinja templater is going to work, it needs a list of orders. It also needs a list of how many regions
# are in each order; for each order, there is a dictionary of global parameters.

# Set the categories as the decomposition of the run directory, excluding
# output and the "run00" directory.
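
The slice StellarSpectra.__file__[:-26] strips a trailing "StellarSpectra/__init__.py" (26 characters) to recover the repository root. A less brittle way to build the same base path, assuming __file__ really does point at the package's __init__.py (a standard-library-only sketch):

import os
import StellarSpectra

# dirname twice climbs from __init__.py to the repository root; joining with "" keeps the trailing separator
base = os.path.join(os.path.dirname(os.path.dirname(StellarSpectra.__file__)), "")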
Example #16
import numpy as np
from StellarSpectra.model import Model
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import TRES, HDF5Interface
from StellarSpectra import utils

myDataSpectrum = DataSpectrum.open("../../data/WASP14/WASP14-2009-06-14.hdf5",
                                   orders=np.array([21, 22, 23]))

myInstrument = TRES()

myHDF5Interface = HDF5Interface("../../libraries/PHOENIX_TRES_F.hdf5")

#Load a model using the JSON file
myModel = Model.from_json("WASP14_PHOENIX_model0_final.json", myDataSpectrum,
                          myInstrument, myHDF5Interface)

myOrderModel = myModel.OrderModels[1]
model_flux = myOrderModel.get_spectrum()

spec = myModel.get_data()
wl = spec.wls[1]
fl = spec.fls[1]

model_fl = myOrderModel.get_spectrum()
residuals = fl - model_fl

mask = spec.masks[1]
cov = myModel.OrderModels[1].get_Cov().todense()

np.save("PHOENIX_covariance_matrix.npy", cov)
import numpy as np
from StellarSpectra.model import Model
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import TRES, SPEX, HDF5Interface
from StellarSpectra import utils


myDataSpectrum = DataSpectrum.open("../../data/WASP14/WASP14-2009-06-14.hdf5", orders=np.array([21, 22, 23]))
myInstrument = TRES()
myHDF5Interface = HDF5Interface("../../libraries/Kurucz_TRES.hdf5")

# Load a model using the JSON file
# Taken from:
# /n/home07/iczekala/StellarSpectra/output/WASP14/Kurucz/21_22_23/logg/cov/2014-08-06/run18
myModel = Model.from_json("WASP14_Kurucz_logg_model_final.json", myDataSpectrum, myInstrument, myHDF5Interface)

myOrderModel = myModel.OrderModels[1]
model_flux = myOrderModel.get_spectrum()

spec = myModel.get_data()
wl = spec.wls[1]
fl = spec.fls[1]

model_fl = myOrderModel.get_spectrum()
residuals = fl - model_fl

mask = spec.masks[1]
cov = myModel.OrderModels[1].get_Cov().todense()

draws = utils.random_draws(cov, num=50)
Example #18
def main():
    #Use argparse to determine if we've specified a config file
    import argparse
    parser = argparse.ArgumentParser(
        prog="flot_model.py",
        description="Plot the model and residuals using flot.")
    parser.add_argument("json", help="*.json file describing the model.")
    parser.add_argument("params",
                        help="*.yaml file specifying run parameters.")
    # parser.add_argument("-o", "--output", help="*.html file for output")
    args = parser.parse_args()

    import json
    import yaml

    if args.json:  #
        #assert that we actually specified a *.json file
        if ".json" not in args.json:
            import sys
            sys.exit("Must specify a *.json file.")

    if args.params:  #
        #assert that we actually specified a *.yaml file
        if ".yaml" in args.params:
            yaml_file = args.params
            f = open(args.params)
            config = yaml.load(f)
            f.close()

        else:
            import sys
            sys.exit("Must specify a *.yaml file.")

    from StellarSpectra.model import Model
    from StellarSpectra.spectrum import DataSpectrum
    from StellarSpectra.grid_tools import TRES, HDF5Interface

    #Figure out what the relative path is to base
    import StellarSpectra
    base = StellarSpectra.__file__[:-26]

    myDataSpectrum = DataSpectrum.open(base + config['data'],
                                       orders=config['orders'])
    myInstrument = TRES()
    myHDF5Interface = HDF5Interface(base + config['HDF5_path'])

    myModel = Model.from_json(args.json, myDataSpectrum, myInstrument,
                              myHDF5Interface)

    for model in myModel.OrderModels:

        #If an order has regions, read these out from model_final.json
        region_dict = model.get_regions_dict()
        print("Region dict", region_dict)
        #loop through these to determine the wavelength of each
        wl_regions = [value["mu"] for value in region_dict.values()]

        #Make vertical markings at the location of the wl_regions.

        #Get the data, sigmas, and mask
        wl, fl, sigma, mask = model.get_data()

        #Get the model flux
        flm = model.get_spectrum()

        #Get chebyshev
        cheb = model.get_Cheb()

        name = "Order {}".format(model.order)

        plot_data = order_json(wl, fl, sigma, mask, flm, cheb)
        plot_data.update({"wl_regions": wl_regions})
        print(plot_data['wl_regions'])

        render_template(base, plot_data)
Example #19
if args.params: #
    #assert that we actually specified a *.yaml file
    if ".yaml" in args.params:
        yaml_file = args.params
        f = open(args.params)
        config = yaml.load(f)
        f.close()

    else:
        import sys
        sys.exit("Must specify a *.yaml file.")


myDataSpectrum = DataSpectrum.open(config['data_dir'], orders=config['orders'])
myInstrument = TRES()
myHDF5Interface = HDF5Interface(config['HDF5_path'])

stellar_Starting = config['stellar_params']
stellar_tuple = C.dictkeys_to_tuple(stellar_Starting)

cheb_Starting = config['cheb_params']
cheb_tuple = ("logc0", "c1", "c2", "c3")

cov_Starting = config['cov_params']
cov_tuple = C.dictkeys_to_cov_global_tuple(cov_Starting)

region_tuple = ("h", "loga", "mu", "sigma")
region_MH_cov = np.array([0.05, 0.04, 0.02, 0.02])**2 * np.identity(len(region_tuple))
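
The diagonal proposal covariance above (region_MH_cov) can be written equivalently with np.diag:

import numpy as np

#identical result: a diagonal matrix of the squared per-parameter step sizes
region_MH_cov = np.diag(np.array([0.05, 0.04, 0.02, 0.02]) ** 2)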
Example #20
def line_classes_panel():
    fig, ax = plt.subplots(nrows=2, ncols=4, figsize=(6,4.))


    spec = DataSpectrum.open("data/WASP14/WASP-14_2009-06-15_04h13m57s_cb.spec.flux", orders=[22,23])
    fl23, fl24 = spec.fls
    sig23, sig24 = spec.sigmas

    #Model?
    wlsz23 = np.load("residuals/wlsz23.npy")[0]
    f23 = np.load("residuals/fl23.npy")[0]
    wlsz24 = np.load("residuals/wlsz24.npy")[0]
    f24 = np.load("residuals/fl24.npy")[0]

    #import matplotlib
    #font = {'size' : 8}

    #matplotlib.rc('font', **font)
    #matplotlib.rc('labelsize', **font)
    r23 = (fl23 - f23)/sig23
    r24 = (fl24 - f24)/sig24
    fl23 /= 2e-13
    fl24 /= 2e-13
    f23 /= 2e-13
    f24 /= 2e-13

    ax[0,0].plot(wlsz23, fl23, "b", label="data")
    ax[0,0].plot(wlsz23, f23, "r", label="model")

    ax[1,0].plot(wlsz23, r23, "g")
    ax[0,0].set_xlim(5136.4, 5140.4)
    ax[1,0].set_xlim(5136.4, 5140.4)
    ax[1,0].set_ylim(-4,4)

    ax[0,1].plot(wlsz23, fl23, "b")
    ax[0,1].plot(wlsz23, f23, "r")
    ax[1,1].plot(wlsz23, r23, "g")
    ax[0,1].set_xlim(5188, 5189.5)
    ax[1,1].set_xlim(5188, 5189.5)
    #ax[1,1].set_ylim(-4,4)

    ax[0,2].plot(wlsz24, fl24, "b", label='data')
    ax[0,2].plot(wlsz24, f24, "r", label='model')
    ax[0,2].legend(loc="lower center", prop={'size':10})

    ax[1,2].plot(wlsz24, r24, "g")
    ax[0,2].set_xlim(5258, 5260)
    ax[1,2].set_xlim(5258, 5260)

    ax[0,3].plot(wlsz24, fl24, "b")
    ax[0,3].plot(wlsz24, f24, "r")
    ax[1,3].plot(wlsz24, r24, "g")
    ax[0,3].set_xlim(5260, 5271)
    ax[1,3].set_xlim(5260, 5271)
    ax[1,3].set_ylim(-15, 15)


    for j in range(4):
        labels = ax[1,j].get_xticklabels()
        for label in labels:
            label.set_rotation(60)

    ax[0,0].set_ylabel(r"$\propto f_\lambda$")
    ax[1,0].set_ylabel(r"Residuals$/\sigma_P$")

    for i in range(2):
        for j in range(4):
            ax[i,j].xaxis.set_major_formatter(FSF("%.0f"))
            ax[i,j].xaxis.set_major_locator(MultipleLocator(1.))
            ax[i,j].tick_params(axis='both', which='major', labelsize=10)

    for i in [0,1]:
        for j in [1,2]:
            ax[i,j].xaxis.set_major_formatter(FSF("%.1f"))
            ax[i,j].xaxis.set_major_locator(MultipleLocator(0.5))

    for i in [0,1]:
        ax[i,3].xaxis.set_major_formatter(FSF("%.0f"))
        ax[i,3].xaxis.set_major_locator(MultipleLocator(2))

    class_label = ["0", "I", "II", "III"]
    for j in range(4):
        ax[0,j].set_title(class_label[j])
        ax[0,j].xaxis.set_ticklabels([])
        ax[0,j].set_ylim(0.25, 1.15)
        if j != 0:
            ax[0,j].yaxis.set_ticklabels([])

    fig.subplots_adjust(left=0.09, right=0.99, top=0.94, bottom=0.18, hspace=0.1, wspace=0.27)
    fig.text(0.48, 0.02, r"$\lambda$ (\AA)")
    fig.savefig("plots/badlines.png")
Example #21
def main():
    #Use argparse to determine if we've specified a config file
    import argparse
    parser = argparse.ArgumentParser(prog="flot_model.py", description="Plot the model and residuals using flot.")
    parser.add_argument("json", help="*.json file describing the model.")
    parser.add_argument("params", help="*.yaml file specifying run parameters.")
    # parser.add_argument("-o", "--output", help="*.html file for output")
    args = parser.parse_args()

    import json
    import yaml

    if args.json: #
        #assert that we actually specified a *.json file
        if ".json" not in args.json:
            import sys
            sys.exit("Must specify a *.json file.")

    if args.params: #
        #assert that we actually specified a *.yaml file
        if ".yaml" in args.params:
            yaml_file = args.params
            f = open(args.params)
            config = yaml.load(f)
            f.close()

        else:
            import sys
            sys.exit("Must specify a *.yaml file.")

    from StellarSpectra.model import Model
    from StellarSpectra.spectrum import DataSpectrum
    from StellarSpectra.grid_tools import TRES, HDF5Interface

    #Figure out what the relative path is to base
    import StellarSpectra
    base = StellarSpectra.__file__[:-26]

    myDataSpectrum = DataSpectrum.open(base + config['data'], orders=config['orders'])
    myInstrument = TRES()
    myHDF5Interface = HDF5Interface(base + config['HDF5_path'])

    myModel = Model.from_json(args.json, myDataSpectrum, myInstrument, myHDF5Interface)

    for model in myModel.OrderModels:

        #If an order has regions, read these out from model_final.json
        region_dict = model.get_regions_dict()
        print("Region dict", region_dict)
        #loop through these to determine the wavelength of each
        wl_regions = [value["mu"] for value in region_dict.values()]

        #Make vertical markings at the location of the wl_regions.

        #Get the data, sigmas, and mask
        wl, fl, sigma, mask = model.get_data()

        #Get the model flux
        flm = model.get_spectrum()

        #Get chebyshev
        cheb = model.get_Cheb()

        name = "Order {}".format(model.order)

        plot_data = order_json(wl, fl, sigma, mask, flm, cheb)
        plot_data.update({"wl_regions":wl_regions})
        print(plot_data['wl_regions'])

        render_template(base, plot_data)
Example #22
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import TRES, HDF5Interface
import StellarSpectra.constants as C
import numpy as np
import sys
from emcee.utils import MPIPool

myDataSpectrum = DataSpectrum.open("../data/WASP14/WASP-14_2009-06-15_04h13m57s_cb.spec.flux", orders=np.array([22]))
myInstrument = TRES()
myHDF5Interface = HDF5Interface("../libraries/PHOENIX_submaster.hdf5")

stellar_Starting = {"temp":(6000, 6100), "logg":(3.9, 4.2), "Z":(-0.6, -0.3), "vsini":(4, 6), "vz":(15.0, 16.0), "logOmega":(-19.665, -19.664)}

stellar_tuple = C.dictkeys_to_tuple(stellar_Starting)

#cheb_Starting = {"c1": (-.02, -0.015), "c2": (-.0195, -0.0165), "c3": (-.005, .0)}
cheb_Starting = {"logc0": (-0.02, 0.02), "c1": (-.02, 0.02), "c2": (-0.02, 0.02), "c3": (-.02, 0.02)}
cov_Starting = {"sigAmp":(0.9, 1.1), "logAmp":(-14.4, -14), "l":(0.1, 0.25)}
cov_tuple = C.dictkeys_to_covtuple(cov_Starting)

myModel = Model(myDataSpectrum, myInstrument, myHDF5Interface, stellar_tuple=stellar_tuple, cov_tuple=cov_tuple)




def eval0():
    myModel.evaluate()

def eval1():
    myModel.evaluate()
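
eval0 and eval1 look like no-argument wrappers meant for benchmarking myModel.evaluate(); a sketch of timing them with the standard timeit module (an assumption about their intended use):

import timeit

print(timeit.timeit(eval0, number=10))  # seconds for 10 evaluations
print(timeit.timeit(eval1, number=10))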