Example #1
    def setup_class(self):
        self.DataSpectrum = DataSpectrum.open(
            "../data/WASP14/WASP-14_2009-06-15_04h13m57s_cb.spec.flux",
            orders=np.array([22]))
        self.Instrument = TRES()
        self.HDF5Interface = HDF5Interface(
            "../libraries/PHOENIX_submaster.hdf5")

        stellar_Starting = {
            "temp": 6000,
            "logg": 4.05,
            "Z": -0.4,
            "vsini": 10.5,
            "vz": 15.5,
            "logOmega": -19.665
        }
        stellar_tuple = C.dictkeys_to_tuple(stellar_Starting)

        cheb_tuple = ("c1", "c2", "c3")
        cov_tuple = ("sigAmp", "logAmp", "l")
        region_tuple = ("h", "loga", "mu", "sigma")

        self.Model = Model(self.DataSpectrum,
                           self.Instrument,
                           self.HDF5Interface,
                           stellar_tuple=stellar_tuple,
                           cheb_tuple=cheb_tuple,
                           cov_tuple=cov_tuple,
                           region_tuple=region_tuple,
                           outdir="")
Example #2
    def __init__(self, DataSpectrum, Instrument, LibraryHA, LibraryLA,
                 parameters, deltaParameters):
        '''Initialize the comparison object.

        :param DataSpectrum: the spectrum that provides a wl grid + natural resolution
        :type DataSpectrum: :obj:`grid_tools.DataSpectrum`
        :param Instrument: the instrument object on which the DataSpectrum was acquired (e.g., TRES, SPEX)
        :type Instrument: :obj:`grid_tools.Instrument`
        :param LibraryHA: the path to the native resolution spectral library
        :type LibraryHA: string
        :param LibraryLA: the path to the approximate spectral library
        :type LibraryLA: string

        '''

        self.DataSpectrum = DataSpectrum
        self.Instrument = Instrument

        self.HDF5InterfaceHA = HDF5Interface(LibraryHA)
        self.HDF5InterfaceLA = HDF5Interface(LibraryLA)

        print("Bounds of the grids are")
        print("HA", self.HDF5InterfaceHA.bounds)
        print("LA", self.HDF5InterfaceLA.bounds)

        #If the DataSpectrum contains more than one order, only the first one is used. To work with a
        #different order, load only that order via the DataSpectrum(orders=[22]) flag.
        self.wl = self.DataSpectrum.wls[0]

        self.fullModelLA = Model(self.DataSpectrum,
                                 self.Instrument,
                                 self.HDF5InterfaceLA,
                                 stellar_tuple=("temp", "logg", "Z", "vsini",
                                                "vz", "logOmega"),
                                 cheb_tuple=("c1", "c2", "c3"),
                                 cov_tuple=("sigAmp", "logAmp", "l"),
                                 region_tuple=("loga", "mu", "sigma"))
        self.modelLA = self.fullModelLA.OrderModels[0]

        self.fullModelHA = ModelHA(self.DataSpectrum,
                                   self.Instrument,
                                   self.HDF5InterfaceHA,
                                   stellar_tuple=("temp", "logg", "Z", "vsini",
                                                  "vz", "logOmega"),
                                   cheb_tuple=("c1", "c2", "c3"),
                                   cov_tuple=("sigAmp", "logAmp", "l"),
                                   region_tuple=("loga", "mu", "sigma"))
        self.modelHA = self.fullModelHA.OrderModels[0]

        self.parameters = parameters
        self.deltaParameters = deltaParameters

        self.base = self.get_specHA(self.parameters)
        self.baseLA = self.get_specLA(self.parameters)
        self.approxResid = get_resid_spec(
            self.base, self.baseLA)  #modelHA - modelLA @ parameters
Example #3
def main():
    #Use argparse to determine if we've specified a config file
    import argparse
    parser = argparse.ArgumentParser(prog="plotly_model.py", description="Plot the model and residuals using plot.ly")
    parser.add_argument("json", help="*.json file describing the model.")
    parser.add_argument("params", help="*.yaml file specifying run parameters.")
    # parser.add_argument("-o", "--output", help="*.html file for output")
    args = parser.parse_args()

    import json
    import yaml

    if args.json:
        #check that we actually specified a *.json file
        if ".json" not in args.json:
            import sys
            sys.exit("Must specify a *.json file.")

    if args.params:
        #check that we actually specified a *.yaml file
        if ".yaml" in args.params:
            yaml_file = args.params
            f = open(args.params)
            config = yaml.safe_load(f)
            f.close()

        else:
            import sys
            sys.exit("Must specify a *.yaml file.")

    from StellarSpectra.model import Model
    from StellarSpectra.spectrum import DataSpectrum
    from StellarSpectra.grid_tools import TRES, HDF5Interface

    #Figure out what the relative path is to base
    import StellarSpectra
    base = StellarSpectra.__file__[:-26]

    myDataSpectrum = DataSpectrum.open(base + config['data'], orders=config['orders'])
    myInstrument = TRES()
    myHDF5Interface = HDF5Interface(base + config['HDF5_path'])

    myModel = Model.from_json(args.json, myDataSpectrum, myInstrument, myHDF5Interface)

    for model in myModel.OrderModels:

        #Get the data
        wl, fl = model.get_data()

        #Get the model flux
        flm = model.get_spectrum()

        #Get residuals
        residuals = model.get_residuals()

        name = "Order {}".format(model.order)

        url = plotly_order(name, wl, fl, flm, residuals)
        print(url)
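
The plotly_order helper called above is not shown in these examples. A minimal sketch of what such a helper could look like, written against the current plotly API and saving a local HTML file instead of returning a hosted plot.ly URL; the trace names and output path are assumptions:

import plotly.graph_objects as go

def plotly_order(name, wl, fl, flm, residuals):
    #Build a three-trace figure: data, model flux, and residuals versus wavelength.
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=wl, y=fl, mode="lines", name="data"))
    fig.add_trace(go.Scatter(x=wl, y=flm, mode="lines", name="model"))
    fig.add_trace(go.Scatter(x=wl, y=residuals, mode="lines", name="residuals"))
    fig.update_layout(title=name, xaxis_title="wavelength (Angstroms)", yaxis_title="flux")
    #Write the figure to disk and return the path in place of a plot.ly URL.
    out = "{}.html".format(name.replace(" ", "_"))
    fig.write_html(out)
    return out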
Example #4
    def __init__(self, DataSpectrum, Instrument, LibraryHA, LibraryLA, parameters, deltaParameters):
        """Initialize the comparison object.

        :param DataSpectrum: the spectrum that provides a wl grid + natural resolution
        :type DataSpectrum: :obj:`grid_tools.DataSpectrum`
        :param Instrument: the instrument object on which the DataSpectrum was acquired (e.g., TRES, SPEX)
        :type Instrument: :obj:`grid_tools.Instrument`
        :param LibraryHA: the path to the native resolution spectral library
        :type LibraryHA: string
        :param LibraryLA: the path to the approximate spectral library
        :type LibraryLA: string

        """

        self.DataSpectrum = DataSpectrum
        self.Instrument = Instrument

        self.HDF5InterfaceHA = HDF5Interface(LibraryHA)
        self.HDF5InterfaceLA = HDF5Interface(LibraryLA)

        print("Bounds of the grids are")
        print("HA", self.HDF5InterfaceHA.bounds)
        print("LA", self.HDF5InterfaceLA.bounds)

        # If the DataSpectrum contains more than one order, only the first one is used. To work with a
        # different order, load only that order via the DataSpectrum(orders=[22]) flag.
        self.wl = self.DataSpectrum.wls[0]

        self.fullModelLA = Model(
            self.DataSpectrum,
            self.Instrument,
            self.HDF5InterfaceLA,
            stellar_tuple=("temp", "logg", "Z", "vsini", "vz", "logOmega"),
            cheb_tuple=("c1", "c2", "c3"),
            cov_tuple=("sigAmp", "logAmp", "l"),
            region_tuple=("loga", "mu", "sigma"),
        )
        self.modelLA = self.fullModelLA.OrderModels[0]

        self.fullModelHA = ModelHA(
            self.DataSpectrum,
            self.Instrument,
            self.HDF5InterfaceHA,
            stellar_tuple=("temp", "logg", "Z", "vsini", "vz", "logOmega"),
            cheb_tuple=("c1", "c2", "c3"),
            cov_tuple=("sigAmp", "logAmp", "l"),
            region_tuple=("loga", "mu", "sigma"),
        )
        self.modelHA = self.fullModelHA.OrderModels[0]

        self.parameters = parameters
        self.deltaParameters = deltaParameters

        self.base = self.get_specHA(self.parameters)
        self.baseLA = self.get_specLA(self.parameters)
        self.approxResid = get_resid_spec(self.base, self.baseLA)  # modelHA - modelLA @ parameters
Example #5
class TestModel:
    def setup_class(self):
        self.DataSpectrum = DataSpectrum.open("../data/WASP14/WASP-14_2009-06-15_04h13m57s_cb.spec.flux", orders=np.array([22]))
        self.Instrument = TRES()
        self.HDF5Interface = HDF5Interface("../libraries/PHOENIX_submaster.hdf5")

        stellar_Starting = {"temp":6000, "logg":4.05, "Z":-0.4, "vsini":10.5, "vz":15.5, "logOmega":-19.665}
        stellar_tuple = C.dictkeys_to_tuple(stellar_Starting)

        cheb_tuple = ("c1", "c2", "c3")
        cov_tuple = ("sigAmp", "logAmp", "l")
        region_tuple = ("h", "loga", "mu", "sigma")

        self.Model = Model(self.DataSpectrum, self.Instrument, self.HDF5Interface, stellar_tuple=stellar_tuple, cheb_tuple=cheb_tuple,
                           cov_tuple=cov_tuple, region_tuple=region_tuple, outdir="")

    def test_update(self):
        self.Model.OrderModels[0].update_Cheb({"c1": -0.017, "c2": -0.017, "c3": -0.003})
        cov_Starting = {"sigAmp":1, "logAmp":-14.0, "l":0.15}
        self.Model.OrderModels[0].update_Cov(cov_Starting)

        params = {"temp":6005, "logg":4.05, "Z":-0.4, "vsini":10.5, "vz":15.5, "logOmega":-19.665}
        self.Model.update_Model(params) #This also updates downsampled_fls
        #For each order in myModel, evaluate and sum the results.

    def test_evaluate(self):
        self.Model.evaluate()

    def test_to_json(self):
        self.Model.to_json()

    def test_from_json(self):
        newModel = Model.from_json("final_model.json", self.DataSpectrum, self.Instrument, self.HDF5Interface)
Example #6
    def setup_class(self):
        self.DataSpectrum = DataSpectrum.open("../data/WASP14/WASP-14_2009-06-15_04h13m57s_cb.spec.flux", orders=np.array([22]))
        self.Instrument = TRES()
        self.HDF5Interface = HDF5Interface("../libraries/PHOENIX_submaster.hdf5")

        stellar_Starting = {"temp":6000, "logg":4.05, "Z":-0.4, "vsini":10.5, "vz":15.5, "logOmega":-19.665}
        stellar_tuple = C.dictkeys_to_tuple(stellar_Starting)

        cheb_tuple = ("c1", "c2", "c3")
        cov_tuple = ("sigAmp", "logAmp", "l")
        region_tuple = ("h", "loga", "mu", "sigma")

        self.Model = Model(self.DataSpectrum, self.Instrument, self.HDF5Interface, stellar_tuple=stellar_tuple, cheb_tuple=cheb_tuple,
                           cov_tuple=cov_tuple, region_tuple=region_tuple, outdir="")
Example #7
class AccuracyComparison:
    '''
    Gather the data products necessary to test the accuracy of the reduced grid sizes.

    '''
    def __init__(self, DataSpectrum, Instrument, LibraryHA, LibraryLA,
                 parameters, deltaParameters):
        '''Initialize the comparison object.

        :param DataSpectrum: the spectrum that provides a wl grid + natural resolution
        :type DataSpectrum: :obj:`grid_tools.DataSpectrum`
        :param Instrument: the instrument object on which the DataSpectrum was acquired (e.g., TRES, SPEX)
        :type Instrument: :obj:`grid_tools.Instrument`
        :param LibraryHA: the path to the native resolution spectral library
        :type LibraryHA: string
        :param LibraryLA: the path to the approximate spectral library
        :type LibraryLA: string

        '''

        self.DataSpectrum = DataSpectrum
        self.Instrument = Instrument

        self.HDF5InterfaceHA = HDF5Interface(LibraryHA)
        self.HDF5InterfaceLA = HDF5Interface(LibraryLA)

        print("Bounds of the grids are")
        print("HA", self.HDF5InterfaceHA.bounds)
        print("LA", self.HDF5InterfaceLA.bounds)

        #If the DataSpectrum contains more than one order, only the first one is used. To work with a
        #different order, load only that order via the DataSpectrum(orders=[22]) flag.
        self.wl = self.DataSpectrum.wls[0]

        self.fullModelLA = Model(self.DataSpectrum,
                                 self.Instrument,
                                 self.HDF5InterfaceLA,
                                 stellar_tuple=("temp", "logg", "Z", "vsini",
                                                "vz", "logOmega"),
                                 cheb_tuple=("c1", "c2", "c3"),
                                 cov_tuple=("sigAmp", "logAmp", "l"),
                                 region_tuple=("loga", "mu", "sigma"))
        self.modelLA = self.fullModelLA.OrderModels[0]

        self.fullModelHA = ModelHA(self.DataSpectrum,
                                   self.Instrument,
                                   self.HDF5InterfaceHA,
                                   stellar_tuple=("temp", "logg", "Z", "vsini",
                                                  "vz", "logOmega"),
                                   cheb_tuple=("c1", "c2", "c3"),
                                   cov_tuple=("sigAmp", "logAmp", "l"),
                                   region_tuple=("loga", "mu", "sigma"))
        self.modelHA = self.fullModelHA.OrderModels[0]

        self.parameters = parameters
        self.deltaParameters = deltaParameters

        self.base = self.get_specHA(self.parameters)
        self.baseLA = self.get_specLA(self.parameters)
        self.approxResid = get_resid_spec(
            self.base, self.baseLA)  #modelHA - modelLA @ parameters

    def get_specHA(self, parameters):
        '''
        Update the model and then query the spectrum

        :param parameters: Dictionary of fundamental stellar parameters
        :type parameters: dict

        :returns: flux spectrum
        '''

        params = parameters.copy()
        params.update({"vsini": 0., "vz": 0, "logOmega": 0.})
        self.fullModelHA.update_Model(params)

        return self.modelHA.get_spectrum()

    def get_specLA(self, parameters):
        '''
        Update the model and then query the spectrum

        :param parameters: Dictionary of fundamental stellar parameters
        :type parameters: dict

        :returns: flux spectrum
        '''

        params = parameters.copy()
        params.update({"vsini": 0., "vz": 0, "logOmega": 0.})
        self.fullModelLA.update_Model(params)

        return self.modelLA.get_spectrum()

    def createEnvelopeSpectrum(self, direction='both'):
        '''
        The parameters should always be specified at a grid point of the HDF5 file.

        From that grid point, offset by deltaParameters and interpolate.

        The direction argument specifies whether to interpolate up (+10 K, etc.), down (-10 K), or
        do both and then take the minimum envelope of the two.
        For now, only up is implemented.

        '''
        #For each key, add the delta parameters
        temp_params = self.parameters.copy()
        temp_params["temp"] += self.deltaParameters["temp"]
        temp_spec = get_resid_spec(self.base, self.get_specHA(temp_params))

        logg_params = self.parameters.copy()
        logg_params["logg"] += self.deltaParameters["logg"]
        logg_spec = get_resid_spec(self.base, self.get_specHA(logg_params))

        Z_params = self.parameters.copy()
        Z_params["Z"] += self.deltaParameters["Z"]
        Z_spec = get_resid_spec(self.base, self.get_specHA(Z_params))

        self.envelope = get_min_spec([temp_spec, logg_spec, Z_spec])

    def plot_quality(self):
        '''
        Visualize the quality of the interpolation.

        Two-panel plot.

        Top: the HA and LA spectra

        Bottom: fractional residual between the HA and LA spectra, compared against the HA interpolation-error envelope for deltaParameters

        '''

        self.createEnvelopeSpectrum()

        fig, ax = plt.subplots(nrows=2, figsize=(8, 6), sharex=True)
        ax[0].plot(self.wl, self.base, "b", label="HA")
        ax[0].plot(self.wl, self.baseLA, "r", label="LA")
        ax[0].legend()
        ax[0].set_ylabel(r"$\propto f_\lambda$")
        ax[0].set_title(
            "Temp={temp:} logg={logg:} Z={Z:}".format(**self.parameters))

        ax[1].semilogy(self.wl, self.approxResid, "k", label="(HA - LA)/HA")
        ax[1].semilogy(self.wl, self.envelope, "b", label="Interp Envelope")
        ax[1].legend()
        ax[1].set_xlabel(r"$\lambda$\AA")
        ax[1].set_ylabel("fractional error")

        return fig
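
A minimal usage sketch for the AccuracyComparison class defined above; the data file, the HA/LA library paths, the grid-point parameters, and the deltaParameters steps below are placeholders, not values taken from the source:

import numpy as np
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import TRES

#Placeholder data set and libraries; the parameters must sit on a grid point of the HA library.
myDataSpectrum = DataSpectrum.open("../data/WASP14/WASP-14_2009-06-15_04h13m57s_cb.spec.flux",
                                   orders=np.array([22]))
comparison = AccuracyComparison(myDataSpectrum, TRES(),
                                "../libraries/PHOENIX_HA.hdf5",  #LibraryHA (placeholder path)
                                "../libraries/PHOENIX_LA.hdf5",  #LibraryLA (placeholder path)
                                parameters={"temp": 6000, "logg": 4.5, "Z": -0.5},
                                deltaParameters={"temp": 100., "logg": 0.5, "Z": 0.5})
fig = comparison.plot_quality()
fig.savefig("accuracy_comparison.png")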
Example #8
myHDF5Interface = HDF5Interface(config['HDF5_path'])

stellar_Starting = config['stellar_params']
stellar_tuple = C.dictkeys_to_tuple(stellar_Starting)

cheb_Starting = config['cheb_params']
cheb_tuple = ("logc0", "c1", "c2", "c3")

cov_Starting = config['cov_params']
cov_tuple = C.dictkeys_to_cov_global_tuple(cov_Starting)

region_tuple = ("h", "loga", "mu", "sigma")
region_MH_cov = np.array([0.05, 0.04, 0.02, 0.02])**2 * np.identity(
    len(region_tuple))

myModel = Model.from_json(args.json, myDataSpectrum, myInstrument,
                          myHDF5Interface)

model = myModel.OrderModels[0]

#Get the data
wl, fl = model.get_data()

#Get the model flux
flm = model.get_spectrum()

#Get residuals
residuals = model.get_residuals()

#Get the Chebyshev spectrum
cheb = model.get_Cheb()
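
The quantities extracted above (data, model flux, residuals, Chebyshev spectrum) are naturally viewed together; a matplotlib sketch under the assumption that wl, fl, flm, residuals, and cheb are equal-length 1D arrays:

import matplotlib.pyplot as plt

fig, ax = plt.subplots(nrows=3, figsize=(10, 7), sharex=True)
ax[0].plot(wl, fl, "k", label="data")
ax[0].plot(wl, flm, "r", label="model")
ax[0].legend()
ax[1].plot(wl, residuals, "k", label="residuals")
ax[1].legend()
ax[2].plot(wl, cheb, "b", label="Chebyshev spectrum")
ax[2].legend()
ax[2].set_xlabel("wavelength (Angstroms)")
fig.savefig("order_summary.png")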
Example #9
def main():
    #Use argparse to determine if we've specified a config file
    import argparse
    parser = argparse.ArgumentParser(
        prog="flot_model.py",
        description="Plot the model and residuals using flot.")
    parser.add_argument("json", help="*.json file describing the model.")
    parser.add_argument("params",
                        help="*.yaml file specifying run parameters.")
    # parser.add_argument("-o", "--output", help="*.html file for output")
    args = parser.parse_args()

    import json
    import yaml

    if args.json:
        #check that we actually specified a *.json file
        if ".json" not in args.json:
            import sys
            sys.exit("Must specify a *.json file.")

    if args.params:
        #check that we actually specified a *.yaml file
        if ".yaml" in args.params:
            yaml_file = args.params
            f = open(args.params)
            config = yaml.safe_load(f)
            f.close()

        else:
            import sys
            sys.exit("Must specify a *.yaml file.")

    from StellarSpectra.model import Model
    from StellarSpectra.spectrum import DataSpectrum
    from StellarSpectra.grid_tools import TRES, HDF5Interface

    #Figure out what the relative path is to base
    import StellarSpectra
    base = StellarSpectra.__file__[:-26]

    myDataSpectrum = DataSpectrum.open(base + config['data'],
                                       orders=config['orders'])
    myInstrument = TRES()
    myHDF5Interface = HDF5Interface(base + config['HDF5_path'])

    myModel = Model.from_json(args.json, myDataSpectrum, myInstrument,
                              myHDF5Interface)

    for model in myModel.OrderModels:

        #If an order has regions, read these out from model_final.json
        region_dict = model.get_regions_dict()
        print("Region dict", region_dict)
        #loop through these to determine the wavelength of each
        wl_regions = [value["mu"] for value in region_dict.values()]

        #Make vertical markings at the location of the wl_regions.

        #Get the data, sigmas, and mask
        wl, fl, sigma, mask = model.get_data()

        #Get the model flux
        flm = model.get_spectrum()

        #Get chebyshev
        cheb = model.get_Cheb()

        name = "Order {}".format(model.order)

        plot_data = order_json(wl, fl, sigma, mask, flm, cheb)
        plot_data.update({"wl_regions": wl_regions})
        print(plot_data['wl_regions'])

        render_template(base, plot_data)
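
The *.yaml params file consumed by this script is not shown. Judging from the keys accessed above ('data', 'orders', 'HDF5_path'), a minimal example could be generated as follows; the paths and order numbers are placeholders:

import yaml

config = {
    "data": "data/WASP14/WASP14-2009-06-14.hdf5",  #path relative to the StellarSpectra base
    "orders": [21, 22, 23],
    "HDF5_path": "libraries/PHOENIX_TRES_F.hdf5",
}
with open("params.yaml", "w") as f:
    yaml.safe_dump(config, f, default_flow_style=False)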
Example #10
myHDF5Interface = HDF5Interface(config['HDF5_path'])

stellar_Starting = config['stellar_params']
stellar_tuple = C.dictkeys_to_tuple(stellar_Starting)

cheb_Starting = config['cheb_params']
cheb_tuple = ("logc0", "c1", "c2", "c3")

cov_Starting = config['cov_params']
cov_tuple = C.dictkeys_to_cov_global_tuple(cov_Starting)

region_tuple = ("h", "loga", "mu", "sigma")
region_MH_cov = np.array([0.05, 0.04, 0.02, 0.02])**2 * np.identity(len(region_tuple))


myModel = Model.from_json(args.json, myDataSpectrum, myInstrument, myHDF5Interface)

model = myModel.OrderModels[0]

#Get the data
wl, fl = model.get_data()

#Get the model flux
flm = model.get_spectrum()

#Get residuals
residuals = model.get_residuals()

#Get the Chebyshev spectrum
cheb = model.get_Cheb()
Example #11
#Use the model_final.json to figure out how many orders there are
from StellarSpectra.model import Model
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import TRES, HDF5Interface

#Figure out what the relative path is to base
import StellarSpectra
base = StellarSpectra.__file__[:-26]

myDataSpectrum = DataSpectrum.open(base + config['data'],
                                   orders=config['orders'])
myInstrument = TRES()
myHDF5Interface = HDF5Interface(base + config['HDF5_path'])

myModel = Model.from_json(args.run + "/model_final.json", myDataSpectrum,
                          myInstrument, myHDF5Interface)
orders = [orderModel.order for orderModel in myModel.OrderModels]

flot_plots = {22: "Hi"}

#If the Jinja templater is going to work, it needs a list of orders, and a list of how many regions
#are in each order. For each order, there is a dictionary of global parameters.

#Set the categories as the decomposition of the run directory, excluding
#output and the "run00" directory.
#For example, output/WASP14/Kurucz/22/run01 becomes categories="WASP14 Kurucz 22"
categories = " ".join(args.run.split("/")[1:-1])

templateVars = {
Example #12
import numpy as np
from StellarSpectra.model import Model
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import TRES, HDF5Interface
from StellarSpectra import utils

myDataSpectrum = DataSpectrum.open("../../data/WASP14/WASP14-2009-06-14.hdf5", orders=np.array([21,22,23]))

myInstrument = TRES()

myHDF5Interface = HDF5Interface("../../libraries/PHOENIX_TRES_F.hdf5")

#Load a model using the JSON file
myModel = Model.from_json("WASP14_PHOENIX_model0_final.json", myDataSpectrum, myInstrument, myHDF5Interface)

myOrderModel = myModel.OrderModels[1]
model_flux = myOrderModel.get_spectrum()

spec = myModel.get_data()
wl = spec.wls[1]
fl = spec.fls[1]

model_fl = myOrderModel.get_spectrum()
residuals = fl - model_fl

mask = spec.masks[1]
cov = myModel.OrderModels[1].get_Cov().todense()

np.save("PHOENIX_covariance_matrix.npy", cov)

import sys; sys.exit()
Example #13
    def test_from_json(self):
        newModel = Model.from_json("final_model.json", self.DataSpectrum,
                                   self.Instrument, self.HDF5Interface)
Example #14
import numpy as np
from StellarSpectra.model import Model
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import TRES, HDF5Interface
from StellarSpectra import utils

myDataSpectrum = DataSpectrum.open("../../data/WASP14/WASP14-2009-06-14.hdf5",
                                   orders=np.array([21, 22, 23]))

myInstrument = TRES()

myHDF5Interface = HDF5Interface("../../libraries/PHOENIX_TRES_F.hdf5")

#Load a model using the JSON file
myModel = Model.from_json("WASP14_PHOENIX_model0_final.json", myDataSpectrum,
                          myInstrument, myHDF5Interface)

myOrderModel = myModel.OrderModels[1]
model_flux = myOrderModel.get_spectrum()

spec = myModel.get_data()
wl = spec.wls[1]
fl = spec.fls[1]

model_fl = myOrderModel.get_spectrum()
residuals = fl - model_fl

mask = spec.masks[1]
cov = myModel.OrderModels[1].get_Cov().todense()

np.save("PHOENIX_covariance_matrix.npy", cov)
Example #15
def main():
    #Use argparse to determine if we've specified a config file
    import argparse
    parser = argparse.ArgumentParser(
        prog="plotly_model.py",
        description="Plot the model and residuals using plot.ly")
    parser.add_argument("json", help="*.json file describing the model.")
    parser.add_argument("params",
                        help="*.yaml file specifying run parameters.")
    # parser.add_argument("-o", "--output", help="*.html file for output")
    args = parser.parse_args()

    import json
    import yaml

    if args.json:
        #check that we actually specified a *.json file
        if ".json" not in args.json:
            import sys
            sys.exit("Must specify a *.json file.")

    if args.params:
        #check that we actually specified a *.yaml file
        if ".yaml" in args.params:
            yaml_file = args.params
            f = open(args.params)
            config = yaml.safe_load(f)
            f.close()

        else:
            import sys
            sys.exit("Must specify a *.yaml file.")

    from StellarSpectra.model import Model
    from StellarSpectra.spectrum import DataSpectrum
    from StellarSpectra.grid_tools import TRES, HDF5Interface

    #Figure out what the relative path is to base
    import StellarSpectra
    base = StellarSpectra.__file__[:-26]

    myDataSpectrum = DataSpectrum.open(base + config['data'],
                                       orders=config['orders'])
    myInstrument = TRES()
    myHDF5Interface = HDF5Interface(base + config['HDF5_path'])

    myModel = Model.from_json(args.json, myDataSpectrum, myInstrument,
                              myHDF5Interface)

    for model in myModel.OrderModels:

        #Get the data
        wl, fl = model.get_data()

        #Get the model flux
        flm = model.get_spectrum()

        #Get residuals
        residuals = model.get_residuals()

        name = "Order {}".format(model.order)

        url = plotly_order(name, wl, fl, flm, residuals)
        print(url)
Example #16
import numpy as np
from StellarSpectra.model import Model
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import TRES, SPEX, HDF5Interface
from StellarSpectra import utils

myDataSpectrum = DataSpectrum.open("../../data/WASP14/WASP14-2009-06-14.hdf5",
                                   orders=np.array([21, 22, 23]))
myInstrument = TRES()
myHDF5Interface = HDF5Interface("../../libraries/Kurucz_TRES.hdf5")

#Load a model using the JSON file
#Taken from:
#/n/home07/iczekala/StellarSpectra/output/WASP14/Kurucz/21_22_23/logg/cov/2014-08-06/run18
myModel = Model.from_json("WASP14_Kurucz_logg_model_final.json",
                          myDataSpectrum, myInstrument, myHDF5Interface)

myOrderModel = myModel.OrderModels[1]
model_flux = myOrderModel.get_spectrum()

spec = myModel.get_data()
wl = spec.wls[1]
fl = spec.fls[1]

model_fl = myOrderModel.get_spectrum()
residuals = fl - model_fl

mask = spec.masks[1]
cov = myModel.OrderModels[1].get_Cov().todense()

draws = utils.random_draws(cov, num=50)
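
utils.random_draws samples realizations from the covariance matrix; assuming it returns an iterable of 1D arrays on the same wavelength grid as the order, a sketch that overplots the draws on the observed residuals:

import matplotlib.pyplot as plt

fig, ax = plt.subplots(figsize=(10, 4))
for draw in draws:
    ax.plot(wl, draw, color="0.7", lw=0.5)  #individual covariance draws
ax.plot(wl, residuals, "k", lw=0.8, label="residuals")
ax.legend()
ax.set_xlabel("wavelength (Angstroms)")
ax.set_ylabel("flux residual")
fig.savefig("Kurucz_residual_draws.png")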
Example #17
class AccuracyComparison:
    """
    Gather the data products necessary to test the accuracy of the reduced grid sizes.

    """

    def __init__(self, DataSpectrum, Instrument, LibraryHA, LibraryLA, parameters, deltaParameters):
        """Initialize the comparison object.

        :param DataSpectrum: the spectrum that provides a wl grid + natural resolution
        :type DataSpectrum: :obj:`grid_tools.DataSpectrum`
        :param Instrument: the instrument object on which the DataSpectrum was acquired (e.g., TRES, SPEX)
        :type Instrument: :obj:`grid_tools.Instrument`
        :param LibraryHA: the path to the native resolution spectral library
        :type LibraryHA: string
        :param LibraryLA: the path to the approximate spectral library
        :type LibraryLA: string

        """

        self.DataSpectrum = DataSpectrum
        self.Instrument = Instrument

        self.HDF5InterfaceHA = HDF5Interface(LibraryHA)
        self.HDF5InterfaceLA = HDF5Interface(LibraryLA)

        print("Bounds of the grids are")
        print("HA", self.HDF5InterfaceHA.bounds)
        print("LA", self.HDF5InterfaceLA.bounds)

        # If the DataSpectrum contains more than one order, only the first one is used. To work with a
        # different order, load only that order via the DataSpectrum(orders=[22]) flag.
        self.wl = self.DataSpectrum.wls[0]

        self.fullModelLA = Model(
            self.DataSpectrum,
            self.Instrument,
            self.HDF5InterfaceLA,
            stellar_tuple=("temp", "logg", "Z", "vsini", "vz", "logOmega"),
            cheb_tuple=("c1", "c2", "c3"),
            cov_tuple=("sigAmp", "logAmp", "l"),
            region_tuple=("loga", "mu", "sigma"),
        )
        self.modelLA = self.fullModelLA.OrderModels[0]

        self.fullModelHA = ModelHA(
            self.DataSpectrum,
            self.Instrument,
            self.HDF5InterfaceHA,
            stellar_tuple=("temp", "logg", "Z", "vsini", "vz", "logOmega"),
            cheb_tuple=("c1", "c2", "c3"),
            cov_tuple=("sigAmp", "logAmp", "l"),
            region_tuple=("loga", "mu", "sigma"),
        )
        self.modelHA = self.fullModelHA.OrderModels[0]

        self.parameters = parameters
        self.deltaParameters = deltaParameters

        self.base = self.get_specHA(self.parameters)
        self.baseLA = self.get_specLA(self.parameters)
        self.approxResid = get_resid_spec(self.base, self.baseLA)  # modelHA - modelLA @ parameters

    def get_specHA(self, parameters):
        """
        Update the model and then query the spectrum

        :param parameters: Dictionary of fundamental stellar parameters
        :type parameters: dict

        :returns: flux spectrum
        """

        params = parameters.copy()
        params.update({"vsini": 0.0, "vz": 0, "logOmega": 0.0})
        self.fullModelHA.update_Model(params)

        return self.modelHA.get_spectrum()

    def get_specLA(self, parameters):
        """
        Update the model and then query the spectrum

        :param parameters: Dictionary of fundamental stellar parameters
        :type parameters: dict

        :returns: flux spectrum
        """

        params = parameters.copy()
        params.update({"vsini": 0.0, "vz": 0, "logOmega": 0.0})
        self.fullModelLA.update_Model(params)

        return self.modelLA.get_spectrum()

    def createEnvelopeSpectrum(self, direction="both"):
        """
        The parameters should always be specified at a grid point of the HDF5 file.

        From that grid point, offset by deltaParameters and interpolate.

        The direction argument specifies whether to interpolate up (+10 K, etc.), down (-10 K), or
        do both and then take the minimum envelope of the two.
        For now, only up is implemented.

        """
        # For each key, add the delta parameters
        temp_params = self.parameters.copy()
        temp_params["temp"] += self.deltaParameters["temp"]
        temp_spec = get_resid_spec(self.base, self.get_specHA(temp_params))

        logg_params = self.parameters.copy()
        logg_params["logg"] += self.deltaParameters["logg"]
        logg_spec = get_resid_spec(self.base, self.get_specHA(logg_params))

        Z_params = self.parameters.copy()
        Z_params["Z"] += self.deltaParameters["Z"]
        Z_spec = get_resid_spec(self.base, self.get_specHA(Z_params))

        self.envelope = get_min_spec([temp_spec, logg_spec, Z_spec])

    def plot_quality(self):
        """
        Visualize the quality of the interpolation.

        Two-panel plot.

        Top: the HA and LA spectra

        Bottom: fractional residual between the HA and LA spectra, compared against the HA interpolation-error envelope for deltaParameters

        """

        self.createEnvelopeSpectrum()

        fig, ax = plt.subplots(nrows=2, figsize=(8, 6), sharex=True)
        ax[0].plot(self.wl, self.base, "b", label="HA")
        ax[0].plot(self.wl, self.baseLA, "r", label="LA")
        ax[0].legend()
        ax[0].set_ylabel(r"$\propto f_\lambda$")
        ax[0].set_title("Temp={temp:} logg={logg:} Z={Z:}".format(**self.parameters))

        ax[1].semilogy(self.wl, self.approxResid, "k", label="(HA - LA)/HA")
        ax[1].semilogy(self.wl, self.envelope, "b", label="Interp Envelope")
        ax[1].legend()
        ax[1].set_xlabel(r"$\lambda$\AA")
        ax[1].set_ylabel("fractional error")

        return fig
Example #18
import numpy as np
from StellarSpectra.model import Model
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import TRES, SPEX, HDF5Interface
from StellarSpectra import utils


myDataSpectrum = DataSpectrum.open("../../data/WASP14/WASP14-2009-06-14.hdf5", orders=np.array([21, 22, 23]))
myInstrument = TRES()
myHDF5Interface = HDF5Interface("../../libraries/Kurucz_TRES.hdf5")

# Load a model using the JSON file
# Taken from:
# /n/home07/iczekala/StellarSpectra/output/WASP14/Kurucz/21_22_23/logg/cov/2014-08-06/run18
myModel = Model.from_json("WASP14_Kurucz_logg_model_final.json", myDataSpectrum, myInstrument, myHDF5Interface)

myOrderModel = myModel.OrderModels[1]
model_flux = myOrderModel.get_spectrum()

spec = myModel.get_data()
wl = spec.wls[1]
fl = spec.fls[1]

model_fl = myOrderModel.get_spectrum()
residuals = fl - model_fl

mask = spec.masks[1]
cov = myModel.OrderModels[1].get_Cov().todense()

draws = utils.random_draws(cov, num=50)
Example #19
    def test_from_json(self):
        newModel = Model.from_json("final_model.json", self.DataSpectrum, self.Instrument, self.HDF5Interface)
Example #20
class TestModel:
    def setup_class(self):
        self.DataSpectrum = DataSpectrum.open(
            "../data/WASP14/WASP-14_2009-06-15_04h13m57s_cb.spec.flux",
            orders=np.array([22]))
        self.Instrument = TRES()
        self.HDF5Interface = HDF5Interface(
            "../libraries/PHOENIX_submaster.hdf5")

        stellar_Starting = {
            "temp": 6000,
            "logg": 4.05,
            "Z": -0.4,
            "vsini": 10.5,
            "vz": 15.5,
            "logOmega": -19.665
        }
        stellar_tuple = C.dictkeys_to_tuple(stellar_Starting)

        cheb_tuple = ("c1", "c2", "c3")
        cov_tuple = ("sigAmp", "logAmp", "l")
        region_tuple = ("h", "loga", "mu", "sigma")

        self.Model = Model(self.DataSpectrum,
                           self.Instrument,
                           self.HDF5Interface,
                           stellar_tuple=stellar_tuple,
                           cheb_tuple=cheb_tuple,
                           cov_tuple=cov_tuple,
                           region_tuple=region_tuple,
                           outdir="")

    def test_update(self):
        self.Model.OrderModels[0].update_Cheb({
            "c1": -0.017,
            "c2": -0.017,
            "c3": -0.003
        })
        cov_Starting = {"sigAmp": 1, "logAmp": -14.0, "l": 0.15}
        self.Model.OrderModels[0].update_Cov(cov_Starting)

        params = {
            "temp": 6005,
            "logg": 4.05,
            "Z": -0.4,
            "vsini": 10.5,
            "vz": 15.5,
            "logOmega": -19.665
        }
        self.Model.update_Model(params)  #This also updates downsampled_fls
        #For each order in myModel, evaluate and sum the results.

    def test_evaluate(self):
        self.Model.evaluate()

    def test_to_json(self):
        self.Model.to_json()

    def test_from_json(self):
        newModel = Model.from_json("final_model.json", self.DataSpectrum,
                                   self.Instrument, self.HDF5Interface)
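
TestModel above follows pytest's class-based layout (setup_class plus test_* methods); a minimal way to run just this class programmatically, where the module filename test_model.py is an assumption:

import pytest

pytest.main(["-v", "test_model.py::TestModel"])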
Example #21
import numpy as np
from StellarSpectra.model import Model
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import SPEX, HDF5Interface
from StellarSpectra import utils

myDataSpectrum = DataSpectrum.open("../../data/Gl51/Gl51RA.hdf5", orders=np.array([0]))
myInstrument = SPEX()
myHDF5Interface = HDF5Interface("../../libraries/PHOENIX_SPEX_M.hdf5")

#Load a model using the JSON file
#Taken from:
# /home/ian/Grad/Research/Disks/StellarSpectra/output/Gl51/PHOENIX/RA/region/logg/4_8sig/
myModel = Model.from_json("Gl51_model0_final.json", myDataSpectrum, myInstrument, myHDF5Interface)

myOrderModel = myModel.OrderModels[0]
model_flux = myOrderModel.get_spectrum()

spec = myModel.get_data()
wl = spec.wls[0]
fl = spec.fls[0]

model_fl = myOrderModel.get_spectrum()
residuals = fl - model_fl

mask = spec.masks[0]
cov = myModel.OrderModels[0].get_Cov().todense()

np.save("Gl51_covariance_matrix.npy", cov)
import sys
sys.exit()
Example #22
def main():
    #Use argparse to determine if we've specified a config file
    import argparse
    parser = argparse.ArgumentParser(prog="flot_model.py", description="Plot the model and residuals using flot.")
    parser.add_argument("json", help="*.json file describing the model.")
    parser.add_argument("params", help="*.yaml file specifying run parameters.")
    # parser.add_argument("-o", "--output", help="*.html file for output")
    args = parser.parse_args()

    import json
    import yaml

    if args.json:
        #check that we actually specified a *.json file
        if ".json" not in args.json:
            import sys
            sys.exit("Must specify a *.json file.")

    if args.params:
        #check that we actually specified a *.yaml file
        if ".yaml" in args.params:
            yaml_file = args.params
            f = open(args.params)
            config = yaml.safe_load(f)
            f.close()

        else:
            import sys
            sys.exit("Must specify a *.yaml file.")

    from StellarSpectra.model import Model
    from StellarSpectra.spectrum import DataSpectrum
    from StellarSpectra.grid_tools import TRES, HDF5Interface

    #Figure out what the relative path is to base
    import StellarSpectra
    base = StellarSpectra.__file__[:-26]

    myDataSpectrum = DataSpectrum.open(base + config['data'], orders=config['orders'])
    myInstrument = TRES()
    myHDF5Interface = HDF5Interface(base + config['HDF5_path'])

    myModel = Model.from_json(args.json, myDataSpectrum, myInstrument, myHDF5Interface)

    for model in myModel.OrderModels:

        #If an order has regions, read these out from model_final.json
        region_dict = model.get_regions_dict()
        print("Region dict", region_dict)
        #loop through these to determine the wavelength of each
        wl_regions = [value["mu"] for value in region_dict.values()]

        #Make vertical markings at the location of the wl_regions.

        #Get the data, sigmas, and mask
        wl, fl, sigma, mask = model.get_data()

        #Get the model flux
        flm = model.get_spectrum()

        #Get chebyshev
        cheb = model.get_Cheb()

        name = "Order {}".format(model.order)

        plot_data = order_json(wl, fl, sigma, mask, flm, cheb)
        plot_data.update({"wl_regions":wl_regions})
        print(plot_data['wl_regions'])

        render_template(base, plot_data)
Example #23
# Use the model_final.json to figure out how many orders there are
from StellarSpectra.model import Model
from StellarSpectra.spectrum import DataSpectrum
from StellarSpectra.grid_tools import TRES, HDF5Interface

# Figure out what the relative path is to base
import StellarSpectra

base = StellarSpectra.__file__[:-26]

myDataSpectrum = DataSpectrum.open(base + config["data"], orders=config["orders"])
myInstrument = TRES()
myHDF5Interface = HDF5Interface(base + config["HDF5_path"])

myModel = Model.from_json(args.run + "/model_final.json", myDataSpectrum, myInstrument, myHDF5Interface)
orders = [orderModel.order for orderModel in myModel.OrderModels]

flot_plots = {22: "Hi"}

# If the Jinja templater is going to work, it needs a list of orders, and a list of how many regions
# are in each order. For each order, there is a dictionary of global parameters.

# Set the categories as the decomposition of the run directory, excluding
# output and the "run00" directory.
# For example, output/WASP14/Kurucz/22/run01 becomes categories="WASP14 Kurucz 22"
categories = " ".join(args.run.split("/")[1:-1])