import logging

from itertools import chain
#from collections import deque
from operator import itemgetter
import yaml
import shutil
import json

# imports used below
import numpy as np
import os
import Starfish
import Starfish.grid_tools
from Starfish.spectrum import DataSpectrum



Starfish.routdir = ""

# list of keys from 0 to (norders - 1); this excerpt loads a single order
order_keys = np.arange(1)
DataSpectra = [DataSpectrum.open(os.path.expandvars(file), orders=Starfish.data["orders"]) for file in Starfish.data["files"]]
# list of keys from 0 to (nspectra - 1); used for indexing purposes.
spectra_keys = np.arange(len(DataSpectra))

# Instruments are provided one per dataset
Instruments = [eval("Starfish.grid_tools." + inst)() for inst in Starfish.data["instruments"]]


logging.basicConfig(format="%(asctime)s - %(levelname)s - %(name)s -  %(message)s", filename="{}log.log".format(
    Starfish.routdir), level=logging.DEBUG, filemode="w", datefmt='%m/%d/%Y %I:%M:%S %p')
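
The eval(...) call above instantiates each instrument class from its name string. A safer equivalent is an attribute lookup; a minimal sketch, assuming the named classes live in Starfish.grid_tools (as the string prefix implies):

def load_instrument(name):
    # Resolve the class by attribute access instead of eval'ing a code string;
    # an unknown name raises AttributeError rather than executing arbitrary code.
    return getattr(Starfish.grid_tools, name)()

Instruments = [load_instrument(inst) for inst in Starfish.data["instruments"]]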

class Order:
    def __init__(self, debug=False):
        '''
        This object contains all of the variables necessary for the partial
        lnprob calculation for one echelle order. It is designed to first be
        instantiated within the main process and then forked to other
        subprocesses.
        '''

Example #2

import gc
import logging

from itertools import chain
from operator import itemgetter
import yaml
import shutil
import json

# imports used below
import Starfish
import Starfish.grid_tools
from Starfish.spectrum import DataSpectrum

orders = Starfish.data["orders"]
assert len(orders) == 1, "Can only use 1 order for now."
order = orders[0]

# Load just this order for now.
dataSpec = DataSpectrum.open(Starfish.data["files"][0], orders=Starfish.data["orders"])
instrument = eval("Starfish.grid_tools." + Starfish.data["instruments"][0])()

# full_mask = create_mask(dataSpec.wls, Starfish.data["masks"][0])
# dataSpec.add_mask(full_mask)

wl = dataSpec.wls[0]

# Truncate these to our shorter range to make it faster
# ind = (wl > 5165.) & (wl < 5185.)
# wl = wl[ind]
#
fl = dataSpec.fls[0] #[ind]
sigma = dataSpec.sigmas[0] #[ind]
# mask = dataSpec.masks[0][ind]
ndata = len(wl)
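
To illustrate the commented-out truncation above: build a boolean index over the wavelength grid and apply it to every per-pixel array (bounds taken from the comment; purely illustrative).

ind = (wl > 5165.) & (wl < 5185.)   # example range from the comment above
wl = wl[ind]
fl = fl[ind]
sigma = sigma[ind]
ndata = len(wl)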
Example #3
def init_directories(run_index):
    # (reconstructed signature; the body above this loop, which builds the
    # routdir run directory from run_index, is omitted in this excerpt)
    for model_number in range(len(Starfish.config.data["files"])):
        for order in Starfish.config.data["orders"]:
            order_dir = routdir + Starfish.specfmt.format(model_number, order)
            print("Creating ", order_dir)
            os.makedirs(order_dir)

    return routdir

if args.run_index:
    Starfish.routdir = init_directories(args.run_index)
else:
    Starfish.routdir = ""

# list of keys from 0 to (norders - 1)
order_keys = np.arange(len(Starfish.config.data["orders"]))
DataSpectra = [DataSpectrum.open(os.path.expandvars(file), orders=Starfish.config.data["orders"]) for file in Starfish.config.data["files"]]
# list of keys from 0 to (nspectra - 1); used for indexing purposes.
spectra_keys = np.arange(len(DataSpectra))

# Instruments are provided one per dataset
Instruments = [eval("Starfish.grid_tools." + inst)() for inst in Starfish.config.data["instruments"]]

masks = Starfish.config.get("mask", None)
if masks is not None:
    for mask, dataSpec in zip(masks, DataSpectra):
        myMask = Mask(mask, orders=Starfish.config.data["orders"])
        dataSpec.add_mask(myMask.masks)

# Set up the logger
logging.basicConfig(format="%(asctime)s - %(levelname)s - %(name)s -  %(message)s", filename="{}log.log".format(
    Starfish.routdir), level=logging.DEBUG, filemode="w", datefmt='%m/%d/%Y %I:%M:%S %p')
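
Note that Example #3 wraps each filename in os.path.expandvars, so paths in the config may reference environment variables. A quick illustration (the variable and path are hypothetical):

import os

os.environ["DATADIR"] = "/scratch/spectra"       # hypothetical
os.path.expandvars("$DATADIR/star.hdf5")         # -> '/scratch/spectra/star.hdf5'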
Example #4
def init_directories(run_index):
    # (reconstructed signature; the body above this loop, which builds the
    # routdir run directory from run_index, is omitted in this excerpt)
    for model_number in range(len(Starfish.data["files"])):
        for order in Starfish.data["orders"]:
            order_dir = routdir + Starfish.specfmt.format(model_number, order)
            print("Creating ", order_dir)
            os.makedirs(order_dir)

    return routdir

if args.run_index:
    Starfish.routdir = init_directories(args.run_index)
else:
    Starfish.routdir = ""

# list of keys from 0 to (norders - 1)
order_keys = np.arange(len(Starfish.data["orders"]))
DataSpectra = [DataSpectrum.open(file, orders=Starfish.data["orders"]) for file in Starfish.data["files"]]
# list of keys from 0 to (nspectra - 1); used for indexing purposes.
spectra_keys = np.arange(len(DataSpectra))

# Instruments are provided one per dataset
Instruments = [eval("Starfish.grid_tools." + inst)() for inst in Starfish.data["instruments"]]

masks = Starfish.config.get("mask", None)
if masks is not None:
    for mask, dataSpec in zip(masks, DataSpectra):
        myMask = Mask(mask, orders=Starfish.data["orders"])
        dataSpec.add_mask(myMask.masks)

# Set up the logger
logging.basicConfig(format="%(asctime)s - %(levelname)s - %(name)s -  %(message)s", filename="{}log.log".format(
    Starfish.routdir), level=logging.DEBUG, filemode="w", datefmt='%m/%d/%Y %I:%M:%S %p')
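
For orientation, the Starfish.data block that Examples #3 and #4 index has roughly this shape; a hypothetical sketch, with keys matching the lookups above and illustrative values:

# Illustrative only; the real values come from the user's config file.
data = {
    "files": ["data/star.hdf5"],   # one HDF5 spectrum per dataset
    "orders": [21, 22],            # echelle orders to fit
    "instruments": ["TRES"],       # class names found in Starfish.grid_tools
}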
Example #5
def init_directories(run_index):
    # (reconstructed signature and loops, matching Examples #3 and #4; the
    # body that builds routdir is omitted in this excerpt)
    for model_number in range(len(Starfish.config.data["files"])):
        for order in Starfish.config.data["orders"]:
            order_dir = routdir + Starfish.specfmt.format(model_number, order)
            print("Creating ", order_dir)
            os.makedirs(order_dir)

    return routdir


if args.run_index:
    Starfish.routdir = init_directories(args.run_index)
else:
    Starfish.routdir = ""

# list of keys from 0 to (norders - 1)
order_keys = np.arange(len(Starfish.config.data["orders"]))
DataSpectra = [
    DataSpectrum.open(file, orders=Starfish.config.data["orders"])
    for file in Starfish.config.data["files"]
]
# list of keys from 0 to (nspectra - 1); used for indexing purposes.
spectra_keys = np.arange(len(DataSpectra))

# Instruments are provided one per dataset
Instruments = [
    eval("Starfish.grid_tools." + inst)()
    for inst in Starfish.config.data["instruments"]
]

masks = Starfish.config.get("mask", None)
if masks is not None:
    for mask, dataSpec in zip(masks, DataSpectra):
        myMask = Mask(mask, orders=Starfish.config.data["orders"])
        dataSpec.add_mask(myMask.masks)

Example #6

import gc
import logging

from itertools import chain
#from collections import deque
from operator import itemgetter
import yaml
import shutil
import json

# imports used below
import numpy as np
import os
import Starfish
import Starfish.grid_tools
from Starfish.spectrum import DataSpectrum

Starfish.routdir = ""

# list of keys from 0 to (norders - 1); this excerpt loads a single order
order_keys = np.arange(1)
DataSpectra = [
    DataSpectrum.open(os.path.expandvars(file), orders=Starfish.data["orders"])
    for file in Starfish.data["files"]
]
# list of keys from 0 to (nspectra - 1); used for indexing purposes.
spectra_keys = np.arange(len(DataSpectra))

# Instruments are provided one per dataset
Instruments = [
    eval("Starfish.grid_tools." + inst)()
    for inst in Starfish.data["instruments"]
]

logging.basicConfig(
    format="%(asctime)s - %(levelname)s - %(name)s -  %(message)s",
    filename="{}log.log".format(Starfish.routdir),
    level=logging.DEBUG,
    filemode="w",
    datefmt='%m/%d/%Y %I:%M:%S %p')
Example #7
        print("Deleting", outdir)
        shutil.rmtree(outdir)

print("Creating ", outdir)
os.makedirs(outdir)

# Determine how many filenames are in config['data']; always handle it as a
# list, even when there is only one dataset.
data = config["data"]
if not isinstance(data, list):
    data = [data]
print("loading data spectra {}".format(data))
orders = config["orders"]  #list of which orders to fit
order_ids = np.arange(len(orders))
DataSpectra = [
    DataSpectrum.open(data_file, orders=orders) for data_file in data
]

# Indices of the data sets we are fitting; used for indexing purposes.
spectra = np.arange(len(DataSpectra))

INSTRUMENTS = {"TRES": TRES, "SPEX": SPEX}
# Instruments are provided one per dataset
Instruments = [INSTRUMENTS[key]() for key in config["instruments"]]

masks = config.get("mask", None)
if masks is not None:
    for mask, dataSpec in zip(masks, DataSpectra):
        myMask = Mask(mask, orders=orders)
        dataSpec.add_mask(myMask.masks)
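
Two details distinguish Example #7 from the earlier ones. The explicit INSTRUMENTS dict replaces the eval(...) lookup, restricting instantiation to a known whitelist (it assumes TRES and SPEX have been imported from Starfish.grid_tools). And the flat config dict it reads would look roughly like this hypothetical sketch, with keys matching the lookups above and illustrative values:

config = {
    "data": ["star1.hdf5", "star2.hdf5"],   # one HDF5 file per dataset
    "orders": [21, 22, 23],                 # echelle orders to fit
    "instruments": ["TRES", "SPEX"],        # one per dataset; keys of INSTRUMENTS
    "mask": ["mask1.hdf5", "mask2.hdf5"],   # optional, one mask per dataset
}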
Example #8
                    default="all",
                    help="Which orders of the spectrum do you want to plot?")
parser.add_argument("--spec2", help="The second spectrum to plot")
parser.add_argument("--norm",
                    action="store_true",
                    help="Normalize each order to 1?")

args = parser.parse_args()

import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.ticker import FormatStrFormatter as FSF
from Starfish.spectrum import DataSpectrum

spec = DataSpectrum.open(args.file, orders=args.orders)

if args.spec2:
    spec2 = DataSpectrum.open(args.spec2, orders=args.orders)

# Set up the plot.
width = 20.  # in; the size of my monitor
h = 1.5  # height per order
norders = len(spec.wls)

matplotlib.rc("font", size=8)

fig, ax = plt.subplots(nrows=norders, figsize=(width, h * norders))

for a, wl, fl, sigma, order in zip(ax, spec.wls, spec.fls, spec.sigmas,
                                   spec.orders):
    # the loop body is truncated in the source; a minimal plausible body:
    if args.norm:
        fl = fl / np.max(fl)  # normalize each order to its peak
    a.plot(wl, fl)
    a.xaxis.set_major_formatter(FSF("%.0f"))
    a.set_ylabel("Order {}".format(order))
Example #9
    run_index = args.run_index
    outdir = base.format(run_index)
    #Delete this outdir, if it exists
    if os.path.exists(outdir):
        print("Deleting", outdir)
        shutil.rmtree(outdir)

print("Creating ", outdir)
os.makedirs(outdir)

# Determine how many filenames are in config['data']; always handle it as a
# list, even when there is only one dataset.
data = config["data"]
if not isinstance(data, list):
    data = [data]
print("loading data spectra {}".format(data))
myDataSpectra = [DataSpectrum.open(data_file, orders=config['orders']) for data_file in data]

masks = config.get("mask", None)
if masks is not None:
    for mask, dataSpec in zip(masks, myDataSpectra):
        myMask = Mask(mask, orders=config['orders'])
        dataSpec.add_mask(myMask.masks)

for model_number in range(len(myDataSpectra)):
    for order in config['orders']:
        order_dir = "{}{}/{}".format(outdir, model_number, order)
        print("Creating ", order_dir)
        os.makedirs(order_dir)

#Copy yaml file to outdir
shutil.copy(yaml_file, outdir + "/input.yaml")
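
With two spectra and orders [21, 22], the loops above leave outdir laid out as below (the tree follows the "{}{}/{}" format string; <outdir> stands in for the actual run directory):

<outdir>/input.yaml
<outdir>/0/21/
<outdir>/0/22/
<outdir>/1/21/
<outdir>/1/22/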
Example #10
import argparse

parser = argparse.ArgumentParser(prog="TRESplot.py",
                                 description="You've already run TRESio, now let's plot all the spectra!")
parser.add_argument("file", help="The HDF5 file you want to plot.")
parser.add_argument("--orders", default="all", help="Which orders of the spectrum do you want to plot?")
parser.add_argument("--spec2", help="The second spectrum to plot")
parser.add_argument("--norm", action="store_true", help="Normalize each order to 1?")

args = parser.parse_args()

import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.ticker import FormatStrFormatter as FSF
from Starfish.spectrum import DataSpectrum

spec = DataSpectrum.open(args.file, orders=args.orders)

if args.spec2:
    spec2 = DataSpectrum.open(args.spec2, orders=args.orders)


# Set up the plot.
width = 20.  # in; the size of my monitor
h = 1.5  # height per order
norders = len(spec.wls)

matplotlib.rc("font", size=8)

fig, ax = plt.subplots(nrows=norders, figsize=(width, h*norders))

for a, wl, fl, sigma, order in zip(ax, spec.wls, spec.fls, spec.sigmas, spec.orders):
    # the loop body is truncated in the source; a minimal plausible body:
    if args.norm:
        fl = fl / np.max(fl)  # normalize each order to its peak
    a.plot(wl, fl)
    a.xaxis.set_major_formatter(FSF("%.0f"))
    a.set_ylabel("Order {}".format(order))
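
A hypothetical invocation of this script (the prog name comes from the parser above; the HDF5 filename is illustrative):

python TRESplot.py data/star.hdf5 --orders all --norm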

Example #11

import gc
import logging

from itertools import chain
from operator import itemgetter
import yaml
import shutil
import json

# imports used below
import Starfish
import Starfish.grid_tools
from Starfish.spectrum import DataSpectrum

orders = Starfish.data["orders"]
assert len(orders) == 1, "Can only use 1 order for now."
order = orders[0]

# Load just this order for now.
dataSpec = DataSpectrum.open(Starfish.data["files"][0],
                             orders=Starfish.data["orders"])
instrument = eval("Starfish.grid_tools." + Starfish.data["instruments"][0])()

# full_mask = create_mask(dataSpec.wls, Starfish.data["masks"][0])
# dataSpec.add_mask(full_mask)

wl = dataSpec.wls[0]

# Truncate these to our shorter range to make it faster
# ind = (wl > 5165.) & (wl < 5185.)
# wl = wl[ind]
#
fl = dataSpec.fls[0]  #[ind]
sigma = dataSpec.sigmas[0]  #[ind]
# mask = dataSpec.masks[0][ind]
ndata = len(wl)
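
For orientation: DataSpectrum stores its arrays with one row per loaded order, so the [0] indexing above pulls 1-D arrays. A sketch, assuming the (norders, npixels) layout that indexing implies:

# Assumed layout, implied by the dataSpec.wls[0] indexing above:
# dataSpec.wls.shape    == (1, N)   wavelength grid
# dataSpec.fls.shape    == (1, N)   fluxes
# dataSpec.sigmas.shape == (1, N)   per-pixel uncertainties
assert dataSpec.wls.shape == dataSpec.fls.shape == dataSpec.sigmas.shape
assert len(wl) == len(fl) == len(sigma) == ndata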
Example #12
    if os.path.exists(outdir):
        print("Deleting", outdir)
        shutil.rmtree(outdir)

print("Creating ", outdir)
os.makedirs(outdir)

# Determine how many filenames are in config['data']; always handle it as a
# list, even when there is only one dataset.
data = config["data"]
if not isinstance(data, list):
    data = [data]
print("loading data spectra {}".format(data))
orders = config["orders"] #list of which orders to fit
order_ids = np.arange(len(orders))
DataSpectra = [DataSpectrum.open(data_file, orders=orders) for data_file in data]

# Indices of the data sets we are fitting; used for indexing purposes.
spectra = np.arange(len(DataSpectra))

INSTRUMENTS = {"TRES": TRES, "SPEX": SPEX}
# Instruments are provided one per dataset
Instruments = [INSTRUMENTS[key]() for key in config["instruments"]]

masks = config.get("mask", None)
if masks is not None:
    for mask, dataSpec in zip(masks, DataSpectra):
        myMask = Mask(mask, orders=orders)
        dataSpec.add_mask(myMask.masks)

for model_number in range(len(DataSpectra)):