Example #1
def test_template(data_type,data_spectrum,teff,logg,feh,start_lambda,end_lambda,shift):
    ### Import a template spectrum - test
    template_spectrum = "template_" + str(teff) + "_" + str(logg) + "_" + str(feh) + ".dat"

    if data_type=="flux":
        template_spectrum = functions.read_ascii(model_path_flux + template_spectrum)
    if data_type=="norm":
        template_spectrum = functions.read_ascii(model_path_norm + template_spectrum)

    template_spectrum = functions.read_table(template_spectrum)
    template_spectrum = transpose(array(template_spectrum))

    if data_type=="flux":
        template_spectrum = spectype_functions.normalise(template_spectrum,flux_normalise_w1,flux_normalise_w2)

    ### Chop both spectra
    data_region = spectype_numerical_functions.chop_spectrum(data_spectrum,start_lambda,end_lambda)
    template_region = spectype_numerical_functions.chop_spectrum(template_spectrum,start_lambda,end_lambda)

    ### Conform template spectrum to data spectrum -> same wavelength scale
    template_region = spectype_numerical_functions.conform_spectrum(data_region,template_region)

    ### Find shift

    data_region_shifted,template_region_shifted = spectype_functions.shift_spectrum(data_region,template_region,shift)
    chisq = spectype_numerical_functions.chisq(data_region_shifted,template_region_shifted)

    # plt.clf()
    # plt.plot(data_region_shifted[0],data_region_shifted[1])
    # plt.plot(template_region_shifted[0],template_region_shifted[1])
    # plt.title(data_type + " " + str(teff) + " " + str(logg) + " " + str(feh) +" " + str(chisq))
    # plt.show()

    return chisq
Example #2
def find_shift(data_type,data_spectrum,teff,logg,feh,start_lambda,end_lambda):
    ### Import the template spectrum
    template_spectrum = "template_" + str(teff) + "_" + str(logg) + "_" + str(feh) + ".dat"

    if data_type=="flux":
        template_spectrum = functions.read_ascii(model_path_flux + template_spectrum)
    if data_type=="norm":
        template_spectrum = functions.read_ascii(model_path_norm + template_spectrum)

    template_spectrum = functions.read_table(template_spectrum)
    template_spectrum = transpose(array(template_spectrum))

    if data_type=="flux":
        template_spectrum = spectype_functions.normalise(template_spectrum,flux_normalise_w1,flux_normalise_w2)

    ### Chop both spectra
    data_region = spectype_numerical_functions.chop_spectrum(data_spectrum,start_lambda,end_lambda)
    template_region = spectype_numerical_functions.chop_spectrum(template_spectrum,start_lambda,end_lambda)

    ### Conform template spectrum to data spectrum -> same wavelength scale
    template_region = spectype_numerical_functions.conform_spectrum(data_region,template_region)

    ### Find shift
    chisq_shift = []

    shift_limit = 20
    shift  = -1*shift_limit
    while shift <= shift_limit:
        data_region_shifted,template_region_shifted = spectype_functions.shift_spectrum(data_region,template_region,shift)
        chisq_shift.append(spectype_numerical_functions.chisq(data_region_shifted,template_region_shifted))
        shift = shift + 1

    chisq_min = spectype_functions.find_min(chisq_shift)
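    # find_min presumably returns the index of the minimum chi-square in chisq_shift,
    # so subtracting shift_limit maps that index back onto the -20..+20 shift grid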
    best_shift = chisq_min - shift_limit
    return best_shift
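Presumably find_shift and test_template are meant to be chained: the pixel shift found here is fed straight back into test_template's shift argument. A minimal sketch, assuming a "norm" spectrum and one template's teff/logg/feh values:

shift = find_shift("norm", data_spectrum, 5500, 4.5, 0.0, 5100, 5400)   # assumed parameters
chisq = test_template("norm", data_spectrum, 5500, 4.5, 0.0, 5100, 5400, shift)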
Example #3
def query_hscand(query_entry):
    ### Write the mysql query csh script
    command = "mysql --defaults-file=/home/gzhou/hscand.cfg HSCAND -e \"" + query_entry + "\" > /home/gzhou/query_result.txt"
    mysql_query = open("mysql_query.csh","w")
    mysql_query.write("#! /bin/csh \n")
    mysql_query.write(command + "\n")
    mysql_query.close()

    ### Copy the required scripts to hatsouth@princeton
    os.system("chmod a+x mysql_query.csh")
    print "copying files to hatsouth@princeton"
    os.system("scp hscand.cfg [email protected]:/home/gzhou/")
    os.system("scp mysql_query.csh [email protected]:/home/gzhou/")

    ### Execute the program and copy the results over
    print "Executing .csh files on princeton via ssh"
    os.system("ssh [email protected] '/home/gzhou/mysql_query.csh'")
    os.system("scp [email protected]:/home/gzhou/query_result.txt .")

    ### Read query_result.txt in as a list
    query_result = functions.read_ascii("query_result.txt")
    query_result = functions.read_table(query_result)

    os.system("rm query_result.txt")
    os.system("rm mysql_query.csh")
    return query_result
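A minimal usage sketch; the "SHOW TABLES" statement is only a placeholder, since the real HSCAND schema isn't shown in this excerpt:

query_result = query_hscand("SHOW TABLES")   # any valid MySQL statement for HSCAND works here
for row in query_result:
    print row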
Example #4
def main(file_path,file_name):
    file_path_reduced = file_path+"reduced/"
    file_path_temp = file_path+"temp/"
    
    coo = functions.read_table(functions.read_ascii(file_path_temp+"master_coo"))
    spatial = loadtxt(file_path_reduced+"spatial_"+file_name+".dat")

    coord_fit = array(fit_2dgauss(coo,spatial))
    savetxt(file_path+"reduced/coords_"+file_name+".dat",coord_fit,fmt="%.10f")
Example #5
def plot_isochrones(program_dir,style,lwidth):
    isochrones = functions.read_ascii(program_dir + "isochrone.dat")
    isochrones = functions.read_table(isochrones)

    isochrones = isochrones[:len(isochrones)-1]

    isochrones = transpose(isochrones)
    teff = 10**array(isochrones[4])
    logg = array(isochrones[5])

    plt.plot(teff,logg,style,linewidth=lwidth)
Example #6
def run_fxcor(input_file,input_rv,lines):
    iraf.unlearn(iraf.keywpars)

    iraf.filtpars.setParam("f_type","square",check=1,exact=1)
    iraf.filtpars.setParam("cuton",50,check=1,exact=1)
    iraf.filtpars.setParam("cutoff",2000,check=1,exact=1)

    os.system("rm fxcor_shift*")
    iraf.fxcor(
        objects = input_file, \
        templates = input_rv, \
        apertures = "*", \
        cursor = "",\
        continuum = "both",\
        filter = "both",\
        rebin = "smallest",\
        pixcorr = 0,\
        osample = lines,\
        rsample = lines,\
        apodize = 0.2,\
        function = "gaussian",\
        width = 15,\
        height= 0.,\
        peak = 0,\
        minwidth = 15,\
        maxwidth = 15,\
        weights = 1.,\
        background = "INDEF",\
        window = "INDEF",\
        wincenter = "INDEF",\
        output = "fxcor_shift",\
        verbose = "long",\
        imupdate = 0,\
        graphics = "stdgraph",\
        interactive = 0,\
        autowrite = 1,\
        ccftype = "image",\
        observatory = "sso",\
        continpars = "",\
        filtpars = "",\
        keywpars = "")

    vel_shift = functions.read_ascii("fxcor_shift.txt")
    vel_shift = functions.read_table(vel_shift)
    vel_shift = str(vel_shift[0][11])

    if vel_shift == "INDEF":
        vel_shift = 0
    print "shifting by ",vel_shift,"km/s"

    return vel_shift
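Note that run_fxcor returns either the string read from fxcor_shift.txt or the integer 0, so a caller probably has to coerce the value; a hedged sketch with placeholder file names:

vel_shift = run_fxcor("spec.fits", "rv_template.fits", "a5250-6815")   # hypothetical inputs
vel_shift = float(vel_shift)   # handles both a numeric string and the 0 fallback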
Example #7
def loop_input_spectrum(input_wave,input_flux,folder,teff_space,logg_space,feh_space,w1,w2,perform_normalise):
    data = []
    for teff in teff_space:
        for logg in logg_space:
            for feh in feh_space:
                template_spectrum = "template_" + str(teff) + "_" + str(logg) + "_" + str(feh)+".dat"
                #print folder + template_spectrum
                template_spectrum = functions.read_ascii(folder+template_spectrum)
                template_spectrum = functions.read_table(template_spectrum)
                template_spectrum = transpose(array(template_spectrum))

                if folder == model_path_flux:
                    template_spectrum = spectype_functions.normalise(template_spectrum,flux_normalise_w1,flux_normalise_w2)
          

                i1 = w1 - min(input_wave)
                i2 = w2 - min(input_wave)

                input_wave_cropped = input_wave[i1:i2]
                input_flux_cropped = input_flux[i1:i2]

                template_spectrum = spectype_numerical_functions.chop_spectrum(template_spectrum,w1-10,w2+10)
                template_interp = interpolate.splrep(template_spectrum[0],template_spectrum[1],s=0)
                template_flux = interpolate.splev(input_wave_cropped,template_interp,der=0)

                sigma = 3.0

                if perform_normalise:
                    diff_flux = input_flux_cropped/median(input_flux_cropped) - template_flux/median(template_flux)

                else:
                    diff_flux = input_flux_cropped - template_flux

                diff_flux = clip(diff_flux,median(diff_flux) - sigma*std(diff_flux),median(diff_flux)+sigma*std(diff_flux))

                rms = sqrt(sum(diff_flux**2) /float(len(input_wave_cropped)))


                # plt.clf()
                # plt.plot(input_wave_cropped,input_flux_cropped/median(input_flux_cropped))
                # plt.plot(input_wave_cropped,template_flux/median(template_flux))
                # plt.show()
                # #sys.exit()

                #print rms
                data.append(rms)
    return data
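loop_input_spectrum returns one rms per template, looped teff -> logg -> feh, so the flat list can be reshaped to find the best match; a minimal sketch assuming the same teff_space/logg_space/feh_space lists, an input spectrum already split into wave/flux arrays, and an illustrative 5100-5400 window:

rms_list = loop_input_spectrum(input_wave, input_flux, model_path_norm, teff_space, logg_space, feh_space, 5100, 5400, True)
rms_grid = array(rms_list).reshape(len(teff_space), len(logg_space), len(feh_space))
i, j, k = unravel_index(rms_grid.argmin(), rms_grid.shape)
print "lowest rms template:", teff_space[i], logg_space[j], feh_space[k]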
Example #8
### Set program dir and change working directory
program_dir = os.getcwd() + "/" #Save the current working directory
os.chdir(file_path_reduced) #Change to ../temp/ dir
os.system("mkdir ccf_pdfs")
os.system("rm ccf_pdfs/*" + file_name + "*")

################
### Load ccf ###
################
plt.clf()

hdulist = pyfits.open(file_path_reduced + "normspec_"+file_name)
candidate = hdulist[0].header["OBJECT"]
hdulist.close()

ccf = functions.read_ascii("ccf_" + file_name + ".txt")
ccf = functions.read_table(ccf)

### Find max
max_ccf = max(transpose(ccf)[1])
max_pos = 0

for i in range(len(ccf)):
    if ccf[i][1] == max_ccf:
        max_pos = i
        break

#ccf = ccf[max_pos-200:max_pos+200]
ccf = ccf[max_pos-40:max_pos+40]
ccf = transpose(ccf)
Example #9
import functions
import os
import sys
from numpy import *
import matplotlib.pyplot as plt

def rms(input_list):
    input_list = array(input_list)
    rms = sqrt(sum(input_list**2) / len(input_list))
    return rms
               
RV = functions.read_ascii("aperture_RV_3aps.dat")
RV = functions.read_table(RV)

for i in RV:
    ap1 = i[2]
    ap1_RV = i[3]
    ap2 = i[4]
    ap2_RV = i[5]

    plt.plot(array([ap1,ap2]) - min([ap1,ap2]),[ap1_RV,ap2_RV])

plt.show()
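The rms helper defined above isn't used in this fragment; presumably it quantifies the aperture-to-aperture scatter, along the lines of the hedged sketch below (assuming read_table has already returned numeric columns):

rv_diffs = [i[3] - i[5] for i in RV]   # ap1_RV minus ap2_RV for each row
print "aperture-to-aperture RV rms:", rms(rv_diffs)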
Example #10
def rms(input_list):
    rms = sqrt(sum(input_list**2) / len(input_list))
    return rms

def to_numbers(input_list):
    temp = []
    for i in input_list:
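        # eval() converts each numeric string, but it will run any expression it is given; float(i) would be the safer choice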
        temp.append(eval(i))
    return temp

#trial_no = "trial8"

trial_no = sys.argv[1]
               
RV = functions.read_ascii(trial_no + ".dat")
RV = functions.read_table(RV)

RV = transpose(RV)

RV_values = array(to_numbers(RV[3])) - median(to_numbers(RV[3]))
RV_err = to_numbers(RV[4])
ccf_height = to_numbers(RV[5])

print "RV RMS = ", rms(RV_values)
plt.clf()
plt.hist(RV_values,bins=10,histtype="step",hatch="/")
plt.xlabel("RV")
plt.show()

print "Median RV err = ", median(RV_err)
Example #11
import os
import sys
import string
import functions

### This is a program meant for testing!
### usage: python run_spectype_all.py

file_path = functions.read_config_file("FILE_PATH")

program_dir = os.getcwd() + "/"
os.chdir(file_path + "/reduced/")

os.system("ls fluxcal_*.fits > file_list")
file_list = functions.read_ascii("file_list")
os.system("rm file_list")

os.chdir(program_dir)

for file_name in file_list:
    file_name = string.split(file_name,"_")
    file_name = file_name[1]
    print "******"
    print file_name
    os.system("python spectype_main.py " + file_path + " " + file_name)
    os.system("python update_spectype.py " + file_path + " " + file_name)

Example #12
########################
### Start of program ###
########################

file_path = sys.argv[1]
file_path_temp = file_path + "temp/"
file_path_reduced = file_path + "reduced/"

file_name = sys.argv[2]

print "This script uses iraf.fxcor to generate a CCF for " +file_name + " using synthetic templates"

program_dir = os.getcwd() + "/" #Save the current working directory
### Load fxcor RV measurements
fxcor_stellar = functions.read_ascii(file_path_reduced + "fxcor_stellar.txt")
fxcor_stellar = functions.read_table(fxcor_stellar)

### Load grating / camera settings
grating = functions.read_config_file("GRATING")
dichroic = functions.read_config_file("RT560")

region_w1 = functions.read_param_file(grating+"_"+dichroic+"_w1")
region_w2 = functions.read_param_file(grating+"_"+dichroic+"_w2")

### Load location of library
synthetic_library = functions.read_param_file("RV_SPECTRAL_LIBRARY")

### Load RV fxcor region
stellar_region = functions.read_param_file("STELLAR_REGION")
Example #13
### Set program dir and change working directory
program_dir = os.getcwd() + "/" #Save the current working directory
os.chdir(file_path_reduced) #Change to ../temp/ dir

### Find info from the fits header
hdulist = pyfits.open(file_path_reduced+"spec_" + file_name)
object_name = hdulist[0].header["OBJECT"]
dateobs = hdulist[0].header["DATE-OBS"]
mjd = hdulist[0].header["MJD-OBS"]
exptime = hdulist[0].header["EXPTIME"]
comment = hdulist[0].header["NOTES"]
hdulist.close()

### Read info from text files in reduced/
spectype = functions.read_ascii("spectype.txt")
spectype = functions.read_table(spectype)

for entry in spectype:
    if entry[1] == object_name and entry[0] == file_name:
        teff = entry[2]
        logg = entry[4]
        feh = entry[6]

image_quality = functions.read_ascii("image_quality.dat")
image_quality = functions.read_table(image_quality)

sn = 0.
entry_found = False
for entry in image_quality:
    if entry[0] == file_name and entry[1] == object_name:
Example #14
def detect_stars(input_image,se_path,no_stars):
    
    image_data = pyfits.getdata(input_image)

    ### Estimate the background as the median pixel value of the whole frame
    oned = []
    for i in range(len(image_data)):
        for j in range(len(image_data[i])):
            oned.append(image_data[i,j])

    med = median(oned)

    run_daofind(input_image,"master_coo",1)

    os.system("rm coords.cat")
    SEcommand = se_path+" "+input_image+" -c default.sex"
    SEcommand = SEcommand+" -BACK_TYPE MANUAL -BACK_VALUE "+str(med)
    os.system(SEcommand)

    os.system("cat coords.cat")

    SE_coo = functions.read_ascii("coords.cat")
    SE_coo = functions.read_table(SE_coo)

    temp = []
    for i in SE_coo:
        if i[0] < 36.:
            temp.append(i)
    SE_coo = temp

    phot_coo = functions.read_ascii("master_coo")
    phot_coo = functions.read_table(phot_coo)

    temp = []
    for i in phot_coo:
        if i[0] < 36.:
            temp.append(i)
    phot_coo  = temp

    ### Check if the objects in phot_coo exists also in SE_coo
    confirmed_objects = []

    for phot_obj in phot_coo:
        phot_obj_x = phot_obj[0]
        phot_obj_y = phot_obj[1]
        for SE_obj in SE_coo:
            SE_obj_x = SE_obj[0]
            SE_obj_y = SE_obj[1]
            SE_obj_fwhm = SE_obj[4]
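            # Note: the SE-measured FWHM is overridden below by a fixed 6-pixel matching radius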

            SE_obj_fwhm = 6
            
            # if SE_obj_fwhm < 5. or SE_obj_fwhm > 10.0:
            #     SE_obj_fwhm = 5

            if abs(phot_obj_x-SE_obj_x)<SE_obj_fwhm and abs(phot_obj_y-SE_obj_y)<SE_obj_fwhm:
                confirmed_objects.append(phot_obj)
                break

    if len(confirmed_objects) == 0 and len(SE_coo) > 0:
        print "NO matching objects, using SE coordinates"
        confirmed_objects = []
        for SE_obj in SE_coo:
            confirmed_objects.append([SE_obj[0],SE_obj[1],"INDEF",0.5,0.5,0.5,SE_obj[0]])

    elif len(confirmed_objects) == 0 and len(phot_coo) > 0:
        print "NO matching objects, using iraf.phot coordinates"
        confirmed_objects = phot_coo

    elif len(confirmed_objects)==0 and len(phot_coo)==0 and len(SE_coo)==0:
        print "NO objects detected!!!"
        sys.exit()


    ### Order by brightness


    flux_list = []
    for i in confirmed_objects:

        aperture = circle(i[1]-1,i[0]-1,2.0,image_data)
        flux = aperture*image_data - aperture*med
        flux = flux.sum()
        flux_list.append(flux)

    flux_list_sorted = sorted(flux_list,reverse=True)

    print "flux",flux_list_sorted

    temp = []
    for i in range(len(flux_list_sorted)):
        j = flux_list.index(flux_list_sorted[i])
        temp.append(confirmed_objects[j])
        
    confirmed_objects = temp
            
    ### remove unwanted objects
    if no_stars > 0:
        confirmed_objects = confirmed_objects[:no_stars]

    master_out = open("master_coo","w")
    functions.write_table(confirmed_objects,master_out)
    master_out.close()
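detect_stars relies on a circle() helper that isn't included in this excerpt; presumably it returns a mask with the same shape as the image, set to 1 inside the requested aperture radius and 0 outside. A minimal stand-in under that assumption (the coordinate naming is a guess):

def circle(y0, x0, radius, image_data):
    ### Hypothetical replacement for the missing helper: a hard-edged circular aperture mask
    mask = zeros(image_data.shape)
    for y in range(image_data.shape[0]):
        for x in range(image_data.shape[1]):
            if (y - y0)**2 + (x - x0)**2 <= radius**2:
                mask[y, x] = 1.0
    return mask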
Example #15
hsmso_connect = functions.read_config_file("HSMSO_CONNECT")
hscand_connect = functions.read_config_file("HSCAND_CONNECT")
default_teff = float(functions.read_config_file("TEFF_ESTIMATE"))
default_logg = float(functions.read_config_file("LOGG_ESTIMATE"))
teff_ini,logg_ini = functions.estimate_teff_logg(object_name,hsmso_connect,hscand_connect,default_teff,default_logg)
feh_ini = 0.0

print "Initial estimate of teff, logg: ",str(teff_ini),str(logg_ini)

### Change directory to reduced/
program_dir = os.getcwd() + "/" #Save the current working directory
os.chdir(file_path_reduced) #Change to ../reduced/ dir

### Load in spectra
flux_spectrum = functions.read_ascii("fluxcal_" + file_name + ".dat")
flux_spectrum = functions.read_table(flux_spectrum)
flux_spectrum = transpose(array(flux_spectrum))
flux_spectrum = spectype_functions.normalise(flux_spectrum,flux_normalise_w1,flux_normalise_w2)

norm_spectrum = functions.read_ascii("norm_" + file_name + ".dat")
norm_spectrum = functions.read_table(norm_spectrum)
norm_spectrum = transpose(array(norm_spectrum))

print "Using specific regions for spectral typing"
### Check the temp and define which logg sensitive regions to use
#if teff_ini > 4750 and teff_ini < 5750:
if teff_ini > 4750 and teff_ini < 6250:
    #logg_regions = [[5140,5235]]
    logg_regions = [[5100,5400]]
if teff_ini <= 4750 and teff_ini > 4250:
Example #16
#################
### Functions ###
#################

########################
### Start of program ###
########################
file_path = sys.argv[1]
file_path_temp = file_path + "temp/"
file_path_reduced = file_path + "reduced/"

file_name = sys.argv[2]

### Read in the correct image slice to analyse
slices = functions.read_ascii(file_path_temp + "stellar_apertures.txt")
slices = functions.read_table(slices)
slice_to_use = slices[0][0]

image_data = pyfits.getdata(file_path_temp + str(int(slice_to_use)) + "_" + file_name)
slice_data = image_data

### Chop the 200 columns in the centre of the image
image_data = transpose(image_data)
image_data = image_data[len(image_data)/2 - 100:len(image_data)/2 + 100]
image_data = transpose(image_data)
median_list = []
for i in image_data:
    median_list.append(median(i))

for i in range(len(median_list)):
Example #17
        output = redden_name,\
        value = redden,\
        R = 3.1,\
        type = "E(B-V)",\
        apertures = "*",\
        override = 1,\
        uncorrect = 0,\
        mode = "al")

    ### Create .dat file out of fits file redden_name
    
    os.system("rm " + redden_name + ".dat")

    iraf.wspectext(redden_name + "[*,1,1]", redden_name + ".dat")

    spectrum = functions.read_ascii(redden_name + ".dat")
    spectrum = functions.read_table(spectrum)
    temp = []
    for i in spectrum:
        if len(i) == 2:
            if functions.is_number(i[0]):
                temp.append(i)
    spectrum = temp
    spectrum = spectrum[1:len(spectrum)-2]

    output_spectrum = open(redden_name + ".dat","w")
    functions.write_table(spectrum,output_spectrum)
    output_spectrum.close()

    os.system("mv " + redden_name + ".dat deredden")
    os.system("mv " + redden_name + " deredden")
Example #18
    niterate = 10,\
    markrej = 1,\
    graphics = "stdgraph",\
    cursor = "",\
    ask = "no",\
    mode = "ql")

#######################
### Convert to .dat ###
#######################

os.system("rm norm_" + file_name + ".dat")

iraf.wspectext("norm_" + file_name +"[*,1,1]", "norm_" + file_name + ".dat")

spectrum = functions.read_ascii("norm_" + file_name + ".dat")
spectrum = functions.read_table(spectrum)
temp = []
for i in spectrum:
    if len(i) == 2:
        if functions.is_number(i[0]):
            temp.append(i)
spectrum = temp
spectrum = spectrum[1:len(spectrum)-2]

output_spectrum = open("norm_" + file_name + ".dat","w")
functions.write_table(spectrum,output_spectrum)
output_spectrum.close()

os.system("cp norm_" + file_name + "* " + file_path_reduced)
Example #19
msun = 1.988435*10**30
rsun = 6.955*10**8
mjup = 1.8988*10**27 
rjup = 6.9173*10**7
day = 60.*60.*24.
gconst = 6.67*10**(-11)


### Read from config file
### Load initial parameters and lightcurve

temp_param_names = []
temp_param_vals = []
temp_param_range = []

lclist = functions.read_ascii(functions.read_config_file("INPUT_LC_LIST"))
lc = []
for lc_n in lclist:
    lc.append(loadtxt(lc_n))

cadence = []
for i in lc:
    cadence.append(functions.find_cadence(i))

lc_ld1 = eval(functions.read_config_file("LC_LD1"))
lc_ld1_err = eval(functions.read_config_file("LC_LD1_ERR"))
lc_ld2 = eval(functions.read_config_file("LC_LD2"))
lc_ld2_err = eval(functions.read_config_file("LC_LD2_ERR"))
for i in range(len(lc_ld1)):
    temp_param_names.append("lc_ld1")
    temp_param_vals.append(lc_ld1[i])
Example #20
### Set program dir and change working directory
program_dir = os.getcwd() + "/" #Save the current working directory
os.chdir(file_path_reduced) #Change to ../temp/ dir

### Find info from the fits header
hdulist = pyfits.open(file_path_reduced+"normspec_" + file_name)
object_name = hdulist[0].header["OBJECT"]
dateobs = hdulist[0].header["DATE-OBS"]
mjd = hdulist[0].header["MJD-OBS"]
exptime = hdulist[0].header["EXPTIME"]
comment = hdulist[0].header["NOTES"]
hdulist.close()

### Read info from text files in reduced/
RV_dat = functions.read_ascii("RV.dat")
RV_dat = functions.read_table(RV_dat)

for entry in RV_dat:
    if entry[0] == object_name and entry[1] == file_name:
        if functions.is_number(entry[2]):
            hjd = entry[2] + 50000
            RV = entry[3]
            RV_err = entry[4]
            ccf_height = entry[5]

ccf_log = functions.read_ascii("ccf_log.txt")
ccf_log = functions.read_table(ccf_log)

ccf_fwhm = 0
bis = 0
Example #21
### into a single spectrum, to boost S/N.

### Usage: python combine_apertures.py file_path file_name

########################
### Start of program ###
########################

### Set file_path
file_path = sys.argv[1]
file_path_temp = file_path + "temp/"
file_path_reduced = file_path + "reduced/"

file_name = sys.argv[2]

image_slices = functions.read_ascii(file_path_temp + "stellar_apertures.txt")

camera = functions.read_config_file("CAMERA")
grating = functions.read_config_file("GRATING")
dichroic = functions.read_config_file("DICHROIC")

spectrum_w1 = functions.read_param_file(grating+"_"+dichroic+"_w1")
spectrum_w2 = functions.read_param_file(grating+"_"+dichroic+"_w2")

sample_w1 = functions.read_param_file("FLUX_NORMALISE_w1")
sample_w2 = functions.read_param_file("FLUX_NORMALISE_w2")
sample_region = "a"+sample_w1+"-"+sample_w2

combine_apertures = functions.read_config_file("COMBINE_APERTURES")

program_dir = os.getcwd() + "/" #Save the current working directory
Example #22
import string
import functions
import sys

##############################
### Check RV_Standard_list ###
##############################

### Usage: python check_RV_list.py RV_standard_list

RV_Standard_list = functions.read_ascii(sys.argv[1])

if RV_Standard_list == []:
    print "False"
else:
    print "True"
Example #23
au = 1.496 * 10**11
msun = 1.988435 * 10**30
rsun = 6.955 * 10**8
mjup = 1.8988 * 10**27
rjup = 6.9173 * 10**7
day = 60. * 60. * 24.
gconst = 6.67 * 10**(-11)

### Read from config file
### Load initial parameters and lightcurve

temp_param_names = []
temp_param_vals = []
temp_param_range = []

lclist = functions.read_ascii(functions.read_config_file("INPUT_LC_LIST"))
lc = []
for lc_n in lclist:
    lc.append(loadtxt(lc_n))

cadence = []
for i in lc:
    cadence.append(functions.find_cadence(i))

lc_ld1 = eval(functions.read_config_file("LC_LD1"))
lc_ld1_err = eval(functions.read_config_file("LC_LD1_ERR"))
lc_ld2 = eval(functions.read_config_file("LC_LD2"))
lc_ld2_err = eval(functions.read_config_file("LC_LD2_ERR"))
for i in range(len(lc_ld1)):
    temp_param_names.append("lc_ld1")
    temp_param_vals.append(lc_ld1[i])
Example #24
### Read from config file
no_apertures = int(functions.read_config_file("NO_APERTURES"))
no_stars = int(functions.read_config_file("NO_STARS"))
se_path = functions.read_param_file("SE_PATH")
program_dir = os.getcwd()+"/"

### Set file_path
file_path = sys.argv[1]
file_path_temp = file_path + "temp/"
file_path_reduced = file_path + "reduced/"

file_name = sys.argv[2]

interactive = functions.read_config_file("INTERACT")

image_slices_list = functions.read_ascii(file_path_temp + "slice_" + file_name+".txt")
image_slices_list = functions.read_table(image_slices_list)
image_slices_list = image_slices_list[1:]

hdulist = pyfits.open(file_path + file_name)
object_name = hdulist[0].header['OBJECT']
hdulist.close()

os.chdir(file_path_temp)

########################################################
### Reconstruct array by reading in each image slice ###
########################################################

spatial_image = []
for image_slice in image_slices_list:
Example #25
    stellar_region = "*"
if teff > 7500:
    stellar_region = "a6450-6700"

print "This script uses iraf.fxcor to find RV solution of " + file_name

program_dir = os.getcwd() + "/"  # Save the current working directory

### Change directory to reduced/
os.chdir(file_path_reduced)  # Change to ../reduced/ dir

#################################
### Load all the RV_standards ###
#################################

RV_list = functions.read_ascii(file_path_temp + "RV_Standard_list")

for i in range(len(RV_list)):
    RV_list[i] = "normspec_A_" + RV_list[i] + ".fits"
    print RV_list[i]


### Check that it exists, else take it out of the list
temp_list = []
for i in range(len(RV_list)):
    if os.path.exists(file_path_reduced + RV_list[i]):
        temp_list.append(RV_list[i])
        print RV_list[i]

RV_list = temp_list
if len(RV_list) > 0:
Example #26
msun = 1.988435*10**30
rsun = 6.955*10**8
mjup = 1.8988*10**27 
rjup = 6.9173*10**7
day = 60.*60.*24.
gconst = 6.67*10**(-11)


### Read from config file
### Load initial parameters and lightcurve

temp_param_names = []
temp_param_vals = []
temp_param_range = []

lclist = functions.read_ascii(functions.read_config_file("INPUT_LC_LIST"))
lc = []
for lc_n in lclist:
    lc.append(loadtxt(lc_n))

cadence = []
for i in lc:
    cadence.append(functions.find_cadence(i))


lc_ld1 = eval(functions.read_config_file("LC_LD1"))
lc_ld1_err = eval(functions.read_config_file("LC_LD1_ERR"))
lc_ld2 = eval(functions.read_config_file("LC_LD2"))
lc_ld2_err = eval(functions.read_config_file("LC_LD2_ERR"))
for i in range(len(lc_ld1)):
    temp_param_names.append("lc_ld1")
Example #27
hdulist = pyfits.open(file_path + file_name)
object_mjd = hdulist[0].header['MJD-OBS']
hdulist.close()

camera = functions.read_config_file("CAMERA")
grating = functions.read_config_file("GRATING")
dichroic = functions.read_config_file("DICHROIC")
combine_aps = functions.read_config_file("COMBINE_APERTURES")
task = functions.read_config_file("TASK")
no_apertures = eval(functions.read_config_file("NO_APERTURES"))

print "This script applies NeAr arc image to calibrate the object spectrum " +file_name

### Get slice numbers and arc images to use
arc_list = functions.read_ascii(file_path_temp + "arcs_to_use.txt")
coo = functions.read_ascii(file_path_temp+"master_coo")
coo = functions.read_table(coo)

### Calculate the fractional weight of each arc 
arc_weight = []
for arc_name in arc_list:
    hdulist = pyfits.open(file_path + arc_name)
    arc_mjd = hdulist[0].header['MJD-OBS']
    hdulist.close()

    arc_weight.append(abs(arc_mjd - object_mjd))

arc_weight = array(arc_weight)
arc_weight = arc_weight / sum(arc_weight)
Example #28
### Start of program ###
########################

### Set file_path
file_path = sys.argv[1]
file_path_temp = file_path + "temp/"
file_path_reduced = file_path + "reduced/"

file_name = sys.argv[2]
biassubtracted_file_name = "out_ccdproc_" + file_name

### Load in the image slices table
### This table was created by define_image_slices.py
### and contains locations of the image slices
### according to a flat field frame
image_slices = functions.read_ascii(file_path_temp + "image_slice_table.txt")
image_slices = functions.read_table(image_slices)

### Loop through and cut out each slice
### save in individual files

print "Chopping image into its image slices"

os.chdir(file_path_temp)

slices_file_list = ""
for i in range(len(image_slices)):
    start_column = int(image_slices[i][0])
    end_column = int(image_slices[i][1])
    region = '[1:4093,' + str(start_column) + ':'+str(end_column)+']'
    print region
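The snippet is cut off right after the section string is built; presumably each region is then copied out of the bias-subtracted frame into its own slice file. A hedged sketch of how the loop might continue (the output naming and the use of iraf.imcopy are assumptions, not taken from the original):

    output_slice = str(i) + "_" + file_name                       # hypothetical slice file name
    iraf.imcopy(biassubtracted_file_name + region, output_slice)
    slices_file_list = slices_file_list + " " + output_slice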
Example #29
        free_param_vals.append(temp_param_vals[i])
        free_param_range.append(temp_param_range[i])
        free_param_func.append("b")

print "FREE PARAMS"
for i in range(len(free_param_names)):
    print free_param_names[i],free_param_vals[i],free_param_range[i]


print "FIXED PARAMS"
for i in range(len(fixed_param_names)):
    print fixed_param_names[i],fixed_param_vals[i]

x0 = zeros(len(free_param_names))

free_param_vals = [functions.read_ascii("best_param_mcmc")[1]]
free_param_vals = array(functions.read_table(free_param_vals))[0]

print free_param_vals

phase,flux,err,model = fitting_functions.lc_chisq(free_param_vals,free_param_names,fixed_param_names,fixed_param_vals,lc,False,True)

### Plot data
plt.clf()
plt.scatter(phase,flux,s=1,color="k")
plt.scatter(phase+1,flux,s=1,color="k")

plt.scatter(phase,model,s=2,color="r")
plt.scatter(phase+1,model,s=2,color="r")

plt.xlim(0.995,1.005)
Example #30
dichroic = functions.read_config_file("DICHROIC")

### Set program dir and change working directory
program_dir = os.getcwd() + "/" #Save the current working directory
os.chdir(file_path_reduced) #Change to ../temp/ dir

### Find info from the fits header
hdulist = pyfits.open(file_path + file_name)
object_name = hdulist[0].header["OBJECT"]
dateobs = hdulist[0].header["DATE-OBS"]
mjd = hdulist[0].header["MJD-OBS"]
exptime = hdulist[0].header["EXPTIME"]
comment = hdulist[0].header["NOTES"]
hdulist.close()

image_quality = functions.read_ascii("image_quality.dat")
image_quality = functions.read_table(image_quality)

for entry in image_quality:
    if entry[0] == file_name and entry[1] == object_name:
        sn = entry[5]

import MySQLdb
sql_date = string.split(dateobs,"T")[0]
sql_time = string.split(dateobs,"T")[1]

print "Connecting to database"
db=MySQLdb.connect(host="marbles.anu.edu.au",user="******",passwd="h@ts0uthDB",db="daniel1")

c = db.cursor()
c.execute("""SELECT SPECid FROM SPEC WHERE SPECmjd=""" + str(mjd) + """ and SPECobject=\"%s\" """ % object_name)
Example #31
iraf.continpars.setParam("order",2,check=1,exact=1)
iraf.continpars.setParam("low_reject",2.0,check=1,exact=1)
iraf.continpars.setParam("high_reject",2.0,check=1,exact=1)

### Then apply fxcor to the stellar regions for RV measurement
os.system("rm fxcor_stellar*")
#region = "*"
#region = "a5700-6100"
region = "a5250-6815"
normalise(file_name)

run_fxcor("temp.fits","mdwarf_template_norm.fits",region,"fxcor_stellar",0,False)
os.system("cat fxcor_stellar.txt")

### Now calculate RV
data = functions.read_ascii("fxcor_stellar.txt")
data = functions.read_table(data)

rv = []
rverr = []
for i in data:
    if functions.is_number(i[3]):
        hjd = i[3]+50000

    if functions.is_number(i[12]):
        if abs(i[12]) < 500 and abs(i[13]) < 500:
            rv.append(i[12])
            rverr.append(i[13])
    
RV = median(rv)
RV_err = median(rverr)
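One caveat a caller might want to guard against: if fxcor reports INDEF for every aperture, rv and rverr stay empty and the medians above are undefined. A hedged variant of the last two lines:

if len(rv) > 0:
    RV = median(rv)
    RV_err = median(rverr)
else:
    RV = "INDEF"        # assumed sentinel, matching the INDEF convention used elsewhere in these scripts
    RV_err = "INDEF"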