import functions as f
import matplotlib.pyplot as plt
import numpy as np
import time as t

t0 = t.time()

kplr_id = '008191672'
kplr_file = 'kplr008191672-2013011073258_llc.fits'
jdadj, obsobject, lightdata = f.openfile(kplr_id, kplr_file)

time, flux, flux_err = f.fix_data(lightdata)
flux, variance = f.rescale(flux, flux_err)
time -= np.median(time)   #center times on the median epoch

depth = 0.00650010001       #fractional transit depth
width = 0.177046694669      #transit duration (days)

period_interval = np.arange(2.00, 8.0, 0.01)     #trial periods (days)
offset_intervals = np.arange(0.00, 7.2, 0.01)    #trial transit offsets (days)

#TODO: vectorize with numpy arrays to optimize (see the sketch below the loop).
# z = [[f.sum_chi_squared(flux, f.box(p, o, depth, width, time), variance) for o in offset_intervals]
#      for p in period_interval]

z = []
for p in period_interval:
    line = []
    for o in offset_intervals:
        if o < p:
            line.append(f.sum_chi_squared(flux, f.box(p, o, depth, width, time), variance))
        else:
            #Offsets beyond one full period are redundant; pad so rows stay rectangular.
            line.append(np.inf)
    z.append(line)
z = np.array(z)

best_p, best_o = np.unravel_index(np.argmin(z), z.shape)
print "Best period", period_interval[best_p], "offset", offset_intervals[best_o]
print "Grid search time", t.time() - t0

plt.imshow(z, aspect='auto', origin='lower',
           extent=[offset_intervals[0], offset_intervals[-1],
                   period_interval[0], period_interval[-1]])
plt.xlabel("Offset (days)")
plt.ylabel("Period (days)")
plt.show()
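
#A vectorized sketch of the same grid search, to go with the TODO above. Two
#assumptions here are not confirmed by functions.py: f.box() is taken to be a
#unit continuum dropping by `depth` inside the phase-folded transit window, and
#f.sum_chi_squared(flux, model, var) is taken to be sum((flux - model)**2 / var).
def chi_squared_grid(time, flux, variance, periods, offsets, depth, width):
    #Phase-fold every sample against every trial period at once:
    #phase has shape (n_periods, n_samples).
    phase = np.mod(time[None, :], periods[:, None])
    z = np.full((periods.size, offsets.size), np.inf)
    for j, o in enumerate(offsets):
        in_transit = np.abs(phase - o) < width / 2.0
        model = np.where(in_transit, 1.0 - depth, 1.0)
        chi2 = ((flux[None, :] - model) ** 2 / variance[None, :]).sum(axis=1)
        z[o < periods, j] = chi2[o < periods]   #same o < p guard as the loop above
    return z

#Usage: z = chi_squared_grid(time, flux, variance, period_interval, offset_intervals, depth, width)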
Example #3

import time as timer

import kplr
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.backends.backend_pdf import PdfPages

import functions

client = kplr.API()

def main(kplr_id):
    star = client.star(kplr_id)
    width = 1.0

    lcs = star.get_light_curves(short_cadence=False)

    med_flux_list = []
    filter_size = 80

    time_list, flux_list, var_list, ferr_list = [], [], [], []
    start_time = timer.time()
    for lc in lcs:
        with lc.open() as f:
            # The lightcurve data are in the first FITS HDU.
            hdu_data = f[1].data
            time, flux, ferr = functions.fix_data(hdu_data)
            time_list.append(time)
            flux_list.append(flux)
            median = functions.median_filter(flux, filter_size)
            ferr_list.append(ferr / median)
            var_list.append((ferr / median)**2)
            med_flux_list.append(flux / median)

    time = np.concatenate(time_list)
    flux = np.concatenate(flux_list)
    med_flux = np.concatenate(med_flux_list)
    inv_var = 1 / (np.concatenate(var_list))
    ferr = np.concatenate(ferr_list)

    print "Loading data", timer.time() - start_time

    #Run the search.
    #time_grid coarsens the time sampling (one trial transit time every width/8
    #days), so the search evaluates far fewer models than the full cadence would.
    time_grid = np.arange(min(time), max(time), width / 8)
    print time.shape, time_grid.shape
    start_time = timer.time()
    # main_array = np.asarray([functions.get_depth_and_ln_like(med_flux, o, width, time, inv_var) for o in time_grid])
    main_array = np.array([
        functions.get_depth_and_ln_like(med_flux, o, width, time, inv_var)
        for o in time_grid
    ])
    transit_boolean_array = np.array(
        [functions.get_transit_boolean(o, width, time) for o in time_grid])
    print "Search time", timer.time() - start_time
    depth_array = main_array[:, 0]
    depth_variance_array = main_array[:, 1]
    ln_like_array = main_array[:, 2]

    #Use the peak_finder function to obtain the boolean arrays used to
    #get the required "transit window" values.
    start_time = timer.time()
    complete_boolean_list, grid_boolean_list, peaks, peak_index = functions.peak_finder(
        ln_like_array, time, time_grid, width, 5)
    print "Peak finding time", timer.time() - start_time
    print peaks
    print peak_index

    pp = PdfPages('../plots/{0}_width_{1}.pdf'.format(kplr_id, width))
    fig1 = plt.figure()

    sub1 = fig1.add_subplot(211)
    sub1.plot(time, med_flux, '.k', markersize=2)
    sub1.set_xlabel("Days")
    sub1.set_ylabel("Median-filtered Flux")
    sub1.grid()

    sub2 = fig1.add_subplot(212)
    sub2.plot(time_grid, ln_like_array, '.b', markersize=2)
    sub2.set_xlabel("Days")
    sub2.set_ylabel("ln_Likelihood")
    sub2.grid()

    pp.savefig()

    fig2 = plt.figure()
    sub3 = fig2.add_subplot(211)
    sub3.plot(time_grid, depth_array, '.b', markersize=2)
    sub3.set_xlabel("Days")
    sub3.set_ylabel("Depth")
    sub3.grid()

    sub4 = fig2.add_subplot(212)
    sub4.plot(time, flux, '.b', markersize=2)
    sub4.set_xlabel("Days")
    sub4.set_ylabel("Raw Flux")
    sub4.grid()
    pp.savefig()
    #Plot the peaks and their likelihoods.
    functions.plot_peaks(complete_boolean_list, grid_boolean_list, time,
                         med_flux, ferr, time_grid, ln_like_array, depth_array,
                         transit_boolean_array, peak_index, pp)

    pp.savefig()
    pp.close()
    plt.close('all')
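
#A hedged reconstruction of what functions.get_depth_and_ln_like plausibly
#computes (the real implementation is not shown here): at a fixed transit time
#t0 the depth enters the box model linearly, so the best-fit depth, its
#variance, and the ln-likelihood gain over a flat model follow from weighted
#linear least squares against the box basis.
def depth_and_ln_like(flux, t0, width, time, inv_var):
    a = (np.abs(time - t0) < width / 2.0).astype(float)   #box basis: 1 in transit
    if not a.any():
        return 0.0, np.inf, 0.0          #no samples fall in the window
    r = flux - 1.0                       #median-filtered flux sits near 1
    ata = np.sum(a * a * inv_var)        #A^T C^-1 A
    atr = np.sum(a * r * inv_var)        #A^T C^-1 r
    depth = -atr / ata                   #best-fit depth (positive for a dip)
    depth_var = 1.0 / ata                #variance of the depth estimate
    dln_like = 0.5 * atr ** 2 / ata      #ln-like improvement over no transit
    return depth, depth_var, dln_like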
Example #5

import time as timer

import h5py
import kplr
import numpy as np

import functions
client = kplr.API()

star = client.star(8800954)

lcs = star.get_light_curves(short_cadence=False)

med_flux_list = []
filter_size = 80

time_list, flux_list, ferr_list = [], [], []
for lc in lcs:
    with lc.open() as f:
        # The lightcurve data are in the first FITS HDU.
        hdu_data = f[1].data
        time, flux, ferr = functions.fix_data(hdu_data)
        time_list.append(time)
        flux_list.append(flux)
        median = functions.median_filter(flux, filter_size)
        ferr_list.append(ferr / median)
        med_flux_list.append(flux / median)

time = np.concatenate(time_list)
flux = np.concatenate(flux_list)
med_flux = np.concatenate(med_flux_list)

depth = 0.003
width = 0.1

#Run the search
ln_like_perfect = np.asarray([
    functions.pre_ln_like(med_flux, functions.push_box_model(o, depth, width, time))
    for o in time
])
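
#A hedged sketch of the detrending helper used throughout: functions.median_filter
#plausibly returns a running median of the flux. scipy.signal.medfilt (an
#assumption, not the author's implementation) gives the same effect.
from scipy.signal import medfilt

def median_filter(flux, filter_size):
    #medfilt requires an odd kernel, so an even filter_size (e.g. 80) is bumped to 81.
    k = filter_size if filter_size % 2 else filter_size + 1
    return medfilt(flux, kernel_size=k)
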
def main(kplr_id, width):
    star = client.star(kplr_id)
    lcs = star.get_light_curves(short_cadence=False)

    med_flux_list = []
    filter_size = 80

    time_list, flux_list, var_list, ferr_list = [], [], [], []
    start_time = timer.time()
    for lc in lcs:
        with lc.open() as f:
            # The lightcurve data are in the first FITS HDU.
            hdu_data = f[1].data
            time, flux, ferr = functions.fix_data(hdu_data)
            time_list.append(time)
            flux_list.append(flux)
            median = functions.median_filter(flux, filter_size)
            ferr_list.append(ferr/median)
            var_list.append((ferr / median)**2)
            med_flux_list.append(flux / median)

    time = np.concatenate(time_list)
    med_flux = np.concatenate(med_flux_list)
    inv_var = 1/(np.concatenate(var_list))
    ferr = np.concatenate(ferr_list)
    
    print "Loading data", timer.time() - start_time

    #Run the search.
    #time_grid coarsens the time sampling (one trial transit time every width/8
    #days), so the search evaluates far fewer models than the full cadence would.
    time_grid = np.arange(min(time), max(time), width/8)

    print time.shape, time_grid.shape
    start_time = timer.time()
    # main_array = np.asarray([functions.get_depth_and_ln_like(med_flux, o, width, time, inv_var) for o in time_grid])
    main_array = np.array([functions.get_depth_and_ln_like(med_flux, o, width, time, inv_var) for o in time_grid])
    # transit_boolean_array = np.array([functions.get_transit_boolean(o, width, time) for o in time_grid])
    print "Search time", timer.time() - start_time
    depth_array = main_array[:,0]
    depth_variance_array = main_array[:,1]
    ln_like_array = main_array[:,2]

    #Open, write, and close the output HDF5 file.
    # file_name = '../picklefiles/{0}_width_{1}.hdf5'.format(kplr_id, width)
    file_name = '../automatically_created_files/{0}_width_{1}.hdf5'.format(kplr_id, width)

    #'out' avoids shadowing the FITS handle f used in the loop above.
    with h5py.File(file_name, 'w') as out:
        out.create_dataset("time", data=time)
        out.create_dataset("med_flux", data=med_flux)
        out.create_dataset("inv_var", data=inv_var)
        out.create_dataset("ferr", data=ferr)
        out.create_dataset("time_grid", data=time_grid)
        # out.create_dataset("transit_boolean_array", data=transit_boolean_array)
        out.create_dataset("depth_array", data=depth_array)
        out.create_dataset("depth_variance_array", data=depth_variance_array)
        out.create_dataset("ln_like_array", data=ln_like_array)

    print time.shape
    # print transit_boolean_array.shape
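
#A minimal sketch of reading the search products back, assuming a file written
#by main() above (the ID and width in the name are illustrative).
def load_search(kplr_id, width):
    file_name = '../automatically_created_files/{0}_width_{1}.hdf5'.format(kplr_id, width)
    with h5py.File(file_name, 'r') as f:
        time_grid = f["time_grid"][...]
        ln_like_array = f["ln_like_array"][...]
        depth_array = f["depth_array"][...]
    best = np.argmax(ln_like_array)
    print "Most likely transit time", time_grid[best], "depth", depth_array[best]
    return time_grid, ln_like_array, depth_array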