Example #1
def EXTRACT():
    path = input("-> Enter path to target's exposure time directory: ")
    images = glob.glob(path + '/data/*.fits')
    psf_data = glob.glob(path + '/psf/*')
    #three files per image in the psf directory means PSF models have already been built,
    #so sources can be extracted and filtered right away
    if len(psf_data) == 3 * len(images):
        sex.sextractor(path)
        sex.src_filter(path)
    else:
        #otherwise build the PSF models first, then extract and filter sources
        sex.sextractor_psf(path)
        psf.psfex(path)
        sex.sextractor(path)
        sex.src_filter(path)
Example #2
def sim_fakes(location, n_fakes=20, iterations=50, input_mode='flux', PSF='moffat', subtract_method='ois', f_min=0, f_max=40000):
    '''Simulates transient signals (fakes) and tests **OASIS**'s ability to detect them. The procedure of the simulation is as follows:
        
            1. Makes a copy of the specified data set and moves it to the **simulations** directory.
            2. Chooses a random image out of the data set and adds in fakes.
            3. Runs the data set through the **OASIS Pipeline**.
            4. Outputs a catalog of all fakes and whether or not they were detected.
            5. Simulation is repeated with a different set of fakes.
            
            :param str location: Path of data file tree (contains the **configs**, **data**, **psf**, **residuals**, **sources**, **templates** directories). Use a comma-separated list for mapping to multiple datasets.
            :param int n_fakes: Number of fakes added to the chosen image.
            :param int iterations: Number of iterations the simulation goes through. The total number of fakes added is then :math:`n\_fakes * iterations`. It is recommended to choose *n_fakes* and *iterations* such that the total number of fakes is large: at least a few hundred, ideally more than 1000.
            :param str input_mode: How the fakes' brightness values are interpreted.
            
                * *flux* (default): The fake's brightness is interpreted as its total flux in ADU, drawn between the *f_min* and *f_max* parameters.
                * *mag*: Fake's brightness is given in magnitudes instead of ADU flux. *f_min* and *f_max* are then assumed to be apparent magnitudes rather than ADU counts.
            
            :param str PSF: Type of PSF model used for fake construction. See documentation for details.
            
                * *moffat* (default): Fakes are convolved with a 2D Moffat kernel.
                * *gaussian*: Fakes are convolved with a symmetric 2D Gaussian kernel.
                
            :param str subtract_method: Subtraction method used, can be either *ois* or *hotpants*, default is *ois*. See ``subtract`` method's documentation for details.
            :param float f_min: Minimum flux for fakes, given in ADU counts or apparent magnitudes depending on *input_mode*.
            :param float f_max: Maximum flux for fakes, given in ADU counts or apparent magnitudes depending on *input_mode*.
            :returns: Catalog of all fakes, the image they were added to, iteration, and whether or not they were detected. See documentation for details.
    
    '''
    try:
        #pick a random image from the dataset to inject fakes into
        dataImages = glob.glob(location + '/data/*.fits')
        #np.random.randint's upper bound is exclusive, so use len(dataImages) to allow any image
        imIndex = np.random.randint(low=0, high=len(dataImages))
        image = dataImages[imIndex]
        tarIndex = len(loc.split('/'))
        target = location.split('/')[tarIndex+2:tarIndex+3]
        target = target[0]
        #copy data to simulations directory
        check = loc + '/OASIS/simulations/' + target
        if not os.path.exists(check):
            copy_to_sim(target)
        #rename image paths to correspond to simulations directory
        image = image.replace('targets', 'simulations')
        image_name = image.split('/')[-1]
        length = len(image_name) + 6
        location = image[:-length]
        location = location.replace(target, "%s_fakes" % (target))
        image = image.replace(target, "%s_fakes" % (target))
    #    #copy image to be faked to exposure time directory so it can be retrieved
    #    os.system('cp %s %s' % (image, location))
        #define location of simulation results files
        fake_txt = location + '/results_fake.txt'
        MR_txt = location + '/results_MR.txt'
        #get PSF FWHM of original input image
        if not os.path.exists(image.replace('data','psf')[:-4]+'cat'):
            sex.sextractor_psf_sim(location, image)
            psfex(location)    
        FWHM = fwhm(image)
        #get input image data and header
        image_hdu = fits.open(image)
        image_header = image_hdu[0].header
        image_data = image_hdu[0].data
        image_mask = image_hdu[1].data
        image_hdu.close()
        shape = image_data.shape
        #move fake image to configs directory
        os.system("mv %s %s/configs" % (image, location))
        #redefine location of image
        image_new_loc = image.replace('data', 'configs')
        #convert input mags to fluxes
        if input_mode == 'mag':
            f_min = mag_to_flux(image, f_min)
            f_max = mag_to_flux(image, f_max)
        fake_name = image
        #perform simulation for 'iterations' number of loops
        for i in tqdm(range(iterations)):
            #define blank results slates
            fake_results = []
            MR_results = []
            #delete all previous simulation data
            clear_image(image)
            #make 'n_fakes' fluxes
            print("-> Creating fakes...")
            flux_scales = np.random.random(n_fakes)
            flux = ((f_max-f_min)*flux_scales) + f_min
            x = [round(shape[0]*np.random.random()) for i in range(n_fakes)]
            y = [round(shape[1]*np.random.random()) for j in range(n_fakes)]
            #print fake sources' info
            print("-> Fake fluxes: \n" + "-> " + str(flux))
            print("-> Fake x: \n" + "-> " + str(x))
            print("-> Fake y: \n" + "-> " + str(y))
            print("-> Fake PSF: %s" % (PSF))
            print("-> Fake FWHM: %.3f\n" % (FWHM))
            if PSF == 'gaussian':
                #make fake image with Gaussian profile
                print("-> Gaussian smearing fakes...")
                gaussian_kernel_1 = Gaussian2DKernel(x_stddev=(FWHM/2.355), y_stddev=(FWHM/2.355))
                gaussian_kernel_2 = Gaussian2DKernel(x_stddev=((FWHM*2)/2.355), y_stddev=((FWHM*2)/2.355))
                conv_kernel = (0.9*gaussian_kernel_1) + (0.1*gaussian_kernel_2)
                fake = make_stars.make_image(shape[0], shape[1], x_loc=x, y_loc=y, fluxes=flux, psf=[conv_kernel])
            elif PSF == 'moffat':
                print("-> Moffat smearing fakes...")
                #define Moffat convolution kernel
                conv_kernel = Moffat2DKernel(gamma=make_stars.get_moffat_gamma(FWHM), alpha=4.765)
                #make image using the fluxes and positions defined earlier, then convolve with the above kernel
                fake = make_stars.make_image(shape[0], shape[1], x_loc=x, y_loc=y, fluxes=flux, psf=[conv_kernel])
            #add fakes to the original image and overwrite the original FITS file
            print("-> Adding fake to original image...")
            fake += image_data
            hduData = fits.PrimaryHDU(fake, header=image_header)
            hduMask = fits.ImageHDU(image_mask)
            hduList = fits.HDUList([hduData, hduMask])
            hduList.writeto(fake_name, overwrite=True)
            #run images through pipeline
            subtract.subtract_run(location, method=subtract_method)
            #run SExtractor on only fake image
            sex.sextractor_sim(fake_name.replace('_N_', '_A_'))
            #run SExtractor also on master residual to look for fakes
            sex.sextractor_MR(location)
            #find any fakes that were detected by SExtractor in fake catalog
            with open(location+'/sources/filtered_sources.txt', 'r') as src:
                detects = src.readlines()
            for n in range(n_fakes):
                found = 0
                for d in detects:
                    try:
                        float(d.split()[0])
                        if (y[n]-2)<float(d.split()[2])<(y[n]+2) and (x[n]-2)<float(d.split()[3])<(x[n]+2):
                            found += 1
                    except (ValueError, IndexError):
                        #skip header or malformed lines in the catalog
                        pass
                fake_results.append([i,image_name,x[n],y[n],flux[n],found])
            #write simulation results to fake_results.txt file
            with open(fake_txt, 'a+') as sim_data:
                sim_data.writelines(tabulate(fake_results))
            #find any fakes that were detected by SExtractor in MR catalog
            with open(location+'/sources/MR_sources_filtered.txt', 'r') as src:
                detects = src.readlines()
            for n in range(n_fakes):
                found = 0
                for d in detects:
                    try: 
                        float(d.split()[0])
                        if (y[n]-2)<float(d.split()[2])<(y[n]+2) and (x[n]-2)<float(d.split()[3])<(x[n]+2):
                            found += 1
                    except (ValueError, IndexError):
                        #skip header or malformed lines in the catalog
                        pass
                MR_results.append([i,image_name,x[n],y[n],flux[n],found])
            #write simulation results to MR_results.txt file
            with open(MR_txt, 'a+') as sim_data:
                sim_data.writelines(tabulate(MR_results))
        #move fake image from configs back to data directory
        os.system("mv %s %s/data" % (image_new_loc, location))
    except KeyboardInterrupt:
        print('\n-> Interrupted-- Exiting..')
        try:
            clear_image(image)
            os.system("mv %s %s/data" % (image_new_loc, location))
            sys.exit(0)
        except SystemExit:
            os._exit(0)
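
The docstring above lays out the full simulation flow and its parameters; the short sketch below shows how sim_fakes might be called. The keyword values mirror the signature defined in this example, but the target path is a hypothetical placeholder and assumes an OASIS file tree (configs, data, psf, residuals, sources, templates) already exists at that location.

#minimal usage sketch: the path below is a placeholder, not a real dataset
target_location = '/home/user/OASIS/targets/MY_TARGET/g_band/120'

#inject 20 fakes per iteration for 50 iterations (1000 fakes in total),
#with fluxes drawn uniformly between 500 and 40000 ADU and OIS subtraction
sim_fakes(target_location, n_fakes=20, iterations=50, input_mode='flux',
          PSF='moffat', subtract_method='ois', f_min=500, f_max=40000)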
Example #3
def TEST():
    #get data from LCO public archive and put in target directory under 'TEST' folder
    print("-> Getting data from LCO...")
    response = requests.get('https://archive-api.lco.global/frames/?' +
                            'RLEVEL=91&' +
                            'PROPID=standard&' +
                            'OBJECT=L113&' +
                            'FILTER=B&' +
                            'start=2018-9-14&' +
                            'end=2018-9-15&'
                            ).json()
    
    frames = response['results']
    
    #keep only the two frames known to be good (ids 9602135 and 9602132); discard the rest
    frames = [fr for fr in frames if fr['id'] in (9602135, 9602132)]
    
    #download data
    temp_loc = loc + '/sdi/temp/'
    os.mkdir(temp_loc+'test_data')
    for frame in frames:
        with open(temp_loc + 'test_data/' + frame['filename'], 'wb') as f:
            f.write(requests.get(frame['url']).content)
        
    #funpack and move to 'TEST' folder
    obtain.process()
    obtain.movetar()
    old_data_location = obtain.rename()
    data_location = old_data_location.replace('L113', 'TEST')
    os.rename(old_data_location[:29], data_location[:29])
    
    #align and combine images
    test_loc = data_location[:-5]
    check_saturation.check_saturate(test_loc + '/data')
    ref_image.ref_image(test_loc + '/data')
    align_astroalign.align2(test_loc + '/data')
#    align_skimage.skimage(test_loc + '/data')
    combine_numpy.combine_median(test_loc + '/data')
#    align_skimage.skimage_template(test_loc + '/data')
    
    #add three fake stars to reference image
    print("\n-> Adding fake stars to test image...")
    hdu = fits.getdata(test_loc + '/data/09:14:00.260_A_.fits')
    
    h, w = img_shape = np.shape(hdu)
    pos_x = [1500,2000,1200]
    pos_y = [1600,1400,2200]
    array = np.array([ 0.65343465,  0.50675629,  0.84946314])
    fluxes = 2000000.0 + array * 300.0
    img = np.zeros(img_shape)
    for x, y, f in zip(pos_x, pos_y, fluxes):
        img[x, y] = f
    
    img = gaussian_filter(img, sigma=15.0, mode='constant')
    
    final = fits.PrimaryHDU(hdu+img)
    final.writeto(test_loc + '/data/09:14:00.260_A_.fits', overwrite=True)
    
    #subtract images using ISIS
    subtract_ais.isis_sub_test(test_loc)
    
    #get PSFs then perform SExtractor on residual images
    sex.sextractor_psf(test_loc)
    psf.psfex(test_loc)
    sex.sextractor(test_loc)
    
    #test the results of the test function against known values
#    with open(test_loc + '/sources/sources.txt', 'r') as source:
#        lines = source.readlines()
#        source.close()
#    
#    with open(os.path.dirname(sex.__file__) + '/test_config/test_sources.txt', 'r') as test_source:
#        lines2 = test_source.readlines()
#        test_source.close()
        
    res_image_loc = os.path.dirname(sex.__file__) + '/test_config/09:14:00.260_A_residual_.fits'
    
    test_image_data = fits.getdata(res_image_loc)
    
    residual = glob.glob(test_loc + '/residuals/*_A_residual_.fits')
    
    residual_data = fits.getdata(residual[0])
#
#    if lines == lines2:
#        print("\t-> Sources matched to control")
    if np.array_equal(test_image_data, residual_data):
        print("-> Residuals matched to control\n-> TEST SUCCESSFUL!")
#    if lines == lines2 and test_image_data.all() == residual_data.all():
#        print("\t-> Test successful!")
    if not np.array_equal(test_image_data, residual_data):
        print("-> Test failure: Results do not match controls")
    
    #display final residual test image
    os.system('ds9 %s -scale zscale' % (residual[0]))
Example #4
import sex
import psf
import glob


def EXTRACT():
    path = input("-> Enter path to target's exposure time directory: ")
    images = glob.glob(path + '/data/*.fits')
    psf_data = glob.glob(path + '/psf/*')
    #three files per image in the psf directory means PSF models have already been built,
    #so sources can be extracted and filtered right away
    if len(psf_data) == 3 * len(images):
        sex.sextractor(path)
        sex.src_filter(path)
    else:
        #otherwise build the PSF models first, then extract and filter sources
        sex.sextractor_psf(path)
        psf.psfex(path)
        sex.sextractor(path)
        sex.src_filter(path)


if __name__ == '__main__':
    EXTRACT()