Example #1
def int_match_to_template(location, image, template, nx=100, ny=100):
    print("\n-> Matching flux scale to template...")
    initialize.create_configs(location)
    linmatch_loc = location + '/configs/linmatch.txt'
    if os.path.exists(linmatch_loc) == False:
        os.system('touch %s' % (linmatch_loc))
    pyraf.iraf.linmatch(image + '[0]',
                        template + '[0]',
                        'grid %dx %dy' % (nx, ny),
                        linmatch_loc,
                        output=image + 'TEMP')
    temp_image = glob.glob(location + '/data/*TEMP.fits')
    temp_image_hdu = fits.open(temp_image[0])
    temp_image_data = temp_image_hdu[0].data
    image_hdu = fits.open(image)
    image_hdr = image_hdu[0].header
    image_mask = image_hdu[1].data
    temp_image_masked = np.ma.MaskedArray(temp_image_data,
                                          mask=((image_mask - 1) * -1))
    temp_image_median = np.ma.median(temp_image_masked)
    temp_image_hdu.close()
    image_hdu.close()
    hduData = fits.PrimaryHDU(temp_image_data, header=image_hdr)
    hduMask = fits.ImageHDU(image_mask)
    hduList = fits.HDUList([hduData, hduMask])
    hduList.writeto(image, overwrite=True)
    median_hdu = fits.open(image, mode='update')
    (median_hdu[0].header).set('MEDIAN', str(temp_image_median))
    median_hdu.close()
    os.system("rm %s" % (temp_image[0]))
Example #2
def sextractor_psf_sim(location, image):
    psf_loc = location + "/psf"
    data = location + "/data"
    check = os.path.exists(psf_loc)
    length = len(data) + 1
    if check == False:
        os.system("mkdir %s" % (psf_loc))
    initialize.create_configs(location)
    config_loc = location + '/configs/psf.sex'
    with open(config_loc, 'r') as config:
        data = config.readlines()
        config.close()
    data[9] = "PARAMETERS_NAME" + "        " + location + "/configs/default.psfex" + "\n"
    data[20] = "FILTER_NAME" + "        " + location + "/configs/default.conv" + "\n"
    with open(config_loc, 'w') as config:
        config.writelines(data)
        config.close()
    print("\n-> Creating PSF catalog of fake image...")
    name = image[length:-5]
    with open(config_loc, 'r') as config:
        data = config.readlines()
        config.close()
    data[6] = "CATALOG_NAME" + "        " + psf_loc + "/" + name + ".cat" + "\n"
    with open(config_loc, 'w') as config:
        config.writelines(data)
        config.close()
    os.system("sextractor %s[0] -c %s" % (image, config_loc))
Example #3
def psfex(location):
    psf_loc = location + '/psf'
    cats = glob.glob(psf_loc + '/*.cat')
    initialize.create_configs(location)
    config_loc = location + '/configs/psfex.config'
    print("\n-> Calculating PSFs...\n")
    for cat in cats:
        with open(config_loc, 'r') as config:
            data = config.readlines()
            config.close()
        data[83] = "PSF_DIR" + "        " + location + "/psf" + "\n"
        with open(config_loc, 'w') as config:
            config.writelines(data)
            config.close()
        os.system("psfex %s > %s.psf -c %s" % (cat, cat[:-4], config_loc))
Example #4
def sextractor_sim(image):
    location = image.split('/')[:-2]
    location = '/'.join(location)
    sources = location + "/sources"
    check = os.path.exists(sources)
    check_temp = os.path.exists(sources + '/temp')
    if check == False:
        os.system("mkdir %s" % (sources))
        os.system("mkdir %s/temp" % (sources))
    else:
        if check_temp == False:
            os.system("mkdir %s/temp" % (sources))
    initialize.create_configs(location)
    config_loc = location + '/configs/default.sex'
    with open(config_loc, 'r') as config:
        data = config.readlines()
        config.close()
    data[9] = "PARAMETERS_NAME" + "        " + location + "/configs/default.param" + "\n"
    data[20] = "FILTER_NAME" + "        " + location + "/configs/default.conv" + "\n"
    with open(config_loc, 'w') as config:
        config.writelines(data)
        config.close()
    print("\n-> SExtracting fake image...")
    name = image.split('/')[-1]
    with open(config_loc, 'r') as config:
        data = config.readlines()
        config.close()
    data[106] = "PSF_NAME" + "        " + location + "/psf/" + name[:-5] + ".psf" + "\n"
    with open(config_loc, 'w') as config:
        config.writelines(data)
        config.close()
    os.system("sextractor %s[0]> %s/temp/%s.txt -c %s" %
              (image, sources, name, config_loc))
    temp_hdu_data = fits.PrimaryHDU((fits.getdata(image)) * -1,
                                    header=fits.getheader(image))
    temp_hdu_mask = fits.ImageHDU(fits.getdata(image, 1))
    temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])
    temp_hdu_list.writeto("%s/residuals/temp.fits" % (location))
    os.system("sextractor %s/residuals/temp.fits[0]> %s/temp/%s.txt -c %s" %
              (location, sources, name, config_loc))
    os.remove("%s/residuals/temp.fits" % (location))
    src_join(location)
    filter_sources(location)
Example #5
def COMBINE(path):
    '''Stacks science images into a high *S/N* template frame. The stacking method is a weighted median of each pixel, performed by the AstrOmatic software ``SWarp`` (E. Bertin). Only the top third of science images with respect to seeing is included in the template.
    
    :param str path: Path of data file tree (contains the **configs**, **data**, **psf**, **residuals**, **sources**, **templates** directories). Use a comma-separated list for mapping to multiple datasets.
    :returns: The weighted median coaddition of the science images is written to the **templates** directory with the naming convention *StackMethod_NumberOfImagesInDataset.fits*.
    
    '''
    paths = (path.replace(' ','')).split(',')
    del path
    for path in paths:
        location = path + '/data'
        if os.path.exists(path):
            initialize.create_configs(path)
            combine_swarp.swarp(location)
        else:
            print("\n-> Error: Unknown path entered\n-> Please enter the path to an existing exposure time directory\n-> Exiting...\n")
            sys.exit()
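As the docstring says, ``path`` may be a comma-separated list; COMBINE strips spaces, splits on the commas, and stacks each dataset in turn. A usage sketch with hypothetical target directories (the paths are examples only, following the configs/data/psf/... layout described above):

# single dataset
COMBINE('/home/user/OASIS/targets/target01/g_band/120s')

# several datasets in one call, comma-separated
COMBINE('/home/user/OASIS/targets/target01/g_band/120s,'
        ' /home/user/OASIS/targets/target01/r_band/120s')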
Example #6
def psfex(location, images=[]):
    initialize.create_configs(location)
    config_loc = location + '/configs/psfex.config'
    with open(config_loc, 'r') as config:
        data = config.readlines()
        config.close()
    data[83] = "PSF_DIR" + "        " + location + "/psf" + "\n"
    with open(config_loc, 'w') as config:
        config.writelines(data)
        config.close()
    print("\n-> Calculating PSFs...")
    for im in images:
        cat = im.replace('data','psf')
        cat = cat.replace('templates', 'psf')
        cat = cat[:-5] + '.cat'
        os.system("psfex %s > %s.psf -c %s" % (cat, cat[:-4], config_loc))
    if images == []:
        print("-> PSFs already exist")
Example #7
def phose_sex(location):
    """
    create source catalogs for each science image using SExtractor, combine them
    into a master catalog called 'phose_sources.txt' in the data directory
    
    also creates a template catalog used for phose thresholding
    """

    initialize.create_configs(location)
    try:
        os.mkdir("%s/data/cats" % (location))
    except FileExistsError:
        # catalog directory already exists from a previous run
        pass
    images = glob.glob("%s/data/*.fits" % (location))
    config = "%s/configs/phose_default.sex" % (location)
    param = "%s/configs/phose_default.param" % (location)
    for i in tqdm(images):
        output_cat = i.replace('data', 'data/cats')
        output_cat = output_cat.replace('fits', 'txt')
        with open(config, 'r') as conf:
            conf_lines = conf.readlines()
        conf_lines[9] = "PARAMETERS_NAME" + "        " + param + "\n"
        conf_lines[32] = "WEIGHT_IMAGE" + "        " + "%s[1]" % (i) + "\n"
        with open(config, 'w') as conf:
            conf.writelines(conf_lines)
        os.system("sextractor %s[0] > %s -c %s" % (i, output_cat, config))
    sex.src_join(location, phose=True)
    template = glob.glob("%s/templates/*.fits" % (location))
    if template == []:
        print(
            "-> Error: template not found\n-> Run combine.py first\n-> Exiting..."
        )
        sys.exit()
    template = template[0]
    output_template_cat = "%s/templates/phose_template.cat" % (location)
    with open(config, 'r') as conf:
        conf_lines = conf.readlines()
    conf_lines[9] = "PARAMETERS_NAME" + "        " + param + "\n"
    conf_lines[32] = "WEIGHT_IMAGE" + "        " + "%s[1]" % (template) + "\n"
    with open(config, 'w') as conf:
        conf.writelines(conf_lines)
    os.system("sextractor %s[0] > %s -c %s" %
              (template, output_template_cat, config))
Example #8
def int_match_to_ref(location, nx=100, ny=100, use_config_file=True):
    print("\n-> Matching flux scales to reference image...")
    ref_im = glob.glob(location + '/data/*_ref_A_.fits')
    images = glob.glob(location + '/data/*_A_.fits')
    initialize.create_configs(location)
    linmatch_loc = location + '/configs/linmatch.txt'
    if os.path.exists(linmatch_loc) == False:
        os.system('touch %s' % (linmatch_loc))
    if use_config_file == True:
        nx = initialize.get_config_value('int_match_nx',
                                         file_loc=location + '/configs')
        ny = initialize.get_config_value('int_match_ny',
                                         file_loc=location + '/configs')
    for i in images:
        if i != ref_im[0]:
            try:
                fits.getval(i, 'SCALED')
            except KeyError:
                fits.setval(i, 'SCALED', value='N')
            if fits.getval(i, 'SCALED') == 'N':
                pyraf.iraf.linmatch(i + '[0]',
                                    ref_im[0] + '[0]',
                                    'grid %dx %dy' % (nx, ny),
                                    linmatch_loc,
                                    output=i + 'TEMP')
                temp_image = glob.glob(location + '/data/*TEMP.fits')
                temp_image_hdu = fits.open(temp_image[0])
                temp_image_data = temp_image_hdu[0].data
                image_hdu = fits.open(i, mode='update')
                image_hdr = image_hdu[0].header
                image_hdr.set('SCALED', 'Y')
                image_mask = image_hdu[1].data
                temp_image_hdu.close()
                image_hdu.close()
                image_hdr = fits.getheader(i)
                hduData = fits.PrimaryHDU(temp_image_data, header=image_hdr)
                hduMask = fits.ImageHDU(image_mask)
                hduList = fits.HDUList([hduData, hduMask])
                hduList.writeto(i, overwrite=True)
                os.system("rm %s" % (temp_image[0]))
Example #9
def swarp(location, template_perc=0.33, use_config_file=True):
    location = location[:-5]
    temps = glob.glob(location + '/templates/*.fits')
    images = glob.glob(location + '/data/*_A_.fits')
    imNum = len(images)
    numImages = 0
    if use_config_file == True:
        template_perc = initialize.get_config_value('template_perc')
    if len(temps) == 1:
        temps_name = temps[0].split('/')[-1]
        numImages = int((temps_name.split('.'))[0].split('_')[-1])
    if len(temps) == 0 or numImages != len(images):
        #delete old template
        if len(temps) != 0:
            template_name = temps[0].split('/')[-1]
            os.remove(temps[0])
            try:
                os.remove("%s/psf/%s.cat" % (location, template_name[:-5]))
                os.remove("%s/psf/%s.psf" % (location, template_name[:-5]))
            except:
                pass
        #change image shapes to match each the smallest image in the set
        print("\n-> Slicing images to a common FOV...")
        shapes = []
        areas = []
        for i in tqdm(images):
            image_data = fits.getdata(i)
            shapes.append(image_data.shape)
            areas.append((image_data.shape)[0] * (image_data.shape)[1])
        min_index = areas.index(min(areas))
        #        correct_shape = max(set(shapes), key=shapes.count)
        correct_shape = shapes[min_index]
        print("\n-> FOV size (x,y): (%d, %d)" %
              (correct_shape[0], correct_shape[1]))
        for index in tqdm(range(len(shapes))):
            s = shapes[index]
            diff = tuple(np.subtract(s, correct_shape))
            im = images[index]
            im_hdu = fits.open(im)
            im_data = im_hdu[0].data
            im_header = im_hdu[0].header
            im_mask = (im_hdu[1].data).astype(int)
            if diff != (0, 0):
                if diff[0] < 0:
                    im_data = np.concatenate(
                        (im_data, np.zeros((diff[0] * -1, s[1]))), axis=0)
                    im_mask = np.concatenate(
                        (im_mask, np.ones((diff[0] * -1, s[1]))), axis=0)
                if diff[0] > 0:
                    im_data = im_data[:-1 * diff[0]]
                    im_mask = im_mask[:-1 * diff[0]]
                if diff[1] < 0:
                    im_data = np.concatenate(
                        (im_data, np.zeros((im_data.shape[0], diff[1] * -1))),
                        axis=1)
                    im_mask = np.concatenate(
                        (im_mask, np.ones((im_mask.shape[0], diff[1] * -1))),
                        axis=1)
                if diff[1] > 0:
                    im_data = im_data[:, :diff[1] * -1]
                    im_mask = im_mask[:, :diff[1] * -1]
                hduData = fits.PrimaryHDU(im_data, header=im_header)
                hduMask = fits.ImageHDU(im_mask.astype(int))
                hduList = fits.HDUList([hduData, hduMask])
                hduList.writeto(im, overwrite=True)
            im_hdu.close()

        #change all masks into weight maps
        print("\n-> Converting all image masks into weight maps...")
        for i in tqdm(images):
            weight = sex.weight_map(i)
            hdu = fits.open(i, mode='update')
            data = hdu[0].data
            hdr = hdu[0].header
            try:
                if hdr['WEIGHT'] == 'N':
                    hdr.set('WEIGHT', 'Y')
                    hduData = fits.PrimaryHDU(data, header=hdr)
                    hduWeight = fits.ImageHDU(weight)
                    hduList = fits.HDUList([hduData, hduWeight])
                    hduList.writeto(i, overwrite=True)
            except KeyError:
                hdr.set('WEIGHT', 'Y')
                hduData = fits.PrimaryHDU(data, header=hdr)
                hduWeight = fits.ImageHDU(weight)
                hduList = fits.HDUList([hduData, hduWeight])
                hduList.writeto(i, overwrite=True)
            hdu.close()
        # choose only the top template_perc seeing images
        try:
            FWHMs = []
            for im in images:
                FWHMs.append(psf.fwhm(im))
            template_images = []
            while len(template_images) < round(template_perc * len(images)):
                template_images.append(images[FWHMs.index(np.min(FWHMs))])
                FWHMs.remove(np.min(FWHMs))
            images = template_images
        except FileNotFoundError:
            print(
                "-> Error: PSF models do not exist, run PSF method first then try again."
            )
            sys.exit()
        initialize.create_configs(location)
        config_loc = location + '/configs/default.swarp'
        if os.path.exists(config_loc):
            template = location + "/templates/swarp_median_" + str(
                imNum) + ".fits"
            with open(config_loc, 'r') as config:
                data = config.readlines()
                config.close()
            data[4] = "IMAGEOUT_NAME" + "        " + template + "\n"
            data[15] = "WEIGHT_IMAGE" + "        " + "@%s/templates/weights.txt" % (location) + "\n"
            data[36] = "IMAGE_SIZE" + "        " + "%s, %s" % correct_shape[::-1] + "\n"
            with open(config_loc, 'w') as config:
                config.writelines(data)
                config.close()
            time = strftime("%Y-%m-%d %H:%M:%S", gmtime())
            og_templates = glob.glob(location + "/templates/*.fits")
            log_loc = location + "/templates/log.txt"
            tlist_loc = location + "/templates/template_inputs.txt"
            weight_list = "%s/templates/weights.txt" % (location)
            log_list = open(log_loc, "a+")
            template_list = open(tlist_loc, "w+")
            for i in images:
                template_list.write(str(i) + "[0]" + "\n")
            template_list.close()
            with open(weight_list, 'w+') as w:
                for i in images:
                    w.write("%s[1]\n" % (i))
            if images == []:
                print("-> No aligned images to combine\n")
            else:
                try:
                    print("-> Images being combined...\n")
                    os.system("swarp @%s -c %s" % (tlist_loc, config_loc))
                    log_list.write(
                        "template updated at %s UTC | method = median (SWarp) | images = %d\n"
                        % (str(time), len(images)))
                    log_list.close()
                    if len(og_templates) > 0:
                        for o in og_templates:
                            os.system("mv %s %s/OASIS/archive/templates" %
                                      (o, initialize.loc))
                    print(
                        "\n-> Image combination successful!\n-> Template log updated\n"
                    )
                except:
                    print("-> Image combination failed\n")
                    sys.exit()
            temp_hdu = fits.open(template)
            temp_data = temp_hdu[0].data
            temp_hdr = temp_hdu[0].header
            try:
                temp_mask = fits.getdata(
                    os.path.dirname(initialize.__file__) +
                    '/coadd.weight.fits')
            except:
                try:
                    temp_mask = fits.getdata(
                        os.path.dirname(initialize.__file__) +
                        '/AIS_temp/coadd.weight.fits')
                except:
                    print(
                        '-> Error: can\'t find coadd.weight.fits\n-> Exiting...'
                    )
                    sys.exit()
            mask_median = np.median(temp_mask)
            mask_std = np.std(temp_mask)
            threshold = mask_median - (mask_std)
            temp_mask[temp_mask < threshold] = 0
            temp_mask[temp_mask >= threshold] = 1
            masked_data = np.ma.masked_array(temp_data, mask=temp_mask)
            temp_median = np.ma.median(masked_data)
            temp_hduData = fits.PrimaryHDU(temp_data, header=temp_hdr)
            temp_hduMask = fits.ImageHDU(temp_mask)
            temp_hduList = fits.HDUList([temp_hduData, temp_hduMask])
            temp_hduList.writeto(template, overwrite=True)
            temp_hdu.close()
            temp_hdu = fits.open(template, mode='update')
            (temp_hdu[0].header).set('MEDIAN', str(temp_median))
            temp_hdu.close()
        else:
            print("\n-> No default.swarp file in target's config directory\n")
            sys.exit()
    else:
        print("-> Template already exists")
    try:
        os.remove(os.path.dirname(initialize.__file__) + '/coadd.weight.fits')
    except:
        pass
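For reference, the template's mask extension above is built by thresholding SWarp's coadd.weight.fits one standard deviation below its median weight and binarizing the result. A standalone sketch of that thresholding step with a made-up weight array (illustration only):

import numpy as np

temp_mask = np.random.default_rng(0).uniform(0.0, 2.0, size=(100, 100))  # stand-in weight map
threshold = np.median(temp_mask) - np.std(temp_mask)
temp_mask[temp_mask < threshold] = 0   # low-coverage pixels flagged as 0
temp_mask[temp_mask >= threshold] = 1  # everything else kept as 1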
Example #10
def MR_swarp(location):
    print("\n-> Constructing master residual...\n")
    residuals = glob.glob("%s/residuals/*residual_.fits" % (location))
    MR_loc = "%s/residuals/MR.fits" % (location)
    if residuals != []:
        if os.path.exists(MR_loc) == False:
            #first change all masks into weight maps
            print("-> Converting all residual masks into weight maps...\n")
            for r in tqdm(residuals):
                weight = sex.weight_map(r)
                hdu = fits.open(r, mode='update')
                data = hdu[0].data
                hdr = hdu[0].header
                try:
                    if hdr['WEIGHT'] == 'N':
                        hdr.set('WEIGHT', 'Y')
                        hduData = fits.PrimaryHDU(data, header=hdr)
                        hduWeight = fits.ImageHDU(weight)
                        hduList = fits.HDUList([hduData, hduWeight])
                        hduList.writeto(r, overwrite=True)
                except KeyError:
                    hdr.set('WEIGHT', 'Y')
                    hduData = fits.PrimaryHDU(data, header=hdr)
                    hduWeight = fits.ImageHDU(weight)
                    hduList = fits.HDUList([hduData, hduWeight])
                    hduList.writeto(r, overwrite=True)
                hdu.close()
                try:
                    if fits.getval(r, 'NORM') == 'N':
                        fits.setval(r, 'NORM', value='Y')
                        normalize(r)
                except KeyError:
                    fits.setval(r, 'NORM', value='Y')
                    normalize(r)
            #fill all masked values with zero
            zeros_mask(location)
            #make lists of residuals and their weight maps
            residual_list = "%s/residuals/inputs.txt" % (location)
            weight_list = "%s/templates/weights.txt" % (location)
            with open(residual_list, 'w+') as i:
                for r in residuals:
                    i.write("%s[0]\n" % (r))
            with open(weight_list, 'w+') as w:
                for r in residuals:
                    w.write("%s[1]\n" % (r))
            #customize swarp config file for MR
            initialize.create_configs(location)
            test_data = fits.getdata(residuals[0])
            correct_shape = test_data.shape
            config_loc = "%s/configs/default.swarp.MR" % (location)
            with open(config_loc, 'r') as conf:
                lines = conf.readlines()
            lines[4] = "IMAGEOUT_NAME" + "        " + "%s/residuals/MR.fits" % (location) + "\n"
            lines[5] = "WEIGHTOUT_NAME" + "        " + "%s/residuals/MR_swarp_weight.fits" % (location) + "\n"
            lines[15] = "WEIGHT_IMAGE" + "        " + "@%s/templates/weights.txt" % (location) + "\n"
            lines[38] = "IMAGE_SIZE" + "        " + "%s, %s" % correct_shape[::-1] + "\n"
            with open(config_loc, 'w') as conf:
                conf.writelines(lines)
            #perform image combination
            try:
                os.system("swarp @%s/residuals/inputs.txt -c %s" %
                          (location, config_loc))
                #turn MR output weight map into binary weight map and add it as
                #the first extension to the final MR.fits file
                MR_data = fits.getdata("%s/residuals/MR.fits" % (location))
                MR_header = fits.getheader("%s/residuals/MR.fits" % (location))
                MR_mask = fits.getdata("%s/residuals/MR_swarp_weight.fits" %
                                       (location))
                mask_median = np.median(MR_mask)
                mask_std = np.std(MR_mask)
                threshold = mask_median - (mask_std)
                MR_mask[MR_mask < threshold] = 0
                MR_mask[MR_mask >= threshold] = 1
                MR_hdu = fits.PrimaryHDU(MR_data, header=MR_header)
                MR_hdu_mask = fits.ImageHDU(MR_mask)
                MR_hdu_list = fits.HDUList([MR_hdu, MR_hdu_mask])
                MR_hdu_list.writeto("%s/residuals/MR.fits" % (location),
                                    overwrite=True)
                try:
                    os.remove("%s/residuals/MR_swarp_weight.fits" % (location))
                except:
                    print("\-> Error: Could not remove MR weight map\n")
            except:
                print("-> Error: Master residual construction failed\n")
    else:
        print("-> Error: Problem with number of residuals\n")
Example #11
def sim_sameField(location, mode='moffat', numIms=100, bkg_mag=22.5, fwhm_min=3, fwhm_max=6, 
                  rot_min=-2.5, rot_max=2.5, shift_min=-2, shift_max=2, scale_mult=(0,1.5),
                  scale_add=(-20,50), zero_point=25):
    '''Test **OASIS**'s ability to handle frame-by-frame variations in astronomical data and filter out false-positive sources. The procedure of the simulation is as follows:
        
        1. Copies a random science image from the specified dataset to the **simulations** directory.
        2. A source catalog of the chosen science image is made, containing information on each source's centroid location and total flux.
        3. Using this source catalog, simulations of the chosen science image are made, all with constant source flux and location, but with different backgrounds, seeing, and pointing.
        4. The set of simulated images are sent through the **OASIS Pipeline**.
        5. A low number of detected sources signifies a successful simulation. There are no variable objects in the simulated images, so ideally zero sources should be detected by **OASIS**.
        
        :param str location: Path of data file tree (contains the **configs**, **data**, **psf**, **residuals**, **sources**, **templates** directories). Use a comma-separated list for mapping to multiple datasets.
        :param str mode: Simulation mode. Method by which simulated images are made. All images are given a uniform background, then smeared according to Poisson statistics.
        
            * *moffat* (default): Sources are convolved with a 2D Moffat kernel.
            * *gauss*: Sources are convolved with a symmetric 2D Gaussian kernel.
            * *real*: The actual PSF model of the chosen science image is used as the convolution kernel.
            * *sky*: AstrOmatic program ``SkyMaker`` (Bertin) is used to make simulated images.
            
        :param int numIms: Number of simulated images to make (default 100).
        :param float bkg_mag: Average background level in mags. Actual simulated background levels are chosen to be a random value within the interval :math:`[bkg\_mag-1.5, bkg\_mag+1.5]`.
        :param float fwhm_min: Minimum FWHM of simulated images in pixels.
        :param float fwhm_max: Maximum FWHM of simulated images in pixels.
        :param float rot_min: Lower bound on angle of rotation in degrees.
        :param float rot_max: Upper bound on angle of rotation in degrees.
        :param float shift_min: Lower bound on (X,Y) shift in pixels.
        :param float shift_max: Upper bound on (X,Y) shift in pixels.
        :param tuple scale_mult: Interval of acceptable multiplicative scale factors.
        :param tuple scale_add: Interval of acceptable additive scale factors.
        :param float zero_point: Zero point magnitude.
        :returns: Standard **OASIS Pipeline** output, residual frames located in **residuals** and source catalogs located in **sources**.
        
    '''
    ref_im = glob.glob(location + '/data/*_ref_A_.fits')
    if os.path.exists(location) == False:
        print("-> Error: Problem with path name(s)-- make sure paths exist and are entered correctly\n-> Exiting...")
        sys.exit()
    if len(ref_im) != 1:
        print("-> Error: Problem with number of reference images\n-> Exiting...\n")
        sys.exit()
    ref_im = ref_im[0]
    ref_fwhm = fwhm(ref_im)
    path_splits = ref_im.split('/')
    image_name = path_splits[-1]
    sim_loc = location.replace('targets', 'simulations')
    len_loc = len(loc.split('/'))
    tar = path_splits[len_loc+2]
    copy_to_sim(tar, image=ref_im, mode='samefield')
    ref_psf = glob.glob("%s/psf/*_ref_A_.psf" % (sim_loc))
    if len(ref_psf) != 1:
        print("-> Error: Problem with number of reference PSF files\n-> Exiting...\n")
        sys.exit()
    try:
        clear_contents(sim_loc)
    except:
        pass
    images = glob.glob("%s/data/*.fits" % (sim_loc))
    ref_im_sim = ref_im.replace("targets", "simulations")
#delete all original images except reference
    for i in images:
        name = i.split('/')[-1]
        if name != image_name:
            os.remove(i)
#create configs directory if none exists
    create_configs(sim_loc)
#make source catalog of reference using SExtractor
    sim_config = "%s/configs/default_sim.sex" % (sim_loc)
    sim_params = "%s/configs/default_param_sim.sex" % (sim_loc)
    with open(sim_config, 'r') as conf:
        lines = conf.readlines()
    lines[6] = "CATALOG_NAME" + "        " + "%s/data/reference.cat" % (sim_loc) + "\n"
    lines[9] = "PARAMETERS_NAME" + "        " + sim_params + "\n"
    lines[22] = "FILTER_NAME" + "        " + "%s/configs/default.conv" % (sim_loc) + "\n"
    lines[70] = "SEEING_FWHM" + "        " + str(ref_fwhm) + "\n"
    lines[127] = "PSF_NAME" + "        " + ref_psf[0] + "\n"
    with open(sim_config, 'w') as conf_write:
        conf_write.writelines(lines)
    os.system("sextractor %s[0] -c %s" % (ref_im_sim, sim_config))
#extract x_pos, y_pos, and fluxes from SExtractor catalog
    ref_cat = "%s/data/reference.cat" % (sim_loc)
    with open(ref_cat, 'r') as cat:
        cat_lines = cat.readlines()
#get simulated image's metadata
    ref_hdu = fits.open(ref_im_sim)
    ref_data = ref_hdu[0].data
    ref_header = ref_hdu[0].header
    ref_mask = ref_hdu[1].data
    try:
        weight_check = fits.getval(ref_im_sim, 'WEIGHT')
    except KeyError:
        weight_check = 'N'
    if weight_check == 'Y':
        ref_mask = (ref_mask-1)*-1
    ref_mask = ref_mask.astype(np.int64)
    ref_hdu.close()
    from astropy.stats import sigma_clipped_stats
    mean, median, std = sigma_clipped_stats(ref_data, sigma=3.0)
#extract simulated image's source information from SExtractor catalog
    x_pos = []
    y_pos = []
    flux = []
    sources = {}
    for c in cat_lines:
        splits = c.split()
        if splits[0] != '#':
            flux.append(float(splits[0]))
            x_pos.append(round(float(splits[3])))
            y_pos.append(round(float(splits[4])))
            sources.update({float(splits[0]) : (round(float(splits[3])), round(float(splits[4])))})
    flux_ordered = sorted(sources)
    flux_iter = round(len(flux)*0.99)
    flux_sim = flux_ordered[flux_iter]
    xy_sim = sources[flux_sim]
#if mode is set to use SkyMaker for making the simulations, configure SkyMaker
    if mode == 'sky':
        mags = []
        for f in flux:
            mags.append((28-(np.log(f))))
        with open("%s/configs/sky_list.txt" % (sim_loc), "w+") as sky_list:
            for i in range(len(flux)):
                sky_list.write("100 %.3f %.3f %.3f\n" % (x_pos[i], y_pos[i], mags[i]))
        #get pixel scale of reference image
        pixscale = float(ref_header['PIXSCALE'])
        #define oversampling
        oversample = pixscale*25
        #define sky.config location
        sky_config = "%s/configs/sky.config" % (sim_loc)
#start making fake images
    print("\n-> Making simulated images...")
    for n in tqdm(range(numIms)):
#define image name
        if n == 0:
            image_name = '%s/data/%d_ref_A_.fits' % (sim_loc, n)
        else:
            image_name = '%s/data/%d_N_.fits' % (sim_loc, n)
#for each image: make sources w/ random fwhm b/w (3,6), rotate/zoom, shift, add a different gaussian dist. of noise, change scale linearly, poisson smear
        #define FWHM of simulation
        image_fwhm = ((fwhm_max-fwhm_min) * np.random.random()) + fwhm_min
        #based on the mode chosen, create the corresponding convolution kernel and make simulated image
        if mode != 'sky':
            if mode == 'moffat':
                moffat_kernel_1 = Moffat2DKernel(gamma=make_stars.get_moffat_gamma(image_fwhm), alpha=7)
                moffat_kernel_2 = Moffat2DKernel(gamma=make_stars.get_moffat_gamma(image_fwhm), alpha=2)
                conv_kernel = (0.8*moffat_kernel_1) + (0.2*moffat_kernel_2)
            elif mode == 'gauss':
                gaussian_kernel_1 = Gaussian2DKernel(x_stddev=(image_fwhm/2.355), y_stddev=(image_fwhm/2.355))
                gaussian_kernel_2 = Gaussian2DKernel(x_stddev=((image_fwhm*2)/2.355), y_stddev=((image_fwhm*2)/2.355))
                conv_kernel = (0.9*gaussian_kernel_1) + (0.1*gaussian_kernel_2)
            elif mode == 'real':
                conv_kernel = get_first_model(ref_im)
            try:
                conv_kernel /= np.sum(conv_kernel)
            except:
                pass
            flux_variable = np.array(flux) * np.random.random() * 2
            image = make_stars.make_image(ref_data.shape[0], ref_data.shape[1], 
                                      x_loc=y_pos, y_loc=x_pos, fluxes=flux_variable, psf=[conv_kernel])
        #if mode is set to 'sky' use SkyMaker to make simulated image
        elif mode == 'sky':
            bkg_Mag = (1.5*np.random.random()) + bkg_mag
            image_fwhm_arcsec = image_fwhm*pixscale
            with open(sky_config, 'r') as sky:
                sky_lines = sky.readlines()
            sky_lines[6] = "IMAGE_NAME" + "        " + image_name + "\n"
            sky_lines[7] = "IMAGE_SIZE" + "        " + str("%d, %d" % (ref_data.shape[1], ref_data.shape[0])) + "\n"
            sky_lines[19] = "SATUR_LEVEL" + "        " + str(ref_header['SATURATE']) + "\n"
            sky_lines[21] = "EXPOSURE_TIME" + "        " + str(ref_header['EXPTIME']) + "\n"
            sky_lines[26] = "PIXEL_SIZE" + "        " + str(pixscale) + "\n"
            sky_lines[34] = "SEEING_FWHM" + "        " + str(image_fwhm_arcsec) + "\n"
            sky_lines[37] = "PSF_OVERSAMP" + "        " + str(oversample) + "\n"
            sky_lines[65] = "BACK_MAG" + "        " + str(bkg_Mag) + "\n"
            with open(sky_config, 'w') as sky:
                sky.writelines(sky_lines)
            os.system("sky %s/configs/sky_list.txt -c %s" % (sim_loc, sky_config))
            try:
                os.remove("%s/data/%s.list" % (sim_loc, image_name[:-5]))
            except:
                pass
            image = fits.getdata(image_name)
        else:
            print("-> Error: Please enter a valid mode (gauss, moffat, sky, real)\n-> Exiting...")
            sys.exit()
        #now we start the warping of each simulation
        #first rotate/zoom (angle is random b/w 0 and 30 degrees, zoom is random b/w 0 and 2)
        if n != 0:
            #define initial mask for each simulation
            Mask = np.zeros(image.shape)
            rot_angle = ((rot_max-rot_min)*np.random.random())+rot_min
            # uniform shifts drawn from [shift_min, shift_max]
            dx = ((shift_max-shift_min) * np.random.random()) + shift_min
            dy = ((shift_max-shift_min) * np.random.random()) + shift_min
            image = rotate(image, rot_angle, reshape=False)
            image = shift(image, [dx,dy])
            Mask = rotate(ref_mask, rot_angle, reshape=False, cval=1)
            Mask = shift(Mask, [dx,dy], cval=1)
        else:
            Mask = ref_mask
        #for non-SkyMaker simulations, add in a random background, poisson smear the image, and rescale it
        if mode != 'sky':
            #add constant background
            bkg_loc = 2.512**(zero_point - bkg_mag)
            bkg_scl = ((std+5)-(std-5))*np.random.random()+(std-5)
            bkg = np.random.normal(loc=bkg_loc, scale=bkg_scl, size=image.shape)
            image = np.add(image, bkg)
            #poisson smear
            negative_image = np.zeros(image.shape)
            negative_image[:] = image[:]
            image[image < 0] = 0
            negative_image[negative_image > 0] = 0
            image = np.random.poisson(image)
            image = image.astype(np.float64)
            negative_image *= -1
            negative_image = np.random.poisson(negative_image)
            negative_image = negative_image.astype(np.float64)
            negative_image *= -1
            image += negative_image
            #rescale image linearly
            a = ((scale_mult[1] - scale_mult[0])*np.random.random()) + scale_mult[0]
            b = ((scale_add[1] - scale_add[0])*np.random.random()) + scale_add[0]
            image *= a
            image += b
        #write new image to data folder in target's simulations folder
        newHDUData = fits.PrimaryHDU(image, header=ref_header)
        newHDUMask = fits.ImageHDU(Mask)
        newHDUList = fits.HDUList([newHDUData, newHDUMask])
        newHDUList.writeto(image_name, overwrite=True)
        newHDU = fits.open(image_name, mode='update')
        (newHDU[0].header).set('WEIGHT', 'N')
        (newHDU[0].header).set('SCALED', 'N')
        newHDU.close()
    os.system("mv %s %s" % (ref_im_sim, sim_loc))
    os.system("mv %s %s" % (ref_psf, sim_loc))
    os.system("mv %s %s.cat" % (ref_psf[:-4], sim_loc))
    if mode == 'sky':
        sim_lists = glob.glob("%s/data/*.list" % (sim_loc))
        for sl in sim_lists:
            os.remove(sl)
    pipeline.pipeline_run_sim(sim_loc, sim=False)
    print(flux_iter, flux_sim, xy_sim)
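A usage sketch for the simulation above, assuming a hypothetical OASIS target directory; the keyword values shown are just the documented defaults written out explicitly:

# path is a hypothetical target dataset; any of the documented modes
# ('moffat', 'gauss', 'real', 'sky') can be passed as mode
sim_sameField('/home/user/OASIS/targets/target01/g_band/120s',
              mode='moffat',
              numIms=100,
              bkg_mag=22.5,
              fwhm_min=3, fwhm_max=6)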
Example #12
def sextractor(location):
    '''
    runs SExtractor on all residual images
    '''
    sources = location + "/sources"
    residuals = location + "/residuals"
    check = os.path.exists(sources)
    check_temp = os.path.exists(sources + '/temp')
    length = len(residuals) + 1
    if check == False:
        os.system("mkdir %s" % (sources))
        os.system("mkdir %s/temp" % (sources))
    else:
        if check_temp == False:
            os.system("mkdir %s/temp" % (sources))
    images = glob.glob(residuals + "/*_residual_.fits")
    initialize.create_configs(location)
    config_loc = location + '/configs/default.sex'
    with open(config_loc, 'r') as config:
        data = config.readlines()
        config.close()
    data[9] = "PARAMETERS_NAME" + "        " + location + "/configs/default.param" + "\n"
    data[20] = "FILTER_NAME" + "        " + location + "/configs/default.conv" + "\n"
    with open(config_loc, 'w') as config:
        config.writelines(data)
        config.close()
    print("-> Converting all residual masks into weight maps...\n")
    for r in tqdm(images):
        weight = weight_map(r)
        hdu = fits.open(r, mode='update')
        data = hdu[0].data
        hdr = hdu[0].header
        try:
            if hdr['WEIGHT'] == 'N':
                hdr.set('WEIGHT', 'Y')
                hduData = fits.PrimaryHDU(data, header=hdr)
                hduWeight = fits.ImageHDU(weight)
                hduList = fits.HDUList([hduData, hduWeight])
                hduList.writeto(r, overwrite=True)
        except KeyError:
            hdr.set('WEIGHT', 'Y')
            hduData = fits.PrimaryHDU(data, header=hdr)
            hduWeight = fits.ImageHDU(weight)
            hduList = fits.HDUList([hduData, hduWeight])
            hduList.writeto(r, overwrite=True)
        hdu.close()
        try:
            if fits.getval(r, 'NORM') == 'N':
                fits.setval(r, 'NORM', value='Y')
                MR.normalize(r)
        except KeyError:
            fits.setval(r, 'NORM', value='Y')
            MR.normalize(r)
    print("\n-> SExtracting residual images...")
    for i in tqdm(images):
        if np.std(fits.getdata(i)) != 0:
            name = i[length:-5]
            data_name = location + '/data/' + name.replace('residual_',
                                                           '') + '.fits'
            FWHM = psf.fwhm(data_name)
            im_hdu = fits.open(data_name)
            im_header = im_hdu[0].header
            saturate = im_header['SATURATE']
            pixscale = im_header['PIXSCALE']
            im_hdu.close()
            with open(config_loc, 'r') as config:
                data = config.readlines()
                config.close()
            data[51] = "SATUR_LEVEL" + "        " + str(saturate) + "\n"
            data[62] = "SEEING_FWHM" + "        " + str(FWHM) + "\n"
            data[106] = "PSF_NAME" + "        " + location + "/psf/" + name[:-9] + ".psf" + "\n"
            data[58] = "PIXEL_SCALE" + "        " + str(pixscale) + "\n"
            data[32] = "WEIGHT_IMAGE" + "        " + "%s[1]" % (i) + "\n"
            with open(config_loc, 'w') as config:
                config.writelines(data)
                config.close()
            os.system("sextractor %s[0]> %s/temp/%s.txt -c %s" %
                      (i, sources, name, config_loc))
            temp_hdu_data = fits.PrimaryHDU((fits.getdata(i)) * -1,
                                            header=fits.getheader(i))
            temp_hdu_mask = fits.ImageHDU(fits.getdata(i, 1))
            temp_hdu_list = fits.HDUList([temp_hdu_data, temp_hdu_mask])
            temp_hdu_list.writeto("%s/residuals/temp.fits" % (location))
            os.system(
                "sextractor %s/residuals/temp.fits[0]> %s/temp/%s_2.txt -c %s"
                % (location, sources, name, config_loc))
            append_negative_sources(i)
            os.remove("%s/residuals/temp.fits" % (location))
        else:
            name = i[length:-5]
            with open("%s/temp/%s.txt" % (sources, name), 'w') as bad_res_cat:
                bad_res_cat.write("# Bad residual, did not SExtract\n")
    print(
        "-> SExtracted %d images, catalogues placed in 'sources' directory\n" %
        (len(images)))
    print("-> Filtering source catalogs...\n")
    src_join(location)
    filter_sources(location)
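The sign-flipped pass above (SExtracting the residual multiplied by -1) is presumably there because SExtractor detects positive excursions: a source that faded relative to the template leaves a negative dip in the residual, which only becomes a detectable peak once the frame is inverted, and append_negative_sources then merges those detections back in. A toy illustration with a made-up array:

import numpy as np

residual = np.zeros((50, 50))
residual[25, 25] = -300.0              # hypothetical fading source
flipped = residual * -1                # dip becomes an ordinary positive peak
print(residual.max(), flipped.max())   # 0.0 vs. 300.0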
Example #13
def sextractor_psf(location):
    x = 0
    psf_loc = location + "/psf"
    data = location + "/data"
    templates = location + "/templates"
    check = os.path.exists(psf_loc)
    if check == False:
        os.system("mkdir %s" % (psf_loc))
    temps = glob.glob(templates + "/*.fits")
    images = glob.glob(data + "/*_A_.fits")
    for t in temps:
        images.append(t)
    cats = glob.glob(location + '/psf/*.cat')
    images_names = [(i.split('/')[-1])[:-5] for i in images]
    cats_names = [(c.split('/')[-1])[:-4] for c in cats]
    imageCats = [im for im in images_names if im not in cats_names]
    images = []
    if temps == []:
        temps.append('')
    for imcats in imageCats:
        if imcats == (temps[0].split('/')[-1])[:-5]:
            images.append(temps[0])
        else:
            images.append(location + '/data/' + imcats + '.fits')
    initialize.create_configs(location)
    config_loc = location + '/configs/psf.sex'
    with open(config_loc, 'r') as config:
        data = config.readlines()
        config.close()
    data[9] = "PARAMETERS_NAME" + "        " + location + "/configs/default.psfex" + "\n"
    data[19] = "FILTER_NAME" + "        " + location + "/configs/default.conv" + "\n"
    with open(config_loc, 'w') as config:
        config.writelines(data)
        config.close()
    print("\n-> Creating PSF catalogs...")
    if len(temps) == 1:
        for i in tqdm(images):
            name = i.split('/')[-1][:-5]
            hdu = fits.open(i)
            hdr = hdu[0].header
            pixscale = hdr['PIXSCALE']
            hdu.close()
            with open(config_loc, 'r') as config:
                data = config.readlines()
                config.close()
            data[6] = "CATALOG_NAME" + "        " + psf_loc + "/" + name + ".cat" + "\n"
            data[44] = "PIXEL_SCALE" + "        " + str(pixscale) + "\n"
            with open(config_loc, 'w') as config:
                config.writelines(data)
                config.close()
            os.system("sextractor %s[0] -c %s" % (i, config_loc))


#            x += 1
#            per = float(x)/float(len(images)) * 100
#            print("\t %.1f%% sextracted..." % (per))
        print(
            "-> SExtracted %d images, catalogues placed in 'psf' directory\n" %
            (len(images)))
    else:
        print("\n-> Error: Problem with number of template images\n")
        sys.exit()
    return images
Example #14
def isis_sub(location):
    x = 0
    images = glob.glob(location + "/data/*_A_.fits")
    template = glob.glob(location + "/templates/*.fits")
    residuals = glob.glob(location + "/residuals/*residual_.fits")
    images_names = [(i.split('/')[-1])[:-5] for i in images]
    res_names = [(r.split('/')[-1])[:-14] for r in residuals]
    resids = [res for res in images_names if res not in res_names]
    ims = []
    for rs in resids:
        ims.append(location + '/data/' + rs + '.fits')
    if ims != []:
        if len(template) == 1:
            ais_loc = os.path.dirname(
                initialize.__file__) + "/AIS/package/bin/./mrj_phot"
            initialize.create_configs(location)
            ais_config_loc = location + '/configs/default_config'
            cwd = os.getcwd()
            psf_data = glob.glob(location + '/psf/*')
            template_mask = fits.getdata(template[0], 1)
            if len(psf_data) == 2 * (len(images) + 1):
                try:
                    os.mkdir(cwd + "/AIS_temp")
                except FileExistsError:
                    pass
                os.chdir(cwd + "/AIS_temp")
                length = len(location) + 5
                print("\n-> Subtracting images...")
                for i in ims:
                    int_match_to_template(location, i, template[0])
                    os.system(ais_loc + " " + i + " " + template[0] + " -c " +
                              ais_config_loc)
                    os.system(
                        "mv -f %s/AIS_temp/conv.fits %s/residuals/%sresidual_.fits"
                        % (cwd, location, i[length:-5]))
                    hdu = fits.open(location + '/residuals/' + i[length:-5] +
                                    'residual_.fits',
                                    mode='update')
                    hdr = hdu[0].header
                    hdr.set('OPTIMIZE', 'N')
                    hdu.close()
                    image_hdu = fits.open(
                        (i.replace('residual_',
                                   '')).replace('residuals', 'data'))
                    image_hduMask = np.logical_or(
                        np.logical_not(image_hdu[1].data),
                        np.logical_not(template_mask)).astype(int)
                    image_hdu.close()
                    hdu = fits.open(location + '/residuals/' + i[length:-5] +
                                    'residual_.fits')
                    data = hdu[0].data
                    hdr = hdu[0].header
                    hdu.close()
                    hduData = fits.PrimaryHDU(data, header=hdr)
                    hduMask = fits.ImageHDU(image_hduMask)
                    hduList = fits.HDUList([hduData, hduMask])
                    hduList.writeto(location + '/residuals/' + i[length:-5] +
                                    'residual_.fits',
                                    overwrite=True)
                    x += 1
                    per = float(x) / float(len(ims)) * 100
                    print("\t %.1f%% subtracted..." % (per))
            else:
                print(
                    "-> Error: Need PSFs before running subtraction\n-> Run psf.py first"
                )
                print(
                    "-> If any images have been manually removed from the data directory, delete all contents of the psf directory and run OasisPy again\n"
                )
                sys.exit()
        else:
            print("-> Subtraction failure: Template missing")
            sys.exit()
        os.chdir(cwd)
        shutil.rmtree(cwd + "/AIS_temp")
    else:
        print("-> Images have already been subtracted")