Example #1
def save_asymmetries(filt='f160w'):

    HFF = Table.read('output/tables/nbCGs.fits')

    asymmetries = []
    for cluster, ID in zip(HFF['cluster'], HFF['ID']):
        sci = core.open_cutout('{}/cutouts/{}_ID_{}_{}.fits'.format(
            cluster, cluster, ID, filt),
                               simple=True)

        err = core.open_cutout('{}/cutouts/{}_ID_{}_{}_noise.fits'.format(
            cluster, cluster, ID, filt),
                               simple=True)

        segmap = core.open_cutout('{}/cutouts/{}_ID_{}_segmap.fits'.format(
            cluster, cluster, ID),
                                  simple=True)

        morph = statmorph.source_morphology(sci,
                                            segmap,
                                            weightmap=err,
                                            label=ID)
        asymmetries.append(morph.asymmetry)

    HFF['asymmetry'] = asymmetries
    HFF.write('output/tables/nbCGs_asymmetries_NEW.fits')

    return
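
A minimal follow-up sketch (a hypothetical check, not part of the function above): read back the table written by save_asymmetries and inspect the measured asymmetries.

from astropy.table import Table

# the path matches the outfile used in save_asymmetries above
asym = Table.read('output/tables/nbCGs_asymmetries_NEW.fits')
print(asym['cluster', 'ID', 'asymmetry'])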
Example #2
def vorbin_all(cluster):
    '''
    Open the F160W cutout image and compute the Voronoi bins. Save the
    image of the Voronoi bins for subsequent masking, as well as the
    locations of the bins' luminosity-weighted centroids.

    Parameters
    ----------
    cluster : string
        Operate on the files of this cluster.

    Returns
    -------
    None.

    '''

    os.makedirs('{}/vorbins'.format(cluster),
                exist_ok=True)  # ensure the output
    # directory for the vorbins is available

    table = Table.read('{}/{}_sample-with-use-cols.fits'.format(
        cluster, cluster))
    IDs = table['ID']

    for ID in IDs:
        (sci, dim, photnu, r_e, redshift, sma, smb, pa) = open_cutout(
            '{}/cutouts/{}_ID_{}_f160w.fits'.format(cluster, cluster, ID))
        noise = open_cutout('{}/cutouts/{}_ID_{}_f160w_noise.fits'.format(
            cluster, cluster, ID),
                            simple=True)

        # target a signal-to-noise ratio of 400
        xs, ys, binNum, xBar, yBar, SN, nPixels = vorbin_data(
            sci, noise, dim, 400)
        bins_image = binNum.reshape(dim)  # reshape the bins into an image

        outfile = '{}/vorbins/{}_ID_{}_vorbins.npz'.format(
            cluster, cluster, ID)
        np.savez(outfile,
                 image=bins_image,
                 x=xs,
                 y=ys,
                 binNum=binNum,
                 xbar=xBar,
                 ybar=yBar,
                 SN=SN,
                 nPixels=nPixels)

    return
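
A minimal sketch (hypothetical check) of reloading one of the Voronoi-bin files written above, using the a2744 ID 5830 object that appears in the tests later in this document.

import numpy as np

vorbins = np.load('a2744/vorbins/a2744_ID_5830_vorbins.npz')
bins_image = vorbins['image']                # 2D image of bin numbers
bin_nums, counts = np.unique(vorbins['binNum'], return_counts=True)
print(dict(zip(bin_nums, counts)))           # pixels assigned to each bin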
Example #3
def compare_rms(cluster, filt):

    cutout_files = glob.glob('{}/cutouts/{}_ID_*_{}.fits'.format(
        cluster, cluster, filt))

    # normalize path separators for compatibility with Windows
    cutouts = [file.replace(os.sep, '/') for file in cutout_files]

    median_signals, rmses, dims = [], [], []
    for cutout in cutouts:
        data, exptime = core.open_cutout(cutout, exp=True)
        dim = data.shape[0]
        dims.append(dim)

        electrons_image = exptime * data

        median = np.median(electrons_image)
        rms = np.sqrt(np.nanmean(np.square(electrons_image)))

        median_signals.append(median)
        rmses.append(rms)

    median_signals = np.array(median_signals)
    rmses = np.array(rmses)
    dims = np.array(dims)

    neg_mask = (median_signals < 0)
    neg_signals = np.abs(median_signals[neg_mask])
    neg_rmses = rmses[neg_mask]

    random_signals, random_rmses = sample_randomly(cluster, filt, dims)
    neg_mask = (random_signals < 0)
    neg_random_signals = np.abs(random_signals[neg_mask])
    neg_random_rmses = random_rmses[neg_mask]

    xs = [median_signals, neg_signals, random_signals, neg_random_signals]
    ys = [rmses, neg_rmses, random_rmses, neg_random_rmses]
    labels = ['signal', 'abs(signal)', 'random signal', 'abs(random signal)']
    colors = ['k', 'r', 'b', 'g']

    # plotting.plot_simple_multi(xs, ys, labels, colors,
    #                             xlabel='median signal', ylabel='RMS')

    return
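
The plotting call above is commented out; a minimal sketch of the same comparison with plain matplotlib (an assumption, standing in for the project's plotting.plot_simple_multi helper) could look like the following, given the xs, ys, labels, and colors lists built in compare_rms.

from matplotlib import pyplot as pyp

def plot_signal_vs_rms(xs, ys, labels, colors):
    # scatter each signal/RMS pair with its own label and color
    fig, ax = pyp.subplots()
    for x, y, label, color in zip(xs, ys, labels, colors):
        ax.scatter(x, y, label=label, color=color, s=10)
    ax.set_xlabel('median signal')
    ax.set_ylabel('RMS')
    ax.legend()
    pyp.show()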
Example #4
def isophote_data():

    from photutils.aperture import EllipticalAperture
    from photutils.isophote import Ellipse, EllipseGeometry
    
    test = 'a2744/cutouts/a2744_ID_5830_f160w.fits'
    (data, dim, photfnu, R_e,
     redshift, sma, smb, pa) = open_cutout(test)
    (noise, _, _, _,
     _, _, _, _) = open_cutout('a2744/cutouts/a2744_ID_5830_f160w_noise.fits')
    plt.display_image_simple(data, cmap=cm.viridis)
    
    xlen, ylen = dim[1], dim[0]
    
    geometry = EllipseGeometry(x0=xlen/2, y0=ylen/2, sma=20, eps=0.5,
                               pa=70*np.pi/180)
    aper = EllipticalAperture((geometry.x0, geometry.y0), geometry.sma,
                               geometry.sma*(1 - geometry.eps), geometry.pa)
    
    # pyp.imshow(data, origin='lower')
    # aper.plot(color='white')
    
    ellipse = Ellipse(data, geometry)
    isolist = ellipse.fit_image()
    
    # print(isolist.tflux_e)
    
    isophotes = True
    if isophotes :
        plt.display_isophotes(data, isolist, cmap=cm.viridis)
        
        # from photutils.isophote import build_ellipse_model
        # model = build_ellipse_model(data.shape, isolist)
        # residual = data - model
        # plt.display_image_simple(residual, norm=None)
    
    annuli = True
    if annuli :
        from photutils.aperture import EllipticalAnnulus, aperture_photometry
        
        center = (isolist[0].x0, isolist[0].y0)
        # print(center)
        
        last = np.where(isolist.stop_code == 0)[0][-1]
        isolist = isolist[last]
        
        pa = isolist.pa
        # print(pa*180/np.pi)
        
        a_outs = np.arange(1e-5, isolist.sma, isolist.sma/11)
        b_outs = a_outs*(1-isolist.eps)
        
        for i in range(len(a_outs) - 1) :
            a_in = a_outs[i]
            a_out = a_outs[i+1]
            b_in = b_outs[i]
            b_out = b_outs[i+1]
            # print(a_in, a_out, b_in, b_out)
            aper = EllipticalAnnulus(center, a_in, a_out, b_out, b_in=b_in,
                                     theta=isolist.pa)
            phot_table = aperture_photometry(data, aper, error=noise)
            flux = phot_table['aperture_sum'][0]
            flux_err = phot_table['aperture_sum_err'][0]
            # print(flux)
            # print(flux_err)
            
            annulus_mask = aper.to_mask()
            
            annulus_data = annulus_mask.multiply(data)
            # plt.display_image_simple(annulus_data, cmap=cm.viridis, norm=None)
            # print(np.sum(annulus_data))
            
            err_table = aperture_photometry(noise, aper)
            flux_err_alt = err_table['aperture_sum'][0]
            # print(flux_err)
            
            err_data = annulus_mask.multiply(noise)
            # print(np.sum(err_data))
            
            # print(flux/flux_err)
            print(flux/flux_err_alt)
            # print()
            
    return
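
A minimal sketch (an assumption, not part of the original function) of a surface-brightness profile check using the IsophoteList returned by ellipse.fit_image() above.

from matplotlib import pyplot as pyp

def plot_isophote_profile(isolist):
    # mean isophote intensity as a function of semi-major axis
    fig, ax = pyp.subplots()
    ax.plot(isolist.sma, isolist.intens, 'ko', ms=3)
    ax.set_xlabel('semi-major axis (pixels)')
    ax.set_ylabel('mean isophote intensity')
    pyp.show()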
Example #5
import os
import numpy as np

from astropy.table import Table
from matplotlib import cm
from matplotlib.colors import LogNorm
import prospect.io.read_results as reader

from core import open_cutout
import plotting as plt

# TESTS #

(sci, dim, photfnu, R_e,
 redshift, sma, smb, pa) = open_cutout('a2744/cutouts/a2744_ID_5830_f160w.fits')
noise, _, _, _, _, _, _, _ = open_cutout('a2744/cutouts/a2744_ID_5830_f160w_noise.fits')

# plt.display_image_simple(sci, bad='black')
# plt.display_image_simple(noise, bad='black', vmax=0.001)
# plt.display_image_simple(sci/noise, bad='black', vmax=450)

vorbins = np.load('a2744/vorbins/a2744_ID_5830_vorbins.npz')
bins_image = vorbins['image']
plt.display_image_simple(bins_image[25:80, 25:80], bad='black', cmap=cm.prism,
                         norm=None)

# PLOT RESULTS FROM FITTING #
cluster = 'a2744'
inDir = cluster + '/results/'
vorbinDir = cluster + '/vorbins/'
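
A hedged sketch of the step implied by the imports and directories above: load one Prospector output file with prospect.io.read_results. The .h5 filename pattern here is an assumption.

# results_from returns the results dictionary, the observations, and the model
results, obs, model = reader.results_from(inDir + 'a2744_ID_5830_bin_0.h5')
print(results['chain'].shape)  # shape of the posterior samples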
Example #6
def determine_fluxes(cluster, filters, subpop='nbCGs'):
    '''
    Determine the flux in every annulus/bin for a given object in a given
    filter, then repeat for each subsequent filter, determining the fluxes
    in the corresponding bins. Collect all of the determined fluxes into a
    table and save that table to file for subsequent use with Prospector.

    Parameters
    ----------
    cluster : string
        Operate on the files of this cluster.
    filters : list
        The filterset for the cluster.
    subpop : string, optional
        The subpopulation of objects to process. The default is 'nbCGs'.

    Returns
    -------
    None.

    '''

    if subpop == 'nbCGs':
        use_table = Table.read('{}/{}_{}.fits'.format(cluster, cluster,
                                                      subpop))

    use_columns = [col for col in use_table.colnames if col.endswith('_use')]
    IDs = use_table['id']

    os.makedirs('{}/photometry'.format(cluster), exist_ok=True)  # ensure the
    # output directory for the photometric tables is available

    for ID in IDs:
        row = np.where(IDs == ID)
        use_vals = (np.array(list(use_table[use_columns][row][0])) == 'TRUE')

        use_dict = dict(zip(use_columns, use_vals))

        outfile = '{}/photometry/{}_ID_{}_photometry.fits'.format(
            cluster, cluster, ID)

        bin_data = np.load('{}/bins/{}_ID_{}_annuli.npz'.format(
            cluster, cluster, ID))
        bins_image = bin_data['image']
        sma, smb = bin_data['sma'], bin_data['smb']
        flux, err = bin_data['flux'], bin_data['err']
        nPixels = bin_data['nPixels']
        widths, PAs = bin_data['width'], bin_data['pa']

        numBins = np.nanmax(bins_image) + 1  # accounts for python 0-index

        FUV_mag, U_mag = use_table['M_AB_FUV'][row], use_table['M_AB_U'][row]
        V_mag, J_mag = use_table['M_AB_V'][row], use_table['M_AB_J'][row]

        if not np.isnan(numBins):
            photometry = Table()
            photometry['bin'] = range(int(numBins))
            photometry['sma'], photometry['smb'] = sma, smb
            photometry['flux'], photometry['err'] = flux, err
            photometry['SN'], photometry['nPixels'] = flux / err, nPixels
            photometry['width'], photometry['PA'] = widths, PAs

            photometry['FUV_mag'] = [FUV_mag] * int(numBins)
            photometry['U_mag'] = [U_mag] * int(numBins)
            photometry['V_mag'] = [V_mag] * int(numBins)
            photometry['J_mag'] = [J_mag] * int(numBins)

            for filt in filters:
                sci_file = '{}/cutouts/{}_ID_{}_{}.fits'.format(
                    cluster, cluster, ID, filt)
                noise_file = '{}/cutouts/{}_ID_{}_{}_noise.fits'.format(
                    cluster, cluster, ID, filt)
                segmap_file = '{}/cutouts/{}_ID_{}_segmap.fits'.format(
                    cluster, cluster, ID)

                (sci, dim, photfnu, r_e, redshift, sma, smb,
                 pa) = open_cutout(sci_file)
                noise, _, _, _, _, _, _, _ = open_cutout(noise_file)
                segMap, _, _, _, _, _, _, _ = open_cutout(segmap_file)

                # make a copy of the science image and noise image
                new_sci = sci.copy()
                new_noise = noise.copy()

                # mask the copied images based on the segmentation map, but
                # don't mask out the sky
                if ID >= 20000:  # the bCGs aren't in the segmap, so mask
                    new_sci[segMap > 0] = 0  # any other galaxy
                    new_noise[segMap > 0] = 0
                else:  # for the non-bCGs, mask out pixels associated with
                    new_sci[(segMap > 0)
                            & (segMap != ID)] = 0  # other galaxies
                    new_noise[(segMap > 0) & (segMap != ID)] = 0

                # save relevant information for running prospector into the table
                length = int(numBins)
                photometry['R_e'] = [r_e] * length * u.pix
                photometry['z'] = [redshift] * length
                lumDist = cosmo.luminosity_distance(redshift)
                photometry['lumDist'] = [lumDist.value] * length * u.Mpc

                fluxes, uncerts, invalid = [], [], []
                for val in range(int(numBins)):
                    # mask = np.where(bins_image == val)
                    temp_sci, temp_noise = new_sci.copy(), new_noise.copy()

                    # masked_sci = new_sci[mask]
                    # flux = photfnu*np.nansum(masked_sci)
                    temp_sci[bins_image != val] = np.nan
                    flux = photfnu * np.nansum(temp_sci)
                    fluxes.append(flux)

                    # masked_noise = new_noise[mask]
                    # uncert = photfnu*np.sqrt(np.nansum(np.square(masked_noise)))
                    temp_noise[bins_image != val] = np.nan
                    uncert = photfnu * np.sqrt(np.nansum(
                        np.square(temp_noise)))
                    uncerts.append(uncert)

                    pix_sci = temp_sci.copy()
                    pix_sci[pix_sci != 0] = np.nan
                    pix_sci[pix_sci == 0] = 1
                    invalid_pix = np.nansum(pix_sci)
                    invalid.append(invalid_pix)

                photometry[filt + '_flux'] = fluxes * u.Jy
                photometry[filt + '_err'] = uncerts * u.Jy

                valid = nPixels - np.array(invalid)
                photometry[filt + '_nPix'] = np.int_(valid)

                for key, value in use_dict.items():
                    if filt in key.split('_')[0]:
                        photometry[key] = [value] * length

            photometry.write(outfile)

    return
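
A minimal sketch (hypothetical check, using the example object referenced elsewhere in this document): read back one of the per-galaxy photometry tables written by determine_fluxes and inspect the per-bin F160W photometry.

from astropy.table import Table

phot = Table.read('a2744/photometry/a2744_ID_5830_photometry.fits')
print(phot['bin', 'sma', 'smb', 'f160w_flux', 'f160w_err', 'f160w_nPix'])
print(phot['f160w_flux'] / phot['f160w_err'])  # per-bin SNR in F160W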
Example #7
def save_pngs(cluster, filters, population):

    num_filts = len(filters)
    if num_filts in (7, 9):
        nrows, ncols = 3, 3
    elif num_filts == 12:
        nrows, ncols = 3, 4
    elif num_filts == 16:
        nrows, ncols = 4, 4
    else:
        nrows, ncols = 4, 5

    # outDir = '{}/pngs'.format(cluster)
    outDirAlt = '{}/images_cutouts_segmapped'.format(cluster)

    # os.makedirs('{}'.format(outDir), exist_ok=True) # ensure the output
    # directory for the pngs is available
    os.makedirs('{}'.format(outDirAlt), exist_ok=True)

    # open the table of all the objects
    table = Table.read('{}/{}_sample.fits'.format(cluster, cluster))

    # mask the table based on the population of interest
    table = table[table['pop'] == population]

    # determine the band flags that are in the catalog
    band_flags = [string for string in table.colnames if 'flag_F' in string]

    # use only those columns to create a new subtable
    flag_table = Table([table[band_flag] for band_flag in band_flags],
                       names=tuple(band_flags))

    # create a list of the flags per band for each galaxy
    flags = [list(row) for row in flag_table.iterrows()]

    IDs = list(table['id'])
    for i in range(len(IDs)):
        segPath = '{}/cutouts/{}_ID_{}_segmap.fits'.format(
            cluster, cluster, IDs[i])
        segMap = core.open_cutout(segPath, simple=True)

        # outfile = outDir + '/{}_ID_{}.png'.format(cluster, IDs[i])
        outfile_segmapped = outDirAlt + '/{}_ID_{}.png'.format(cluster, IDs[i])

        # cutout_data = []
        cutout_segmapped = []
        for filt in filters:
            infile = '{}/cutouts/{}_ID_{}_{}.fits'.format(
                cluster, cluster, IDs[i], filt)
            data = core.open_cutout(infile, simple=True)
            # cutout_data.append(data)

            if IDs[i] > 20000:
                mask = (segMap > 0)
            else:
                mask = (segMap > 0) & (segMap != IDs[i])
            segmapped_data = data.copy()
            segmapped_data[mask] = 0
            cutout_segmapped.append(segmapped_data)

        # plt.display_cutouts(cutout_data, nrows, ncols, filters, flags[i],
        #                     outfile, save=True)
        plt.display_cutouts(cutout_segmapped,
                            nrows,
                            ncols,
                            filters,
                            flags[i],
                            outfile_segmapped,
                            save=True)

    return
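
A minimal usage sketch; the filter list is an assumption based on the seven HST bands used elsewhere in this document, and 'Q' is the quiescent population label used in check_SNR below.

filters = ['f435w', 'f606w', 'f814w', 'f105w', 'f125w', 'f140w', 'f160w']
save_pngs('a2744', filters, 'Q')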
Example #8
def save_bkgshists(cluster, filters, population):

    num_filts = len(filters)
    if num_filts == 9:
        nrows, ncols = 3, 3
    elif num_filts == 12:
        nrows, ncols = 3, 4
    elif num_filts == 16:
        nrows, ncols = 4, 4
    else:
        nrows, ncols = 4, 5

    outDir = '{}/images_bkg_dists'.format(cluster)
    os.makedirs(outDir, exist_ok=True)  # ensure the output direc. is available

    # now loop here over all the IDs that are available
    # open the table of all the objects
    clustable = Table.read('{}/{}_sample.fits'.format(cluster, cluster))

    # mask the table based on the population of interest
    clustable = clustable[clustable['pop'] == population]

    IDs = list(clustable['id'])

    for i in range(len(IDs)):
        outfile = '{}/images_bkg_dists/{}_ID_{}.png'.format(
            cluster, cluster, IDs[i])

        segPath = '{}/cutouts/{}_ID_{}_segmap.fits'.format(
            cluster, cluster, IDs[i])
        segMap = core.open_cutout(segPath, simple=True)

        cutout_segmapped_data = []
        medians = []
        bins = []
        for filt in filters:
            infile = '{}/cutouts/{}_ID_{}_{}.fits'.format(
                cluster, cluster, IDs[i], filt)
            data = core.open_cutout(infile, simple=True)

            if IDs[i] > 20000:
                mask = (segMap > 0)
            else:
                mask = (segMap > 0) & (segMap != IDs[i])
            segmapped_data = data.copy()
            segmapped_data[mask] = np.nan

            segmapped_data = segmapped_data.flatten()
            non_nan_data = segmapped_data[~np.isnan(segmapped_data)]

            median = np.median(non_nan_data)
            num_bins = int(np.ceil(2*np.cbrt(len(non_nan_data))))  # Rice rule
            # see https://en.wikipedia.org/wiki/Histogram#Rice_Rule

            cutout_segmapped_data.append(non_nan_data)
            medians.append(median)
            bins.append(num_bins)

        plt.display_hists(cutout_segmapped_data,
                          nrows,
                          ncols,
                          filters,
                          medians,
                          bins,
                          outfile,
                          save=True)

    return
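
A minimal illustration (synthetic data, not from the pipeline) of the Rice rule used above to choose the number of histogram bins.

import numpy as np

rng = np.random.default_rng(0)
background = rng.normal(0.0, 1e-4, size=3000)           # mock sky pixel values
num_bins = int(np.ceil(2*np.cbrt(background.size)))     # Rice rule: 2*n^(1/3)
counts, edges = np.histogram(background, bins=num_bins)
print(num_bins, counts.sum())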
Example #9
def check_SNR(filt):

    clusters = ['a370', 'a1063', 'a2744', 'm416', 'm717', 'm1149']

    SNRs_lo, SNRs_med, SNRs_hi = [], [], []
    for cluster in clusters:

        inDir = '{}/cutouts'.format(cluster)

        cluster_table = Table.read('{}/{}_sample.fits'.format(
            cluster, cluster))

        mask = (cluster_table['id'] < 20000) & (cluster_table['pop'] == 'Q')
        cluster_table = cluster_table[mask]

        lo_mask = cluster_table['lmass'] < 8.78
        med_mask = ((cluster_table['lmass'] >= 9.52) &
                    (cluster_table['lmass'] <= 9.76))
        hi_mask = cluster_table['lmass'] >= 10.5

        lo_table = cluster_table[lo_mask]
        med_table = cluster_table[med_mask]
        hi_table = cluster_table[hi_mask]

        # compute the pixel-by-pixel SNR for every galaxy in each mass bin,
        # keeping only pixels assigned to that galaxy in the segmentation map
        for mass_table, SNRs in zip([lo_table, med_table, hi_table],
                                    [SNRs_lo, SNRs_med, SNRs_hi]):
            for ID in mass_table['id']:
                sci_file = '{}/{}_ID_{}_{}.fits'.format(inDir, cluster, ID,
                                                        filt)
                noise_file = '{}/{}_ID_{}_{}_noise.fits'.format(
                    inDir, cluster, ID, filt)
                segmap_file = '{}/{}_ID_{}_segmap.fits'.format(inDir, cluster,
                                                               ID)

                try:
                    sci = core.open_cutout(sci_file, simple=True)
                    noise = core.open_cutout(noise_file, simple=True)
                    segmap = core.open_cutout(segmap_file, simple=True)

                    SNR = sci / noise
                    SNR[segmap != ID] = np.nan
                    SNR_flat = SNR.flatten()

                    SNR_flat = SNR_flat[~np.isnan(SNR_flat)]  # remove NaNs

                    SNRs.extend(SNR_flat)
                except Exception:  # skip objects with missing cutouts
                    pass

    lo_label = r'$\log(M_{*}/M_\odot) < 8.78$'
    med_label = r'$9.52 \leq \log(M_{*}/M_\odot) \leq 9.76$'
    hi_label = r'$\log(M_{*}/M_\odot) \geq 10.5$'

    plt.histogram_multi([SNRs_lo, SNRs_med, SNRs_hi],
                        '{} SNR'.format(filt),
                        bins=[70, 70, 70],
                        log=True,
                        histtype='step',
                        colors=['k', 'g', 'm'],
                        labels=[lo_label, med_label, hi_label],
                        styles=[':', '--', '-'],
                        xmin=0.1,
                        xmax=1300,
                        ymin=1,
                        ymax=2e5)

    return
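
A minimal usage sketch, assuming the per-cluster sample tables and cutout directories referenced above exist.

check_SNR('f160w')  # build the SNR histograms for the F160W filter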
Example #10
def degenerate_results(cluster, ID, loc=0, save=False, version=''):

    if save:
        os.makedirs('{}/images_degen_plots'.format(cluster),  # ensure the output
                    exist_ok=True)  # directory for the figures is available
    
    '''
    # get a list of fits files containing photometric data for all bins for
    # a given galaxy, as denoted by ID
    photometries = '{}/photometry/{}_ID_*_photometry.fits'.format(cluster,
                                                                  cluster)
    phot_files = glob.glob(photometries)
    
    phots = []
    for file in phot_files :
        file = file.replace(os.sep, '/') # compatibility for Windows
        phots.append(file)
    
    # loop over all the fits files in the directory
    for file in phots :
        ID = file.split('_')[2] # the galaxy ID to fit the bins for
    '''
    
    f435w = open_cutout('{}/cutouts/{}_ID_{}_f435w.fits'.format(
        cluster, cluster, ID), phot=True)
    f606w = open_cutout('{}/cutouts/{}_ID_{}_f606w.fits'.format(
        cluster, cluster, ID), phot=True)
    f814w = open_cutout('{}/cutouts/{}_ID_{}_f814w.fits'.format(
        cluster, cluster, ID), phot=True)
    f105w = open_cutout('{}/cutouts/{}_ID_{}_f105w.fits'.format(
        cluster, cluster, ID), phot=True)
    f125w = open_cutout('{}/cutouts/{}_ID_{}_f125w.fits'.format(
        cluster, cluster, ID), phot=True)
    f140w = open_cutout('{}/cutouts/{}_ID_{}_f140w.fits'.format(
        cluster, cluster, ID), phot=True)
    f160w = open_cutout('{}/cutouts/{}_ID_{}_f160w.fits'.format(
        cluster, cluster, ID), phot=True)
    
    rgb = make_lupton_rgb(7e7*(f125w + f140w + f160w),
                          7e7*(f814w + f105w),
                          7e7*(f435w + f606w))
    
    file = '{}/photometry/{}_ID_{}_photometry.fits'.format(cluster, cluster, ID)
    
    table = Table.read(file)
    bins = table['bin'] # get a list of bin values
    sma, smb = table['sma'], table['smb']
    R_e, width = table['R_e'], table['width']
    
    # xs = (sma - 0.5*width)/R_e
    # xerrs = 0.5*width/R_e
    
    xs = (sma - width)*np.sqrt(smb/sma)/R_e
    xerrs_lo, xerrs_hi = np.zeros(len(xs)), width*np.sqrt(smb/sma)/R_e
    
    angSize = (R_e[0]*u.pix).to(u.arcsec, hst_pixelscale)
    physSize = angSize/cosmo.arcsec_per_kpc_comoving(table['z'][0])
    plot_title = ('{} ID {}'.format(cluster, ID) +
                  r' [$R_{\rm e}$' +
                  ' = {:.3f} = {:.3f} at z = {}]'.format(angSize,
                                                         physSize.to(u.kpc),
                                                         table['z'][0]))
    
    FUV_mag = table['FUV_mag'][0]
    V_mag = table['V_mag'][0]
    J_mag = table['J_mag'][0]
    
    (metal_median, metal_lo, metal_hi,
     dust_median, dust_lo, dust_hi,
     mwa_median, mwa_lo, mwa_hi,
     metal_best, dust_best, mwa_best) = determine_lines(cluster, ID, bins,
                                                        best=True,
                                                        version=version)
    
    metal_slope, metal_int = np.polyfit(xs, metal_median, 1)
    dust_slope, dust_int = np.polyfit(xs, dust_median, 1)
    mwa_slope, mwa_int = np.polyfit(xs, mwa_median, 1)
    # print(mwa_slope)
    
    outfile = '{}/images_degen_plots/{}_ID_{}_new.pdf'.format(cluster, cluster, ID)
    q_xi, q_yi, q_z, sf_xi, sf_yi, sf_z = checks.load_FUVVJ_contours()
    
    plt.plot_degeneracies([xs, xs, xs],
                          [xerrs_lo, xerrs_lo, xerrs_lo],
                          [xerrs_hi, xerrs_hi, xerrs_hi],
                          [metal_median, dust_median, mwa_median],
                          [metal_lo, dust_lo, mwa_lo],
                          [metal_hi, dust_hi, mwa_hi],
                          [metal_best, dust_best, mwa_best],
                          [metal_slope, dust_slope, mwa_slope],
                          [metal_int, dust_int, mwa_int],
                          q_xi, q_yi, q_z, sf_xi, sf_yi, sf_z,
                          V_mag-J_mag, FUV_mag-V_mag, rgb,
                          labels=['best fit/MAP', 'linear fit',
                                  r'median$\pm 1\sigma$'],
                          xlabel=r'Radius ($R_{\rm e}$)',
                          list_of_ylabels=[r'$\log(Z/Z_{\odot})$',
                                           r'Dust ($\hat{\tau}_{\lambda, 2}$)',
                                           'MWA (Gyr)'], title=plot_title,
                          xmin=-0.05, xmax=(xs[-1] + xerrs_hi[-1] + 0.05),
                          outfile=outfile, save=save, loc=loc)
    
    return
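
A minimal usage sketch for the example object used elsewhere in this document, assuming its cutouts, photometry table, and fitting results exist.

degenerate_results('a2744', 5830, loc=0, save=True)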
Example #11
def annuli_bins(cluster, ID):
    '''
    Determine the elliptical annuli to use for subsequent analysis.

    Parameters
    ----------
    cluster : string
        Operate on the files of this cluster.
    ID : int
        ID of the galaxy that is being binned.

    Returns
    -------
    annuli_map : numpy.ndarray
        Image assigning every pixel to an annulus (NaN outside all annuli).
    smas : numpy.ndarray
        The semi-major axis of the inner edge of each annulus.
    smbs : numpy.ndarray
        The semi-minor axis of the inner edge of each annulus.
    fluxes : list
        Flux contained in each annulus.
    errs : list
        Uncertainty on the flux contained in each annulus.
    nPixels_list : list
        Number of pixels contained in each annulus.
    widths : numpy.ndarray
        Widths of each annulus.
    pas : numpy.ndarray
        Array of position angles for the annuli.

    '''

    IR_pixel_scale = 0.128  # arcsec/pixel
    image_pixel_scale = 0.06  # arcsec/pixel
    targetSN = 30  # the target signal-to-noise ratio to use
    rin = 0  # the starting value for the semi-major axis of the ellipse

    (sci, dim, photnu, r_e, redshift, sma, smb, pa_deg) = open_cutout(
        '{}/cutouts/{}_ID_{}_f160w.fits'.format(cluster, cluster, ID))
    noise = open_cutout('{}/cutouts/{}_ID_{}_f160w_noise.fits'.format(
        cluster, cluster, ID),
                        simple=True)
    segMap = open_cutout('{}/cutouts/{}_ID_{}_segmap.fits'.format(
        cluster, cluster, ID),
                         simple=True)

    eta = 1 - smb / sma  # the ellipticity of the ellipse

    # determine the center of the ellipse, based on the size of the cutout
    xx, yy = np.indices(dim)
    x0, y0 = np.median(xx), np.median(yy)

    # make a copy of the science image and noise image
    new_sci = sci.copy()
    new_noise = noise.copy()

    # mask the copied images based on the segmentation map, but don't mask out
    # the sky
    if ID >= 20000:  # the bCGs aren't in the segmap, so mask any other galaxy
        new_sci[segMap > 0] = 0
        new_noise[segMap > 0] = 0
    else:  # for the non-bCGs, mask out pixels associated with other galaxies
        new_sci[(segMap > 0) & (segMap != ID)] = 0
        new_noise[(segMap > 0) & (segMap != ID)] = 0

    dr = max(IR_pixel_scale / image_pixel_scale, 0.1 * r_e)
    pa = np.pi * pa_deg / 180

    rins, fluxes, errs, widths, annuli, nPixels_list = [], [], [], [], [], []
    while rin < 5 * r_e:
        (flux, err, rnew, width, annulus,
         nPixels) = compute_annuli(new_sci, new_noise, dim, (x0, y0), rin, dr,
                                   eta, pa, targetSN)
        if rnew < 5 * r_e:
            fluxes.append(flux)
            errs.append(err)
            rins.append(rnew)
            widths.append(width)
            annuli.append(annulus)
            nPixels_list.append(nPixels)
        rin = rnew

    # create the annuli map for subsequent determination of photometric fluxes
    annuli_map = np.zeros(dim)
    for i in range(len(annuli)):
        annuli_map += (i + 1) * annuli[i]
    annuli_map[annuli_map == 0] = np.nan
    annuli_map -= 1

    smas = np.array(rins)  # the semi-major axes of the inner annuli
    smbs = (1 - eta) * smas  # the semi-minor axes of the inner annuli
    widths = np.array(widths)
    pas = np.array([pa_deg] * len(smas))

    return annuli_map, smas, smbs, fluxes, errs, nPixels_list, widths, pas
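
A minimal driver sketch (hypothetical, not shown in the original): save the outputs of annuli_bins in the .npz format and location that determine_fluxes (Example #6) reads back.

import numpy as np

(annuli_map, smas, smbs, fluxes, errs,
 nPixels_list, widths, pas) = annuli_bins('a2744', 5830)
np.savez('a2744/bins/a2744_ID_5830_annuli.npz',
         image=annuli_map, sma=smas, smb=smbs, flux=fluxes, err=errs,
         nPixels=nPixels_list, width=widths, pa=pas)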