Code Example #1
File: dr1.py Project: gbrammer/unicorn
def check_backgrounds():
    """
    UDF
    
    Plot the automatically-determined backgrounds as a function of position
    in the UDF frame
    """
    c = catIO.Readfile('../F140W/HUDF12-F140W.reform.cat')
    ok = c.mag_auto < 27.5
    
    fp = open('udf_backgrounds.dat','w')
    fp.write('# id c0 cx cy x y mag\n')
    for i in np.arange(c.N)[ok]:
        id = c.number[i]
        bgfile = 'UDF_FIT/UDF_%05d.bg.dat' %(id)
        if os.path.exists(bgfile):
            line = open(bgfile).readlines()[1][:-1]
            fp.write('%s  %7.1f %7.1f  %.2f\n' %(line, c.x_image[i], c.y_image[i], c.mag_auto[i]))
    
    fp.close()
    
    bg = catIO.Readfile('udf_backgrounds.dat')
    
    ok = (bg.mag > 24) & (bg.c0+0.003 > 0) & (bg.c0+0.003 < 0.004)
    plt.scatter(bg.x[ok], bg.y[ok], c=bg.c0[ok], s=30, vmin=-0.004, vmax=0.002)
    
    #### Try 2D as in scipy example (need newer version)
    # from scipy.interpolate import griddata
    # 
    # points = [bg.x[ok], bg.y[ok]]
    # values = bg.c0[ok]
    # 
    # grid_z2 = griddata(points, values, (bg.x, bg.y), method='cubic')
    
    from scipy import interpolate
    bg_spline = interpolate.SmoothBivariateSpline(bg.x[ok], bg.y[ok], bg.c0[ok], kx=4, ky=4)
    
    test = bg.c0*0.
    for i in range(bg.N):
        test[i] = bg_spline(bg.x[i], bg.y[i])
    #
    plt.scatter(bg.x+3600, bg.y, c=test, s=30, vmin=-0.004, vmax=0.002)
        
    plt.text(2000,3800,'Data (mag > 24)', ha='center', va='center')
    plt.text(5600,3800,'Interpolated', ha='center', va='center')
    plt.savefig('background_spline.pdf')
    
    import cPickle as pickle
    fp = open('background_spline.pkl','wb')
    pickle.dump(bg_spline, fp)
    fp.close()
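
A minimal sketch, not part of the original file, of how the pickled background spline could be read back and evaluated at a position in the UDF frame; it assumes only the pickle and scipy interfaces already used above (the helper name is hypothetical):

import pickle  # cPickle wrote the file on Python 2; pickle reads the same format

def evaluate_udf_background(x, y, spline_file='background_spline.pkl'):
    """Hypothetical helper: interpolated background at UDF frame position (x, y)."""
    fp = open(spline_file, 'rb')
    bg_spline = pickle.load(fp)  # scipy.interpolate.SmoothBivariateSpline
    fp.close()
    # The spline returns a 2D array even for scalar inputs; pull out the value
    return bg_spline(x, y)[0][0]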
Code Example #2
def fit_goodsn():
    import unicorn.galfit
    import threedhst

    tar_path = '/research/HST/GRISM/3DHST/GOODS-N/HTML_TAR/'

    catalogs = glob.glob(
        '/research/HST/GRISM/3DHST/GOODS-N/HTML_v1.0/*drz.cat')
    psfs = glob.glob('star_ib*fits')
    matches = glob.glob(
        '/research/HST/GRISM/3DHST/GOODS-N/HTML_v1.0/SED/*match.cat')
    for i, catalog in enumerate(catalogs):
        cat = threedhst.sex.mySexCat(catalog)
        mag = np.cast[float](cat.MAG_F1392W)

        match = catIO.Readfile(matches[i])

        q = []
        for id in match.id_f140w:
            mat = np.where(cat.id == id)[0][0]
            q.append(mat)

        q = np.array(q)
        use = q[(match.logm > 10.6) & (match.rmatch < 1)]

        root = os.path.basename(catalog).split('_drz')[0]
        for id in cat.id[use]:
            status = unicorn.galfit.go_fit(id=id,
                                           fix_n=False,
                                           tar_root=root,
                                           tar_path=tar_path,
                                           PSF_IMAGE=psfs[i])
Code Example #3
def fit_uds_marshall():
    import unicorn.galfit
    import threedhst

    tar_path = '/research/HST/GRISM/3DHST/SN-MARSHALL/HTML/images/'
    catalogs = glob.glob('/research/HST/GRISM/3DHST/SN-MARSHALL/HTML/*drz.cat')
    PSF_IMAGE = 'star_UDS_direct1_00360.thumb.fits'
    matches = glob.glob(
        '/research/HST/GRISM/3DHST/SN-MARSHALL/HTML/SED/*match.cat')

    for i, catalog in enumerate(catalogs):
        cat = threedhst.sex.mySexCat(catalog)
        mag = np.cast[float](cat.MAG_F1249W)
        use = mag < 24.5
        root = os.path.basename(catalog).split('_drz')[0]

        match = catIO.Readfile(matches[i])
        q = []
        for id in match.id_f140w:
            mat = np.where(cat.id == id)[0][0]
            q.append(mat)

        q = np.array(q)
        use = q[(match.logm >= 10.9) & (match.rmatch < 1)]

        for id in cat.id[use]:
            if not os.path.exists('%s_%05d_galfit.png' % (root, id)):
                status = unicorn.galfit.go_fit(id=id,
                                               fix_n=False,
                                               tar_root=root,
                                               fit_sky=False,
                                               tar_path=tar_path,
                                               PSF_IMAGE=PSF_IMAGE)
Code Example #4
def fit_cosmos():
    import unicorn.galfit
    import threedhst

    sexCat = threedhst.sex.mySexCat('orient1_drz.cat')
    mag = np.cast[float](sexCat.MAG_F1392W)
    use = mag < 23.5

    root = 'orient1'

    #### matched catalog, do all objects that have NMBS matches to some K limit
    match = catIO.Readfile('../../../../COSMOS/HTML_v1.0/SED/' + root +
                           '_match.cat')
    q = []
    for id in match.id_f140w:
        mat = np.where(sexCat.id == id)[0][0]
        q.append(mat)

    q = np.array(q)
    use = q[(match.mag_ktot < 23) & (match.rmatch < 1) & (match.logm >= 10.9)]

    for id in sexCat.id[use]:
        try:
            #
            status = unicorn.galfit.go_fit(id=id,
                                           fix_n=False,
                                           fit_sky=False,
                                           tar_root=root,
                                           PSF_IMAGE='star_' + root +
                                           '_thumb.fits')
            # status = unicorn.galfit.go_fit(id=id, fix_n=False,
            #       tar_root='orient2',
            #       PSF_IMAGE='star_orient2_01732_thumb.fits')
        except:
            pass
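
The three fitting functions above share one id-matching idiom: for each id in the matched external catalog, np.where finds the corresponding row of the SExtractor catalog, and the collected indices q reorder the SExtractor columns to match. A self-contained sketch with made-up id arrays:

import numpy as np

cat_id = np.array([101, 102, 103, 104, 105])   # SExtractor catalog ids (made up)
match_id = np.array([103, 101, 105])           # ids from the matched catalog (made up)

# Row index in the SExtractor catalog for each matched id
q = np.array([np.where(cat_id == i)[0][0] for i in match_id])

print(q)           # [2 0 4]
print(cat_id[q])   # [103 101 105], same order as match_id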
Code Example #5
File: dr1.py Project: gbrammer/unicorn
def extract_new_redshifts(PATH='./'):
    """
    UDF 
    
    The fitting code saves a FITS file with p(z) but doesn't print the 
    results.  Pull it out and make a catalog
    """
    import unicorn.interlace_test as test
    import unicorn.catalogs2 as cat2
    
    c = catIO.Readfile('../F140W/HUDF12-F140W.reform.cat')
    zsp = cat2.SpeczCatalog()
    dr, idx = zsp.match_list(c.x_world, c.y_world)
    c.z_spec = zsp.zspec[idx] #[dr < 0.3]
    c.z_spec[dr > 0.3] = -1
    
    ok = c.mag_auto < 26
    
    original_path = os.getcwd()
    
    fp = open('udf_redshifts.dat','w')
    fp.write('# id mag flag z_max z_peak l68 u68 l95 u95 z_spec z_source\n')
    fp.write('# %s\n' %(PATH))
    
    os.chdir(PATH)
    
    for i in range(ok.sum()):
        id = c.number[ok][i]
        z_spec = c.z_spec[ok][i]
        if z_spec > 0:
            z_source = zsp.source[idx][ok][i]
        else:
            z_source = '-'
        #
        logstr = 'UDF_%05d  %.2f ' %(id, c.mag_auto[ok][i])
        if os.path.exists('UDF_%05d.new_zfit.pz.fits' %(id)):
            self = test.SimultaneousFit('UDF_%05d' %(id))
            self.read_master_templates()
            status = self.new_load_fits()
            if status:
                logstr += ' 1 %.4f %.4f %.4f %.4f %.4f %.4f  %.4f %s' %(self.z_max_spec, self.z_peak_spec, self.c68[0], self.c68[1], self.c95[0], self.c95[1], z_spec, z_source)
            else:
                logstr += '-1 %.4f %.4f %.4f %.4f %.4f %.4f  %.4f %s' %(-1, -1, -1, -1, -1, -1, -1, '-')
        else:
            logstr += ' 0 %.4f %.4f %.4f %.4f %.4f %.4f  %.4f %s' %(-1, -1, -1, -1, -1, -1, -1, '-')

        print logstr
        fp.write(logstr+'\n')
    
    fp.close()
    os.chdir(original_path)
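
The catalog written above can be read back with the same catIO.Readfile interface used throughout these examples; a short sketch, assuming the column names in the header line written by the function:

from threedhst import catIO

zcat = catIO.Readfile('udf_redshifts.dat')
fitted = zcat.flag == 1                  # objects where new_load_fits succeeded
has_spec = fitted & (zcat.z_spec > 0)    # ... and a matched spectroscopic redshift
print('%d fits, %d with z_spec' % (fitted.sum(), has_spec.sum()))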
Code Example #6
File: zodi.py Project: gbrammer/wfc3
def compare_zodi():
    """
    Compare minimum background flux in a visit to the computed zodi
    """
    import mywfc3.zodi
    from threedhst import catIO
    import glob
    from mywfc3.utils import gzfile

    filter = 'G141'
    asns = glob.glob('*%s_orbit.png' % (filter))
    colors = np.array(['blue', 'red', 'orange'])
    #colors = np.array(['blue', 'white', 'orange'])
    # fig = unicorn.plotting.plot_init(xs=6, aspect=0.7, left=0.12, right=0.12)
    # ax = fig.add_subplot(111)
    bg_min = np.ones(len(asns))
    zodi_predicted = bg_min * 1.
    nor = bg_min * 1.
    for i, asn in enumerate(asns):
        root = asn.split('_')[0]
        print root
        os.system('cat %s*%s_orbit.dat > /tmp/%s.dat' %
                  (root[:6], filter, root))
        bg1 = catIO.Readfile('/tmp/%s.dat' % (root),
                             save_fits=False,
                             force_lowercase=False)
        bg_min[i] = bg1.bg.min()
        files = glob.glob('%s*raw.fits*' % (root[:6]))
        zodi_predicted[i] = mywfc3.zodi.flt_zodi(gzfile(files[0]),
                                                 verbose=False)
        nor[i] = mywfc3.zodi.flt_zodi(gzfile(files[0]),
                                      verbose=False,
                                      pirzkal=True)

    plt.scatter(bg_min,
                zodi_predicted,
                alpha=0.4,
                color='black',
                label='Synphot')
    plt.scatter(bg_min, nor, alpha=0.4, color='orange', label='Nor')

    plt.plot([0, 4], [0, 4], color='red', alpha=0.2, linewidth=3)
    plt.ylim(0.5, 3.5)
    plt.xlim(0.5, 3.5)
    plt.xlabel('Background, visit minimum')
    plt.ylabel('Predicted zodi')
    plt.legend(loc='lower right', prop={'size': 10})

    plt.savefig('PredictedZodi_%s.pdf' % (filter))
Code Example #7
File: prepare_fixed_flt.py Project: gbrammer/unicorn
def process_raw_all(field='AEGIS'):
    #### Reprocess *all* of the FLTs with variable backgrounds that
    #### weren't already refit above
    import glob
    import os

    import numpy as np

    import unicorn
    import threedhst
    from threedhst import catIO

    files = glob.glob('/3DHST/Spectra/Work/BACKGROUND/%s/*G141_orbit.dat' %
                      (field))
    redo_list = []
    for file in files:
        bg = catIO.Readfile(file, save_fits=False, force_lowercase=True)
        var_bg = np.ptp(bg.bg[1:]) > 0.15
        no_skip = True
        if os.path.exists('%sq_flt.fits' %
                          (os.path.split(file)[-1].split('j_')[0])):
            im2flt_key = threedhst.utils.gethead(
                '%sq_flt.fits' % (os.path.split(file)[-1].split('j_')[0]),
                keys=['IMA2FLT'])
            if im2flt_key[0] == '':
                no_skip = True
            else:
                no_skip = False
        rawfile = '%sq_raw.fits' % (os.path.split(file)[-1].split('j_')[0])
        print rawfile, np.ptp(bg.bg[1:]), var_bg, no_skip, var_bg & no_skip
        #
        if var_bg & no_skip:
            redo_list.append(rawfile)
            if not os.path.exists(rawfile):
                print '%s does not exist!' % (rawfile)
                continue
            #
            unicorn.prepare.make_IMA_FLT(raw=rawfile, pop_reads=[])
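
The variable-background test above flags an exposure when the peak-to-peak range of the per-read background (dropping the first read) exceeds 0.15; a minimal numpy illustration with made-up background values:

import numpy as np

# Hypothetical per-read background levels for one exposure,
# in the units of the *G141_orbit.dat files (e-/s)
bg = np.array([1.10, 1.05, 1.08, 1.35, 1.60, 1.55])

var_bg = np.ptp(bg[1:]) > 0.15
print('ptp = %.2f, variable = %s' % (np.ptp(bg[1:]), var_bg))   # ptp = 0.55, variable = True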
Code Example #8
def make_cat(symbol_color='red', masslim=(11, 13), CIRCULARIZE=True):
    import unicorn.galfit
    import threedhst
    import numpy as np

    sexCat = threedhst.sex.mySexCat('orient1_drz.cat')
    sexCat.re = sexCat.id * 0. - 99
    sexCat.n = sexCat.id * 0. - 99
    sexCat.bovera = sexCat.id * 0. - 99
    sexCat.chi2 = sexCat.id * 0. + 100

    for i, id in enumerate(sexCat.id):
        #print id
        try:
            root = 'orient1_%05d_galfit' % (id)
            params = unicorn.galfit.read_log(root + '.log')
            x0, y0, mag, re, n, bovera, chi2 = params
            sexCat.re[i] = np.float(re)
            sexCat.n[i] = np.float(n)
            sexCat.bovera[i] = np.float(bovera)
            sexCat.chi2[i] = np.float(chi2)
            #print re
        except:
            pass
        #
        try:
            root = 'orient2_%05d_galfit' % (id)
            params = unicorn.galfit.read_log(root + '.log')
            x0, y0, mag, re, n, bovera, chi2 = params
            sexCat.re[i] = np.float(re)
            sexCat.n[i] = np.float(n)
            sexCat.bovera[i] = np.float(bovera)
            sexCat.chi2[i] = np.float(chi2)
            #print re
        except:
            pass

    ##### Make plot of sizes vs. z
    match = catIO.Readfile(
        '../../../../COSMOS/HTML_v1.0/SED/orient1_match.cat')

    match1 = catIO.Readfile(
        '../../../../COSMOS/HTML_v1.0/SED/orient1_match.cat')
    q = []
    for id in match1.id_f140w:
        mat = np.where(sexCat.id == id)[0][0]
        q.append(mat)
    #
    match2 = catIO.Readfile(
        '../../../../COSMOS/HTML_v1.0/SED/orient2_match.cat')
    for id in match2.id_f140w:
        mat = np.where(sexCat.id == id)[0][0]
        q.append(mat)

    for col in match.columns:
        match[col] = np.append(match1[col], match2[col])

    q = np.array(q)

    fp = open('orient1_galfit.cat', 'w')
    fp.write(
        '# id_f140w re n bovera chi2 id_nmbs46 z_peak logm star_flag r_match  \n'
    )
    for i, qi in enumerate(q):
        fp.write(
            '%-5d %7.2f %7.2f %7.2f %9.1e  %-5d %8.3f %6.2f %1d  %5.2f\n' %
            (sexCat.id[qi], sexCat.re[qi], sexCat.n[qi], sexCat.bovera[qi],
             sexCat.chi2[qi], match.id_phot[i], match.z_peak[i], match.logm[i],
             match.star_flag[i], match.rmatch[i]))

    fp.close()

    zgrid = np.array([
        0.2, 0.3, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7, 0.8, 0.9, 1.0, 1.1,
        1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.0, 2.1, 2.2, 2.3, 2.4, 2.5
    ])
    scale = np.array([
        3.268, 4.421, 5.343, 5.733, 6.082, 6.394, 6.673, 6.922, 7.144, 7.518,
        7.812, 8.041, 8.216, 8.346, 8.439, 8.502, 8.539, 8.556, 8.555, 8.540,
        8.512, 8.475, 8.430, 8.377, 8.320, 8.257, 8.192
    ])

    use = (match.star_flag == 0) & (match.rmatch < 1)
    use = (match.logm > masslim[0]) & (match.logm < masslim[1])
    use = use & (sexCat.chi2[q] < 2)

    sint = np.interp(match.z_peak[use], zgrid, scale)

    xvd = np.array([0, 0.6, 1.1, 1.6, 2.0])
    yvd = np.array([12.4, 8.0, 5.3, 4.1, 3.0])

    if CIRCULARIZE:
        sexCat.re *= np.sqrt(1. / np.abs(sexCat.bovera))

    plt.semilogy(match.z_peak[use],
                 sexCat.re[q][use] * sint * 0.06,
                 marker='o',
                 linestyle='None',
                 color=symbol_color,
                 markersize=12,
                 alpha=0.7)
    print symbol_color
    plt.plot(xvd, yvd, marker='None', linewidth=10, alpha=0.3, color='black')

    #### Add GOODS-N
    # catalogs = glob.glob('/research/HST/GRISM/3DHST/GOODS-N/HTML_v1.0/*drz.cat')
    # tar_path = '/research/HST/GRISM/3DHST/GOODS-N/HTML_TAR/'
    # for catalog in catalogs:
    #     print catalog
    #     cat = threedhst.sex.mySexCat(catalog)
    #     mag = np.cast[float](cat.MAG_F1392W)
    #     use = mag < 23.5
    #     root=os.path.basename(catalog).split('_drz')[0]
    #     cat.re = cat.id*0.-99
    #     cat.n = cat.id*0.-99
    #     cat.bovera = cat.id*0.-99
    #     cat.chi2 = cat.id*0.+100
    #     for i,id in enumerate(cat.id):
    #         #print id
    #         try:
    #             rooti=root+'_%05d_galfit' %(id)
    #             params = unicorn.galfit.read_log(rooti+'.log')
    #             x0, y0, mag, re, n, bovera, chi2 = params
    #             cat.re[i] = np.float(re)
    #             cat.n[i] = np.float(n)
    #             cat.bovera[i] = np.float(bovera)
    #             cat.chi2[i] = np.float(chi2)
    #             #print re
    #         except:
    #             pass
    #     #
    #     match = catIO.Readfile('/research/HST/GRISM/3DHST/GOODS-N/HTML_v1.0/SED/'+root+'_match.cat')
    #     q = []
    #     for id in match.id_f140w:
    #         mat = np.where(cat.id == id)[0][0]
    #         q.append(mat)
    #     #
    #     q = np.array(q)
    #     #
    #     use = (match.rmatch < 1)
    #     use = (match.logm >= masslim[0]) & (match.logm <= masslim[1])
    #     use = use & (cat.chi2[q] < 2)
    #     sint = np.interp(match.z_peak[use], zgrid, scale)
    #     #
    #     if CIRCULARIZE:
    #         cat.re *= np.sqrt(1./np.abs(cat.bovera))
    #
    #     print match.z_peak[use], cat.re[q][use]*sint*0.06
    #
    #     plt.semilogy(match.z_peak[use], cat.re[q][use]*sint*0.06, marker='o', linestyle='None', color='green', markersize=10, alpha=0.6)

    # #### Add UDS
    catalogs = glob.glob('/research/HST/GRISM/3DHST/SN-MARSHALL/HTML/*drz.cat')
    tar_path = '/research/HST/GRISM/3DHST/SN-MARSHALL/HTML/images/'
    for catalog in catalogs[0:1]:
        cat = threedhst.sex.mySexCat(catalog)
        mag = np.cast[float](cat.MAG_F1249W)
        use = mag < 23.5
        root = os.path.basename(catalog).split('_drz')[0]
        cat.re = cat.id * 0. - 99
        cat.n = cat.id * 0. - 99
        cat.bovera = cat.id * 0. - 99
        cat.chi2 = cat.id * 0. + 100
        for i, id in enumerate(cat.id):
            #print id
            try:
                rooti = root + '_%05d_galfit' % (id)
                params = unicorn.galfit.read_log(rooti + '.log')
                x0, y0, mag, re, n, bovera, chi2 = params
                cat.re[i] = np.float(re)
                cat.n[i] = np.float(n)
                cat.bovera[i] = np.float(bovera)
                cat.chi2[i] = np.float(chi2)
                #print re
            except:
                pass
        #
        match = catIO.Readfile(
            '/research/HST/GRISM/3DHST/SN-MARSHALL/HTML/SED/' + root +
            '_match.cat')
        q = []
        for id in match.id_f140w:
            mat = np.where(cat.id == id)[0][0]
            q.append(mat)
        #
        q = np.array(q)
        #
        use = (match.star_flag == 0) & (match.rmatch < 1)
        use = (match.logm >= masslim[0]) & (match.logm <= masslim[1])
        use = use & (cat.chi2[q] < 2)
        sint = np.interp(match.z_peak[use], zgrid, scale)

        print match.z_peak[use]
        if CIRCULARIZE:
            cat.re *= np.sqrt(1. / np.abs(cat.bovera))
        #
        plt.plot(match.z_peak[use],
                 cat.re[q][use] * sint * 0.06,
                 marker='o',
                 linestyle='None',
                 color='orange',
                 markersize=10,
                 alpha=0.6)

    plt.ylim(0.5, 30)
    #plt.ylim(-5,25)

    plt.xlim(0, 3.5)
    plt.xlabel(r'$z_\mathrm{phot}$')
    plt.ylabel(r'$r_e$ [kpc]')
    plt.text(
        2.0, 13, r'F140W, $%4.1f\ <\ \log\ M/M_\odot\ <\ %4.1f$' %
        (masslim[0], masslim[1]))
    plt.savefig('3dhst_cosmos_sizes.png')
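
The size plot above converts GALFIT effective radii from 0.06-arcsec pixels to kpc with a kpc-per-arcsec scale interpolated from the zgrid/scale arrays; the conversion in isolation, for a hypothetical object:

import numpy as np

# Subset of the zgrid / scale arrays defined in make_cat (kpc per arcsec)
zgrid = np.array([0.2, 0.5, 1.0, 1.5, 2.0, 2.5])
scale = np.array([3.268, 6.082, 8.041, 8.539, 8.475, 8.192])

z_peak = 1.2    # hypothetical photometric redshift
re_pix = 5.0    # hypothetical GALFIT effective radius [0.06" pixels]

re_kpc = re_pix * 0.06 * np.interp(z_peak, zgrid, scale)
print('r_e = %.1f kpc' % re_kpc)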
Code Example #9
File: intersim.py Project: gbrammer/unicorn
def show_results(use_tex=False):
    import threedhst.catIO as catIO

    stats = catIO.Readfile('all_simspec.dat')
    ha_model, s2_model = unicorn.intersim.get_line_fluxes(z0=1.0,
                                                          mag=stats.mag)

    xstar = [14.5, 24.1]
    ystar = [3.00, 2.13]
    yi = np.interp(stats.mag, xstar, ystar)
    #plt.scatter(stats.mag, yi, s=0.1, color='black')
    is_star = stats.r50 < yi
    plt.scatter(stats.mag[is_star], stats.r50[is_star], alpha=0.5)
    plt.scatter(stats.mag[~is_star],
                stats.r50[~is_star],
                alpha=0.2,
                color='red')

    #### Color by r50/r90 concentration
    concentration = stats.r50 / stats.r90
    msize = np.maximum((concentration / 0.2)**4, 4)
    mcol = np.minimum((np.maximum(concentration, 0.3) - 0.3) / 0.2, 1)
    plt.scatter(stats.mag, concentration, c=mcol, alpha=0.5)

    mcol = np.minimum(np.log10(stats.r50 - 1.1), 1)

    stats.sky_avg += np.random.normal(size=stats.sky_avg.shape) * 0.01
    sky_col = np.minimum((stats.sky_avg - 0.8) / 0.8, 1)
    plt.scatter(stats.mag, stats.sky_avg, c=sky_col, alpha=0.5)

    #### Continuum depth

    BINWIDTH = 92
    bin_sn = np.sqrt(BINWIDTH / 22)
    binned = stats.continuum_sn * bin_sn

    #### Get correction functions
    xm, ym, ys, nn = threedhst.utils.runmed(stats.mag, binned, NBIN=80)
    ymag = np.interp(stats.mag, xm, ym)

    sub = (stats.mag > 19) & (stats.mag < 22.5) & (stats.continuum_sn > 0) & (
        stats.ha_flux > 0)  #& (~is_star)

    xm, ym, ys, nn = threedhst.utils.runmed(stats.r50[sub],
                                            (binned / ymag)[sub],
                                            NBIN=20)
    ysize = np.interp(stats.r50, xm, ym)

    xm, ym, ys, nn = threedhst.utils.runmed(stats.sky_avg[sub],
                                            (binned / ymag / ysize)[sub],
                                            NBIN=25)
    ysky = np.interp(stats.sky_avg, xm, ym)

    xm, ym, ys, nn = threedhst.utils.runmed(
        concentration[sub], (binned / ymag / ysize / ysky)[sub], NBIN=25)
    ycons = np.interp(concentration, xm, ym)

    fig = unicorn.catalogs.plot_init(xs=8,
                                     aspect=1. / 4,
                                     left=0.07,
                                     use_tex=use_tex)
    #fig.subplots_adjust(wspace=0.27, hspace=0.25, left=0.12)  # 2x2
    fig.subplots_adjust(wspace=0.38, hspace=0.25, left=0.074, bottom=0.22)

    si = 4
    mark = 'o'
    cmap = cm.jet
    bins = [80, 80]

    ax = fig.add_subplot(141)

    #plt.scatter(stats.mag, stats.continuum_sn*bin_sn, alpha=0.5, c=mcol)
    use = np.isfinite(binned) & (binned > 0)
    #plt.scatter(stats.mag[use], (binned/ysize/ysky)[use], alpha=0.5, c=mcol[use], s=si, marker=mark)
    unicorn.intersim.show_hist_contour(stats.mag[use],
                                       (binned / ysize / ysky)[use],
                                       axrange=[[20, 24], [0.5, 100]],
                                       ylog=True,
                                       cmap=cmap,
                                       bins=bins)

    xm, ym, ys, nn = threedhst.utils.runmed(stats.mag[use],
                                            (binned / ysize / ysky)[use],
                                            NBIN=80)
    plt.plot(xm, ym, linewidth=2, color='white', alpha=0.5, zorder=100)
    plt.plot(xm, ym, linewidth=1, color='black', alpha=0.8, zorder=100)
    plt.plot([0, 20], [1, 1],
             linewidth=1,
             alpha=0.4,
             zorder=101,
             color='black')
    plt.ylim(0.5, 100)
    plt.plot([20, 24], [5, 5], color='black', alpha=0.4)
    plt.xlim(20, 24)
    plt.semilogy()
    if use_tex:
        plt.xlabel(r'MAG\_AUTO $m_{140}$')
    else:
        plt.xlabel(r'MAG_AUTO $m_{140}$')

    plt.ylabel('continuum S/N')
    ax.xaxis.set_major_locator(unicorn.analysis.MyLocator(6, integer=True))
    ax.xaxis.set_minor_locator(MultipleLocator(0.5))
    ax.set_yticks([1, 10, 100])
    ax.set_yticklabels(['1', '10', '100'])

    sn5_limit = np.interp(5, ym[::-1], xm[::-1])
    print 'Continuum, S/N=5 @ %.3f' % (sn5_limit)
    print threedhst.utils.biweight(stats.r50[sub], both=True)

    ax = fig.add_subplot(142)

    #plt.scatter(stats.r50[sub], (binned/ymag/ysky)[sub], c=mcol[sub], alpha=0.5, s=si)
    unicorn.intersim.show_hist_contour(stats.r50[sub] * 0.06,
                                       (binned / ymag / ysky)[sub],
                                       axrange=[[0, 20 * 0.06], [0.3, 1.7]],
                                       bins=bins,
                                       cmap=cmap)
    xm, ym, ys, nn = threedhst.utils.runmed(stats.r50[sub] * 0.06,
                                            (binned / ymag / ysky)[sub],
                                            NBIN=20)
    plt.plot(xm, ym, linewidth=2, color='white', alpha=0.5, zorder=100)
    plt.plot(xm, ym, linewidth=1, color='black', alpha=0.8, zorder=100)
    plt.plot([0, 20], [1, 1],
             linewidth=1,
             alpha=0.4,
             zorder=101,
             color='black')
    plt.fill_betweenx([0, 10], [1.7 * 0.06, 1.7 * 0.06],
                      [2.5 * 0.06, 2.5 * 0.06],
                      alpha=0.15,
                      color='black')
    #plt.xlabel(r'$R_{50}$ [$0.\!\!^{\prime\prime}06$ pix]')
    plt.xlabel(r'$R_{50}$ [arcsec]')
    plt.ylabel(r'$\delta$ cont. S/N')
    plt.ylim(0.3, 1.7)
    #plt.ylim(0.3,2.5)
    plt.xlim(0, 15 * 0.06)
    majorLocator = MultipleLocator(0.2)
    minorLocator = MultipleLocator(0.1)

    ax.xaxis.set_major_locator(majorLocator)
    ax.xaxis.set_minor_locator(minorLocator)

    # x0 = np.interp(1,ym[::-1],xm[::-1])
    # plt.plot(xm,(x0/xm), color='red')
    # plt.plot(xm,(x0/xm)**0.5, color='red')

    x0 = np.interp(1, ym[::-1], xm[::-1])
    plt.plot(xm, (x0 / xm)**(0.5), color='white', alpha=0.5, linewidth=2)
    plt.plot(xm, (x0 / xm)**(0.5), color='red', alpha=0.8)

    ysize = np.interp(stats.r50 * 0.06, xm, ym)

    # plt.scatter(stats.r50[sub], (binned/ymag/ysize)[sub], c=sky_col[sub], alpha=0.5)
    # xm, ym, ys, nn = threedhst.utils.runmed(stats.r50[sub], (binned/ymag/ysize)[sub], NBIN=10)
    # plt.plot(xm, ym, linewidth=2, color='black', alpha=0.5)

    ax = fig.add_subplot(143)

    #plt.scatter(stats.sky_avg[sub], (binned/ymag/ysize)[sub], c=mcol[sub], alpha=0.5, s=si)
    unicorn.intersim.show_hist_contour(stats.sky_avg[sub],
                                       (binned / ymag / ysize)[sub],
                                       axrange=[[0.5, 3.5], [0.3, 1.7]],
                                       bins=bins,
                                       cmap=cmap)
    xm, ym, ys, nn = threedhst.utils.runmed(stats.sky_avg[sub],
                                            (binned / ymag / ysize)[sub],
                                            NBIN=25)
    plt.plot(xm, ym, linewidth=2, color='white', alpha=0.5, zorder=100)
    plt.plot(xm, ym, linewidth=1, color='black', alpha=0.8, zorder=100)
    plt.plot([0, 20], [1, 1],
             linewidth=1,
             alpha=0.4,
             zorder=101,
             color='black')
    plt.ylim(0.3, 1.7)
    plt.xlim(0.5, 3.5)
    plt.xlabel(r'Background [e$^-$ / s]')
    plt.ylabel(r'$\delta$ cont S/N')
    ax.xaxis.set_major_locator(unicorn.analysis.MyLocator(6, integer=True))

    x0 = np.interp(1, ym[::-1], xm[::-1])
    plt.plot(xm, (x0 / xm)**(0.5), color='white', alpha=0.5, linewidth=2)
    plt.plot(xm, (x0 / xm)**(0.5), color='red', alpha=0.7)

    ysky = np.interp(stats.sky_avg, xm, ym)

    ### Very little residual trend with concentration
    ax = fig.add_subplot(144)

    #plt.scatter(concentration[sub], (binned/ymag/ysize/ysky)[sub], c=mcol[sub], s=si, alpha=0.5)
    unicorn.intersim.show_hist_contour(concentration[sub],
                                       (binned / ymag / ysize / ysky)[sub],
                                       axrange=[[0.25, 0.60], [0.3, 1.7]],
                                       bins=bins,
                                       cmap=cmap)
    xm, ym, ys, nn = threedhst.utils.runmed(
        concentration[sub], (binned / ymag / ysize / ysky)[sub], NBIN=25)
    plt.plot(xm, ym, linewidth=2, color='white', alpha=0.5, zorder=100)
    plt.plot(xm, ym, linewidth=1, color='black', alpha=0.8, zorder=100)
    plt.plot([0, 20], [1, 1],
             linewidth=1,
             alpha=0.4,
             zorder=101,
             color='black')
    plt.xlim(0.25, 0.60)
    plt.ylim(0.3, 1.7)
    #plt.ylim(0.5,1.5)
    plt.xlabel(r'$C = R_{50}/R_{90}$')
    plt.ylabel(r'$\delta$ cont S/N')
    #ax.xaxis.set_major_locator(unicorn.analysis.MyLocator(5, prune=None))
    ax.xaxis.set_major_locator(MultipleLocator(0.1))

    ycons = np.interp(concentration, xm, ym)

    plt.savefig('grism_cont_sensitivity.pdf')

    # #### Test
    # plt.scatter(stats.mag, binned, alpha=0.5, c=sky_col, s=4)
    # xm, ym, ys, nn = threedhst.utils.runmed(stats.mag, binned, NBIN=80)
    # plt.errorbar(xm, ym, ys, linewidth=2, color='black', alpha=0.5)
    # plt.ylim(0.1,2000)
    # plt.plot([17,24],[5,5], color='black', alpha=0.4)
    # plt.xlim(17,24)
    # plt.semilogy()

    #### Line fluxes
    ha_sn = stats.ha_flux / stats.ha_flux_err

    show = np.isfinite(ha_sn) & (ha_sn > 0) & (stats.ha_flux > 0)

    xm, ym, ys, nn = threedhst.utils.runmed(stats.ha_flux[~is_star & show],
                                            ha_sn[~is_star & show],
                                            NBIN=25)
    yline_flux = np.interp(stats.ha_flux, xm, ym)

    #sub = (stats.ha_flux > 6) & (stats.ha_flux < 100) & (stats.mag > 18) & (np.isfinite(ha_sn)) # & (~is_star)
    #sub = (stats.mag > 19) & (stats.mag < 22.5) & (stats.continuum_sn > 0) & (stats.ha_flux > 0) #& (~is_star)

    xm, ym, ys, nn = threedhst.utils.runmed(stats.r50[sub],
                                            (ha_sn / yline_flux)[sub],
                                            NBIN=30)
    yline_r50 = np.interp(stats.r50, xm, ym)

    xm, ym, ys, nn = threedhst.utils.runmed(
        stats.sky_avg[sub], (ha_sn / yline_flux / yline_r50)[sub], NBIN=20)
    yline_sky = np.interp(stats.sky_avg, xm, ym)

    xm, ym, ys, nn = threedhst.utils.runmed(concentration[sub],
                                            (ha_sn / yline_flux / yline_r50 /
                                             yline_sky)[sub],
                                            NBIN=10)
    yline_con = np.interp(concentration, xm, ym)

    plt.errorbar(ha_model,
                 stats.ha_flux,
                 stats.ha_flux_err,
                 marker='o',
                 markersize=0.1,
                 linestyle='None',
                 color='0.5')
    plt.scatter(ha_model, stats.ha_flux, c=mcol, zorder=100, alpha=0.5)
    #plt.scatter(stats.s2_flux, s2_model, alpha=0.8, c=mc)
    plt.plot([0.1, 1000], [0.1, 1000], color='black', alpha=0.5)
    plt.xlim(0.5, 1000)
    plt.ylim(0.5, 1000)
    plt.loglog()

    # 2x2
    #fig = unicorn.catalogs.plot_init(xs=5.5, aspect=1, left=0.08)
    #fig.subplots_adjust(wspace=0.27, hspace=0.25, left=0.12)
    fig = unicorn.catalogs.plot_init(xs=8,
                                     aspect=1. / 4,
                                     left=0.07,
                                     use_tex=use_tex)
    fig.subplots_adjust(wspace=0.38, hspace=0.25, left=0.074, bottom=0.22)

    ax = fig.add_subplot(141)

    si = 4

    show = np.isfinite(ha_sn) & (ha_sn > 0) & (stats.ha_flux > 0)
    #plt.scatter(stats.ha_flux[show], ha_sn[show], c=mcol[show], s=si, zorder=100, alpha=0.3)
    unicorn.intersim.show_hist_contour(stats.ha_flux[show],
                                       (ha_sn / yline_r50 / yline_sky /
                                        yline_con)[show],
                                       axrange=[[0.5, 100], [0.5, 100]],
                                       bins=bins,
                                       cmap=cmap,
                                       xlog=True,
                                       ylog=True)
    xm, ym, ys, nn = threedhst.utils.runmed(stats.ha_flux[~is_star & show],
                                            (ha_sn / yline_r50 / yline_sky /
                                             yline_con)[~is_star & show],
                                            NBIN=25)
    plt.plot(xm, ym, linewidth=2, color='white', alpha=0.5, zorder=100)
    plt.plot(xm, ym, linewidth=1, color='black', alpha=0.8, zorder=100)
    plt.plot([0, 20], [1, 1],
             linewidth=1,
             alpha=0.4,
             zorder=101,
             color='black')
    plt.plot([0.5, 100], [5, 5], color='black', alpha=0.4)
    plt.xlim(0.5, 100)
    plt.ylim(0.5, 100)
    plt.loglog()
    plt.xlabel(r'line flux [$10^{-17}$ ergs / s / cm$^2$]')
    plt.ylabel('line S/N')

    ax.set_yticks([1, 10, 100])
    ax.set_yticklabels(['1', '10', '100'])
    ax.set_xticks([1, 10, 100])
    ax.set_xticklabels(['1', '10', '100'])

    sn5_limit = np.interp(5, ym, xm)
    print 'Line, S/N=5 @ %.3e' % (sn5_limit)
    print threedhst.utils.biweight(stats.r50[sub], both=True)

    yline_flux = np.interp(stats.ha_flux, xm, ym)
    #plt.scatter(stats.ha_flux, ha_sn/yline_flux, c=mcol, alpha=0.2)

    #### Nice:  line flux with respect to concentration after taking out the overall trend with
    #### line strength
    ax = fig.add_subplot(142)

    #plt.scatter(stats.r50[sub], (ha_sn/yline_flux)[sub], c=mcol[sub], s=si, alpha=0.3)
    unicorn.intersim.show_hist_contour(stats.r50[sub] * 0.06,
                                       (ha_sn / yline_flux / yline_sky /
                                        yline_con)[sub],
                                       axrange=[[0, 15 * 0.06], [0.3, 2.5]],
                                       bins=bins,
                                       cmap=cmap)
    xm, ym, ys, nn = threedhst.utils.runmed(stats.r50[sub] * 0.06,
                                            (ha_sn / yline_flux / yline_sky /
                                             yline_con)[sub],
                                            NBIN=30)
    plt.plot(xm, ym, linewidth=2, color='white', alpha=0.5, zorder=100)
    plt.plot(xm, ym, linewidth=1, color='black', alpha=0.8, zorder=100)
    plt.plot([0, 20 * 0.06], [1, 1],
             linewidth=1,
             alpha=0.4,
             zorder=101,
             color='black')
    plt.fill_betweenx([0, 10], [1.7 * 0.06, 1.7 * 0.06],
                      [2.5 * 0.06, 2.5 * 0.06],
                      alpha=0.15,
                      color='black')
    plt.ylim(0.3, 2.5)
    plt.xlim(0, 15 * 0.06)
    #plt.xlabel(r'$R_{50}$ [$0.\!\!^{\prime\prime}06$ pix]')
    plt.ylabel(r'$\delta$ line S/N')
    #plt.semilogy()
    # x0 = np.interp(1,ym[::-1],xm[::-1])
    # plt.plot(xm,(x0/xm), color='red')
    # plt.plot(xm,(x0/xm)**0.5, color='red')

    plt.xlabel(r'$R_{50}$ [arcsec]')
    ax.xaxis.set_major_locator(MultipleLocator(0.2))
    ax.xaxis.set_minor_locator(MultipleLocator(0.1))

    x0 = np.interp(1, ym[::-1], xm[::-1])
    plt.plot(xm, (x0 / xm)**(0.5), color='red', alpha=0.7)

    yline_r50 = np.interp(stats.r50 * 0.06, xm, ym)

    ax = fig.add_subplot(143)

    #plt.scatter(stats.sky_avg[sub], (ha_sn/yline_flux/yline_r50)[sub], c=mcol[sub], s=si, alpha=0.3)
    unicorn.intersim.show_hist_contour(stats.sky_avg[sub],
                                       (ha_sn / yline_flux / yline_r50 /
                                        yline_con)[sub],
                                       axrange=[[0.5, 3.5], [0.3, 1.7]],
                                       bins=bins,
                                       cmap=cmap)
    xm, ym, ys, nn = threedhst.utils.runmed(stats.sky_avg[sub],
                                            (ha_sn / yline_flux / yline_r50 /
                                             yline_con)[sub],
                                            NBIN=20)
    plt.plot(xm, ym, linewidth=2, color='white', alpha=0.5, zorder=100)
    plt.plot(xm, ym, linewidth=1, color='black', alpha=0.8, zorder=100)
    plt.plot([0, 20], [1, 1],
             linewidth=1,
             alpha=0.4,
             zorder=101,
             color='black')
    plt.ylim(0.3, 1.7)
    plt.xlim(0.5, 3.5)
    plt.xlabel(r'Background [e$^-$ / s]')
    plt.ylabel(r'$\delta$ line S/N')
    ax.xaxis.set_major_locator(unicorn.analysis.MyLocator(6, integer=True))

    yline_sky = np.interp(stats.sky_avg, xm, ym)

    x0 = np.interp(1, ym[::-1], xm[::-1])
    plt.plot(xm, (x0 / xm)**(0.5), color='red', alpha=0.7)

    ax = fig.add_subplot(144)

    #plt.scatter(concentration[sub], (ha_sn/yline_flux/yline_r50/yline_sky)[sub], c=mcol[sub], s=si, alpha=0.3)
    unicorn.intersim.show_hist_contour(concentration[sub],
                                       (ha_sn / yline_flux / yline_r50 /
                                        yline_sky)[sub],
                                       axrange=[[0.25, 0.60], [0.3, 1.7]],
                                       bins=bins,
                                       cmap=cmap)
    xm, ym, ys, nn = threedhst.utils.runmed(concentration[sub],
                                            (ha_sn / yline_flux / yline_r50 /
                                             yline_sky)[sub],
                                            NBIN=10)
    plt.plot(xm, ym, linewidth=2, color='white', alpha=0.5, zorder=100)
    plt.plot(xm, ym, linewidth=1, color='black', alpha=0.8, zorder=100)
    plt.plot([0, 20], [1, 1],
             linewidth=1,
             alpha=0.4,
             zorder=101,
             color='black')
    plt.xlim(0.25, 0.60)
    plt.ylim(0.3, 1.7)
    plt.xlabel(r'$C = R_{50}/R_{90}$')
    plt.ylabel(r'$\delta$ line S/N')
    ax.xaxis.set_major_locator(MultipleLocator(0.1))

    yline_con = np.interp(concentration, xm, ym)

    plt.savefig('grism_line_sensitivity.pdf')

    # #### Test:
    # show = (np.isfinite(ha_sn)) & (stats.ha_flux > 0)
    # plt.scatter(stats.ha_flux[show], (ha_sn/yline_sky)[show], c=mcol[show], zorder=100, alpha=0.2)
    # xm, ym, ys, nn = threedhst.utils.runmed(stats.ha_flux[show],  (ha_sn/yline_sky)[show], NBIN=25)
    # plt.plot(xm, ym, linewidth=2, color='black', alpha=0.5, zorder=100)
    # plt.plot([0.5,1000],[5,5], color='black', alpha=0.4)
    # plt.xlim(0.5,1000)
    # plt.ylim(0.5,300)
    # plt.loglog()

    #plt.semilogy()

    #
    plt.scatter(stats.mag, stats.ha_flux, c=mcol, zorder=100, alpha=0.5)
    plt.ylim(0.1, 5000)
    plt.semilogy()

    #### EQW
    dha = stats.ha_eqw - 130.
    hy, hx, hh = plt.hist(dha / stats.ha_eq_err,
                          range=(-5, 5),
                          bins=50,
                          alpha=0.7)
    threedhst.utils.biweight(dha / stats.ha_eq_err, both=True)

    #### redshift
    dz = (stats.z_fit - 1) / 2.
    plt.scatter(stats.mag, dz, c=mcol, alpha=0.5)

    plt.scatter(stats.ha_flux, dz, c=mcol, alpha=0.5)
    plt.xlim(0.1, 5000)
    plt.semilogx()

    #### surface density
    mu = stats.mag - 2 * np.log(stats.r90 * 0.06)
    plt.scatter(stats.mag, mu, c=mcol)
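
The binned continuum S/N near the top of show_results rebins the per-pixel S/N into 92 Å wide bins, assuming roughly independent bins of about 22 Å, i.e. a sqrt(N) gain; the scaling in isolation:

import numpy as np

BINWIDTH = 92    # Angstroms per bin, as in show_results
native = 22.     # assumed width of one native spectral bin [A]

gain = np.sqrt(BINWIDTH / native)
print('S/N gain from binning: %.2f' % gain)   # about 2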
Code Example #10
File: intersim.py Project: gbrammer/unicorn
def get_results(force_new=False):
    """
    Collate the results from the simulated spectra and the input catalogs into single output 
    catalogs suitable for reading and plotting.
    
    for field in ['AEGIS','COSMOS','UDS','GOODS-S']:
        os.chdir(unicorn.GRISM_HOME+'%s/PREP_FLT' %(field))
        unicorn.intersim.get_results()
    
    os.chdir(unicorn.GRISM_HOME+'SIMULATIONS')
    status = os.system('cat ../AEGIS/PREP_FLT/simspec.dat ../COSMOS/PREP_FLT/simspec.dat ../GOODS-S/PREP_FLT/simspec.dat ../UDS/PREP_FLT/simspec.dat > all_simspec.dat')
    
    """

    import threedhst.catIO as catIO

    files = glob.glob('*linefit.dat')

    cat = None

    if (not os.path.exists('simspec.dat')) | force_new:
        fp = open('simspec.dat', 'w')
        fp.write(
            '# object sky_avg sky_lo sky_hi mag r50 r90 z_fit continuum_sn ha_flux ha_flux_err ha_eqw ha_eq_err s2_flux s2_flux_err s2_eqw s2_eq_err\n'
        )
        fp.write('dummy 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n')
        fp.close()

    log = catIO.Readfile('simspec.dat')

    for ii, file in enumerate(files):
        root = file.split('.linefit')[0]
        print unicorn.noNewLine + '%s (%d/%d)' % (root, ii + 1, len(files))
        if root in log.object:
            continue
        #
        fp = open('simspec.dat', 'a')
        pointing = root.split('_')[0]
        id = int(root.split('_')[1])
        if cat is None:
            cat = threedhst.sex.mySexCat(pointing + '_inter.cat')
            ### Get sky background
            asn = threedhst.utils.ASNFile(pointing + '-G141_asn.fits')
            bg = []
            for exp in asn.exposures:
                flt = pyfits.open(exp + '_flt.fits')
                bg.append(flt[0].header['SKYSCALE'])
            #
            bg_avg = np.mean(bg)
            bg_lo = np.min(bg)
            bg_hi = np.max(bg)
        else:
            if not cat.filename.startswith(pointing + '-'):
                cat = threedhst.sex.mySexCat(pointing + '_inter.cat')
                asn = threedhst.utils.ASNFile(pointing + '-G141_asn.fits')
                bg = []
                for exp in asn.exposures:
                    flt = pyfits.open(exp + '_flt.fits')
                    bg.append(flt[0].header['SKYSCALE'])
                #
                bg_avg = np.mean(bg)
                bg_lo = np.min(bg)
                bg_hi = np.max(bg)
        #
        gris = unicorn.interlace_fit.GrismSpectrumFit(root, verbose=False)
        if not gris.status:
            fp.close()
            continue
        #
        result = gris.stats()
        if result is False:
            fp.close()
            continue
        #
        DIRECT_MAG, Q_Z, F_COVER, F_FLAGGED, MAX_CONTAM, INT_CONTAM, F_NEGATIVE = result
        #
        lwindow = (gris.oned.data.wave > 1.4e4) & (gris.oned.data.wave < 1.6e4)
        if (lwindow.sum() < 10) | (INT_CONTAM > 0.3):
            fp.close()
            continue
        #
        continuum_sn = np.median(
            (gris.oned.data.flux / gris.oned.data.error)[lwindow])
        #
        lfit = catIO.Readfile(root + '.linefit.dat')
        if lfit.status is None:
            fp.close()
            continue
        #
        if 'Ha' in lfit.line:
            ix = np.arange(len(lfit.line))[lfit.line == 'Ha'][0]
            ha_flux, ha_flux_err, ha_eqw, ha_eqw_err = lfit.flux[
                ix], lfit.error[ix], lfit.eqw_obs[ix], lfit.eqw_obs_err[ix]
        else:
            ha_flux, ha_flux_err, ha_eqw, ha_eqw_err = -1, -1, -1, -1
        #
        if 'SII' in lfit.line:
            ix = np.arange(len(lfit.line))[lfit.line == 'SII'][0]
            s2_flux, s2_flux_err, s2_eqw, s2_eqw_err = lfit.flux[
                ix], lfit.error[ix], lfit.eqw_obs[ix], lfit.eqw_obs_err[ix]
        else:
            s2_flux, s2_flux_err, s2_eqw, s2_eqw_err = -1, -1, -1, -1
        #
        ic = np.arange(cat.nrows)[cat.id == id][0]
        fp.write(
            ' %s  %5.2f %5.2f %5.2f  %6.3f  %6.2f %6.2f %6.4f %6.2f  %6.2f %6.2f %6.2f %6.2f   %6.2f %6.2f %6.2f %6.2f\n'
            % (root, bg_avg, bg_lo, bg_hi, DIRECT_MAG,
               float(cat.FLUX_RADIUS[ic]), float(cat.FLUX_RADIUS2[ic]),
               gris.z_max_spec, continuum_sn, ha_flux, ha_flux_err, ha_eqw,
               ha_eqw_err, s2_flux, s2_flux_err, s2_eqw, s2_eqw_err))
        #
        fp.close()
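
get_results above is written to be restartable: it seeds simspec.dat with a header and a dummy row, then appends one line per spectrum and skips any root already present in the log. The same pattern in isolation, with a hypothetical file name and work list:

import os

items = ['obj_001', 'obj_002', 'obj_003']   # hypothetical work list
logfile = 'progress.dat'                    # hypothetical log, analogous to simspec.dat

if not os.path.exists(logfile):
    fp = open(logfile, 'w')
    fp.write('# object value\n')
    fp.write('dummy 0\n')    # dummy row so the reader always sees at least one data line
    fp.close()

done = [line.split()[0] for line in open(logfile) if not line.startswith('#')]

for item in items:
    if item in done:
        continue
    fp = open(logfile, 'a')                 # append results incrementally
    fp.write('%s %d\n' % (item, len(item)))
    fp.close()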
Code Example #11
def check_fast(object='AEGIS-1-G141_00497',
               wmin=2000,
               wmax=2.4e4,
               logx=False,
               image_type='png'):

    if object.startswith('GOODS-S') | object.startswith(
            'WFC3') | object.startswith('GEORGE') | object.startswith('PRIMO'):
        abzp = 23.86
    else:
        abzp = 25

    obs_sed = catIO.Readfile('ASCII/%s_obs_sed.dat' % (object))
    temp_sed = catIO.Readfile('ASCII/%s_temp_sed.dat' % (object))

    lc = obs_sed.lc
    dlam_spec = lc[-1] - lc[-2]
    is_spec = np.append(
        np.abs(1 - np.abs(lc[1:] - lc[0:-1]) / dlam_spec) < 0.05, True)

    obs_convert = 10**(-0.4 * (abzp + 48.6)) * 3.e18 / lc**2 / 10.**-19

    fig = unicorn.catalogs.plot_init(square=True,
                                     xs=5,
                                     aspect=2. / 3,
                                     left=0.12)

    ax = fig.add_subplot(111)

    ax.plot(lc[~is_spec],
            obs_sed.fnu[~is_spec] * obs_convert[~is_spec],
            marker='o',
            color='orange',
            linestyle='None',
            markersize=15,
            alpha=0.7)
    ax.plot(lc[is_spec],
            obs_sed.fnu[is_spec] * obs_convert[is_spec],
            color='blue',
            alpha=0.5)
    ax.plot(lc[~is_spec],
            obs_sed.obs_sed[~is_spec] * obs_convert[~is_spec],
            color='red',
            alpha=0.7,
            marker='o',
            linestyle='None',
            markersize=8)

    temp_convert = 10**(-0.4 *
                        (abzp + 48.6)) * 3.e18 / temp_sed.lam**2 / 10.**-19
    flam_temp = temp_sed.fnu_temp * temp_convert

    ax.plot(temp_sed.lam, flam_temp, color='red', alpha=0.3)
    #fast_norm = 1./np.interp(2.2e4, wfast, tfast)*np.interp(2.2e4, temp_sed.lam, flam_temp)

    wfast, tfast = np.loadtxt('FAST_OUTPUT/BEST_FITS/%s_threedhst_1.fit' %
                              (object),
                              skiprows=1,
                              unpack=True)
    ax.plot(wfast, tfast, color='green', alpha=0.5)

    wfast, tfast = np.loadtxt('FAST_OUTPUT/BEST_FITS/%s_threedhst_2.fit' %
                              (object),
                              skiprows=1,
                              unpack=True)
    ax.plot(wfast, tfast, color='purple', alpha=0.5)

    if logx:
        ax.semilogx()
    ax.set_xlim(wmin, wmax)

    ymax = np.max(obs_sed.fnu * obs_convert)
    ax.set_ylim(-0.1 * ymax, 1.3 * ymax)

    fout = catIO.Readfile('FAST_OUTPUT/%s_threedhst.fout' % (object))

    ax.set_xlabel(r'$\lambda$')
    ax.set_ylabel(r'$f_\lambda\ (10^{-19}$)')
    xtext = 2e4
    xal = 'right'

    ax.text(xtext,
            0.4 * ymax,
            r'$z_\mathrm{gris}=%.4f$' % (fout.z[0]),
            horizontalalignment=xal)

    ax.text(3000, 1.1 * ymax, object)

    ax.text(xtext,
            0.3 * ymax,
            r'log M: $%.1f^{\ %.1f}_{\ %.1f}$   $%.1f^{\ %.1f}_{\ %.1f}$' %
            (fout.lmass[0], fout.u68_lmass[0], fout.l68_lmass[0],
             fout.lmass[1], fout.u68_lmass[1], fout.l68_lmass[1]),
            horizontalalignment=xal)

    ax.text(xtext,
            0.2 * ymax,
            r'$A_V$: $%.1f^{\ %.1f}_{\ %.1f}$   $%.1f^{\ %.1f}_{\ %.1f}$' %
            (fout.av[0], fout.u68_av[0], fout.l68_av[0], fout.av[1],
             fout.u68_av[1], fout.l68_av[1]),
            horizontalalignment=xal)

    ax.text(
        xtext,
        0.1 * ymax,
        r'log $\tau$: $%.1f^{\ %.1f}_{\ %.1f}$    $%.1f^{\ %.1f}_{\ %.1f}$' %
        (fout.ltau[0], fout.u68_ltau[0], fout.l68_ltau[0], fout.ltau[1],
         fout.u68_ltau[1], fout.l68_ltau[1]),
        horizontalalignment=xal)

    ax.text(xtext,
            0.0 * ymax,
            r'log Age: $%.1f^{\ %.1f}_{\ %.1f}$    $%.1f^{\ %.1f}_{\ %.1f}$' %
            (fout.lage[0], fout.u68_lage[0], fout.l68_lage[0], fout.lage[1],
             fout.u68_lage[1], fout.l68_lage[1]),
            horizontalalignment=xal)

    ### Save to PNG
    outfile = object + '_fast.' + image_type

    if USE_PLOT_GUI:
        fig.savefig(outfile, dpi=100, transparent=False)
    else:
        canvas = FigureCanvasAgg(fig)
        canvas.print_figure(outfile, dpi=100, transparent=False)

    plt.close()
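
The obs_convert factor in check_fast turns catalog f_nu values on AB zeropoint abzp into f_lambda in units of 1e-19 erg/s/cm2/A, using f_nu = 10**(-0.4*(abzp + 48.6)) and f_lambda = f_nu * c / lambda**2 with c = 3e18 A/s; a standalone sketch with hypothetical wavelengths:

import numpy as np

abzp = 25.0                             # AB zeropoint of the catalog fluxes
lc = np.array([5.5e3, 1.4e4, 2.2e4])    # hypothetical pivot wavelengths [A]

# erg/s/cm2/Hz per catalog flux unit, then f_nu -> f_lambda,
# expressed in 1e-19 erg/s/cm2/A as in check_fast above
obs_convert = 10**(-0.4 * (abzp + 48.6)) * 3.e18 / lc**2 / 1.e-19
print(obs_convert)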
Code Example #12
File: background.py Project: gbrammer/unicorn
def make_imaging_flat():
    """
    Make average background images with object masks
    """
    from pyraf import iraf

    #files = glob.glob('ibhm*flt.seg.fits')
    #PATH = ('/3DHST/Spectra/Work/%s/RAW/' %(field))*len(files)

    ###################### Grism sky backgrounds
    filter, flat_file = 'G141', 'u4m1335mi_pfl.fits'

    flat = pyfits.open(IREF + '/' + flat_file)[1].data[
        5:-5, 5:-5] / pyfits.open(IREF + '/flat.IR_avg.fits')[1].data[5:-5,
                                                                      5:-5]
    flat[flat <= 0] = 5
    flat[flat > 5] = 5

    ##################### Direct flat-field
    filter, flat_file = 'F140W', 'uc721143i_pfl.fits'

    filter, flat_file = 'F125W', 'uc72113qi_pfl.fits'

    filter, flat_file = 'F160W', 'uc721145i_pfl.fits'

    filter, flat_file = 'F105W', 'uc72113oi_pfl.fits'

    flat = pyfits.open(IREF + '/' + flat_file)[1].data[5:-5, 5:-5]

    flat[flat <= 0] = 5
    flat[flat > 5] = 5

    ############### 3D-HST
    os.chdir("/3DHST/Spectra/Work/Background")
    fields = ['COSMOS', 'GOODS-N', 'GOODS-S', 'AEGIS', 'UDS']
    PREP_FLT = '/3DHST/Spectra/Work/xxx/PREP_FLT/'
    RAW = '/3DHST/Spectra/Work/xxx/RAW/'

    ############### CANDELS
    os.chdir('/Users/gbrammer/CANDELS/Flats/')
    fields = ['GOODS-S', 'EGS', 'UDS']
    PREP_FLT = '/Users/gbrammer/CANDELS/xxx/PREP_FLT/'
    RAW = '/Users/gbrammer/CANDELS/xxx/RAW/'

    PATHS = []
    files = []
    file_field = []

    for field in fields:
        info = catIO.Readfile(PREP_FLT.replace('xxx', field) + 'files.info')
        field_files = info.file[info.filter == filter]
        files.extend(field_files)
        PATHS.extend([RAW.replace('xxx', field)] * len(field_files))
        file_field.extend([field] * len(field_files))

    ##################

    NF = len(files)
    idx = np.arange(NF)

    ## Otherwise get it from "show_profile" above
    test = idx > -10

    fp = open('background.%s.dat' % (filter), 'w')

    for j, i in enumerate(idx):
        if ~test[i]:
            continue
        #
        fi = files[i]
        if not os.path.exists(fi.replace('flt', 'flt.seg')):
            continue
        #
        if os.path.exists(fi.replace('.gz', '') + '.mask.reg'):
            continue
        #
        flt = pyfits.open(PATHS[i] + files[i])
        flt[1].data *= flat
        print unicorn.noNewLine + '%d %s %s' % (i, files[i],
                                                flt[0].header['PFLTFILE'])
        #
        ### Segmentation mask
        masked = pyfits.open(fi.replace('flt', 'flt.seg'))[0].data == 0
        ### DQ mask, hot pixels and the "death star"
        dq_ok = (flt[3].data & (4 + 32 + 16)) == 0
        #
        ok = masked & np.isfinite(flt[1].data) & (dq_ok)
        #flt[1].data /= np.median(flt[1].data[ok])
        level = threedhst.utils.biweight(flt[1].data[ok], mean=True)
        fp.write('%s %s %.3f\n' % (files[i].replace('flt', 'msk').replace(
            '.gz', ''), file_field[i], level))
        #
        #flt[1].data /= level
        #flt[1].data[(ok == False)] = 0
        #pyfits.writeto(files[i].replace('flt','msk').replace('.gz',''), flt[1].data, clobber=True, header=flt[1].header)

    fp.close()  ## background.dat

    #
    # nsum = np.sum(X != 0, axis=0).reshape(1014,1014)
    # avg = np.sum(X, axis=0).reshape(1014,1014)/nsum
    # sky = avg

    #### Use iraf.imcombine
    for field in fields:
        info = catIO.Readfile(PREP_FLT.replace('xxx', field) + 'files.info')
        field_files = info.file[info.filter == filter]
        if len(field_files) < 10:
            continue
        #
        fp = open('%s.%s.list' % (field, filter), 'w')
        for ff in field_files:
            msk = ff.replace('flt.fits.gz', 'msk.fits')
            if os.path.exists(msk):
                fp.write('%s\n' % (msk))
        fp.close()
        #
        iraf.imcombine(input='@%s.%s.list' % (field, filter),
                       output='combine.%s.%s' % (field, filter),
                       headers='',
                       bpmasks='',
                       rejmasks='',
                       nrejmasks='',
                       expmasks='',
                       sigmas='',
                       logfile='STDOUT',
                       combine='average',
                       reject='minmax',
                       project=iraf.no,
                       outtype='real',
                       outlimits='',
                       offsets='none',
                       masktype='none',
                       maskvalue='0',
                       blank=0.0,
                       scale='none',
                       zero='none',
                       weight='none',
                       statsec='',
                       expname='',
                       lthreshold=1e-06,
                       hthreshold=100.0,
                       nlow=5,
                       nhigh=5,
                       nkeep=1,
                       mclip=iraf.yes,
                       lsigma=3.0,
                       hsigma=3.0,
                       rdnoise='0.',
                       gain='1.',
                       snoise='0.',
                       sigscale=0.1,
                       pclip=-0.5)

    ##### Weight by the square of the background level (more flat signal for higher bg!)
    bg_flt, bg_field, bg = np.loadtxt('background.%s.dat' % (filter),
                                      dtype=np.str,
                                      unpack=True)
    weights = np.cast[float](bg)**2

    fp = open('%s.list' % (filter), 'w')
    fpw = open('%s.weight' % (filter), 'w')
    for msk, wht in zip(bg_flt, weights):
        if os.path.exists(msk):
            fp.write('%s\n' % (msk))
            fpw.write('%.2f\n' % (wht))

    fp.close()
    fpw.close()

    iraf.imcombine(input='@%s.list' % (filter),
                   output='combine.%s' % (filter),
                   headers='',
                   bpmasks='',
                   rejmasks='',
                   nrejmasks='',
                   expmasks='',
                   sigmas='',
                   logfile='STDOUT',
                   combine='average',
                   reject='minmax',
                   project=iraf.no,
                   outtype='real',
                   outlimits='',
                   offsets='none',
                   masktype='none',
                   maskvalue='0',
                   blank=0.0,
                   scale='none',
                   zero='none',
                   weight='@%s.weight' % (filter),
                   statsec='',
                   expname='',
                   lthreshold=1e-06,
                   hthreshold=100.0,
                   nlow=5,
                   nhigh=5,
                   nkeep=1,
                   mclip=iraf.yes,
                   lsigma=3.0,
                   hsigma=3.0,
                   rdnoise='0.',
                   gain='1.',
                   snoise='0.',
                   sigscale=0.1,
                   pclip=-0.5)

    ##### Final processing
    combined_files = glob.glob('combine*%s*fits' % (filter))
    for file in combined_files:
        sky = pyfits.open(file)[0].data
        #
        ##### Fix bad pixels
        if filter != 'G141':
            ratio = sky / flat
            stats = threedhst.utils.biweight(ratio[np.isfinite(ratio)],
                                             both=True)
            sky = sky / stats[0]
            max = stats[1] * 5
        else:
            max = 10
        #
        x, y = np.where((np.isfinite(sky) == False) | (sky / flat > (1 + max))
                        | (sky == 0))
        NX = len(x)
        print '%s: N_fix = %d' % (file, NX)
        pad = 1
        for i in range(NX):
            xi = x[i]
            yi = y[i]
            sub = sky[xi - pad:xi + pad + 2, yi - pad:yi + pad + 2]
            if (np.sum(sub) != 0.0):
                sky[xi, yi] = np.median(sub[np.isfinite(sub)])
        #
        still_bad = (np.isfinite(sky) == False) | (sky <= 0.01)
        sky[still_bad] = flat[still_bad]
        #
        #### for DIRECT flat
        if filter == 'G141':
            flatim = pyfits.open(unicorn.GRISM_HOME + 'CONF/sky_cosmos.fits')
            flatim[0].data = sky
            #flatim[3].data[5:-5,5:-5] = nsum
            flatim.writeto(file.replace('combine', 'sky'), clobber=True)
        else:
            flatim = pyfits.open(IREF + '/' + flat_file)
            flatim[1].data[5:-5, 5:-5] = sky
            #flatim[3].data[5:-5,5:-5] = nsum
            flatim.writeto(file.replace('combine', 'flat'), clobber=True)
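
The DQ mask in make_imaging_flat keeps only pixels with none of the bits 4, 16 and 32 set in the FLT DQ extension; the bitwise test in isolation, with a small made-up DQ array:

import numpy as np

dq = np.array([0, 4, 16, 32, 8, 36])   # made-up DQ values
bad_bits = 4 + 32 + 16                 # same bit combination as above

dq_ok = (dq & bad_bits) == 0
print(dq_ok)   # [ True False False False  True False]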
Code Example #13
    def fit(self,
            ascii_file='AEGIS-3-G141_00177.dat',
            chi2_limit=1.5,
            trim_mtype=True,
            max_contam=0.05,
            xrange=(1.1e4, 1.65e4),
            img_type='png',
            flux_min=0):
        import threedhst.catIO as catIO
        import numpy as np

        ### handle v2.0 format spectra
        spec = catIO.Readfile(ascii_file)
        if 'trace' in spec.columns:
            spec.lam = spec.wave * 1.
            spec.flux /= spec.sensitivity
            spec.error /= spec.sensitivity
            spec.contam /= spec.sensitivity
            ascii_file = ascii_file.replace('.1D', '')
        else:
            spec.error /= 2.5

        spec.flux -= spec.contam

        self.spec = spec
        self.img_type = img_type

        chi2 = np.zeros(self.NTEMP)
        types = []
        anorm = chi2 * 0.

        use = (spec.lam > xrange[0]) & (spec.lam < xrange[1]) & (
            spec.contam / spec.flux < max_contam) & (np.isfinite(
                spec.flux)) & (spec.flux > flux_min)
        self.use = use

        if len(spec.lam[use]) < 50:
            return False

        for i in range(self.NTEMP):
            temp = self.templates[i]
            types.append(temp.type.strip())
            yint = np.interp(spec.lam[use], temp.wave, temp.flux)
            #
            anorm[i] = np.sum(yint * spec.flux[use] * spec.error[use]**
                              2) / np.sum(yint**2 * spec.error[use]**2)
            #
            chi2[i] = np.sum(
                (anorm[i] * yint - spec.flux[use])**2 / spec.error[use]**2)

        types = np.cast[str](types)
        DOF = len(yint) - 1
        chi2 /= DOF

        min = np.where(chi2 == chi2.min())[0][0]

        if trim_mtype:
            trim_mtype = types[min].startswith('M')

        if (chi2.min() < chi2_limit) & (~trim_mtype):
            print unicorn.noNewLine + ascii_file + ' * ' + ' %s %0.2f' % (
                types[min], chi2.min())
            self.spec = spec
            self.types = types
            self.chi2 = chi2
            self.anorm = anorm
            self.ascii_file = ascii_file
            self.make_plot()
        else:
            print unicorn.noNewLine + ascii_file + ' %s %0.2f' % (types[min],
                                                                  chi2.min())
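
A self-contained numpy sketch of the template ranking in fit above: each template is scaled by a best-fit amplitude and ranked by reduced chi-squared. The sketch uses standard inverse-variance weights (an assumption about the intended weighting) and synthetic data, not project spectra:

import numpy as np

# Synthetic "observed" spectrum and two hypothetical templates on the same grid
lam = np.linspace(1.1e4, 1.65e4, 200)
flux = 1.0 + 0.3 * np.sin(lam / 2000.)
error = np.ones_like(lam) * 0.1
templates = [np.ones_like(lam),                  # flat template
             1.0 + 0.3 * np.sin(lam / 2000.)]    # template that matches the data

chi2 = np.zeros(len(templates))
for i, yint in enumerate(templates):
    # Weighted least-squares amplitude, then reduced chi-squared
    anorm = np.sum(yint * flux / error**2) / np.sum(yint**2 / error**2)
    chi2[i] = np.sum((anorm * yint - flux)**2 / error**2) / (len(lam) - 1)

best = np.argmin(chi2)
print('best template: %d, chi2/dof = %.3f' % (best, chi2[best]))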
Code example #14
File: dr1.py  Project: gbrammer/unicorn
def udf_prepare():
    """
    Make images and catalogs for extracting UDF spectra
    """
    import os
    import glob
    
    import numpy as np
    import pyfits
    
    import threedhst
    import unicorn
    import unicorn.hudf as hudf  # assumed location of extract_all() used below
    import threedhst.prep_flt_files
    from threedhst.prep_flt_files import process_3dhst_pair as pair
    import threedhst.catIO as catIO
    
    os.chdir(unicorn.GRISM_HOME+'UDF/PREP_FLT')
    
    ALIGN = '../XDF/xdfh_sci.fits'
    ALIGN_EXT=0
    
    info = catIO.Readfile('files.info')
    
    #### Make ASN files for different grisms / orientations
    asn = threedhst.utils.ASNFile(glob.glob('../RAW/i*asn.fits')[0])
    
    ## 10-26
    for d in ['10-26','11-01']:
        match = (info.targname == 'PRIMO') & (info.filter == 'G141') & (info.date_obs == '2010-%s' %(d))
        asn.exposures = []
        for exp in info.file[match]:
            asn.exposures.append(exp.split('_flt')[0])
        #
        asn.product = 'PRIMO-%s-G141' %(d.replace('-',''))
        asn.write(asn.product+'_asn.fits', clobber=True)
        #
        match = (info.targname == 'PRIMO') & (info.filter != 'G141') & (info.date_obs == '2010-%s' %(d))
        filt = info.filter[match][0]
        asn.exposures = []
        for exp in info.file[match]:
            asn.exposures.append(exp.split('_flt')[0])
        #
        asn.product = 'PRIMO-%s-%s' %(d.replace('-',''), filt)
        asn.write(asn.product+'_asn.fits', clobber=True)
        
    for pointing in [34,36,37,38]:
        for filt in ['F140W','G141']:
            match = (info.targname == 'GOODS-SOUTH-%d' %(pointing)) & (info.filter == filt)
            asn.exposures = []
            for exp in info.file[match]:
                asn.exposures.append(exp.split('_flt')[0])
            #
            asn.product = 'GOODS-SOUTH-%d-%s' %(pointing, filt)
            asn.write(asn.product+'_asn.fits', clobber=True)
    
    
    ##### Run background subtraction on all images        
    direct = glob.glob('*[0-9]-F*asn.fits')
    grism = glob.glob('*[0-9]-G141_asn.fits')
    
    for i in range(len(direct)):
        if not os.path.exists(grism[i].replace('asn','drz')):
            pair(direct[i], grism[i], adjust_targname=False, ALIGN_IMAGE = ALIGN, ALIGN_EXTENSION=ALIGN_EXT, SKIP_GRISM=False, GET_SHIFT=True, SKIP_DIRECT=False, align_geometry='rotate,shift')
    
    ### Fix offsets for 1026 since aperture combination was different
    files=['ibfup1myq_flt.fits','ibfup1n1q_flt.fits']
    for file in files:
        im = pyfits.open(file, mode='update')
        im[0].header.update('POSTARG1', im[0].header['POSTARG1']+8.814150)
        im[0].header.update('POSTARG2', im[0].header['POSTARG2']+0.025025)
        im.flush()
        
    #### Interlaced combinations
    ## Need to fake a combination for the interlaced direct image for PRIMO
    ## The first image is the direct image and the rest are G141 exposures
    ## to fill a 2x2 interlaced array
    ##
    ## Give them "F140W" filenames to work with interlacing code
    for d, f in zip(['1026', '1101'], ['F160W', 'F125W']):
        asn_im = threedhst.utils.ASNFile('PRIMO-'+d+'-%s_asn.fits' %(f))
        asn = threedhst.utils.ASNFile('PRIMO-'+d+'-G141_asn.fits')
        sf = threedhst.shifts.ShiftFile('PRIMO-'+d+'-G141_shifts.txt')
        #
        asn.exposures[0] = asn_im.exposures[0]
        sf.images[0] = asn.exposures[0]+'_flt.fits'
        #
        ###  Enough images to fill 2x2 grid: (Even-Even, Odd-Odd, OE, EO)
        # xo, yo = unicorn.reduce.get_interlace_offsets('PRIMO-'+d+'-G141_asn.fits', verbose=1, path_to_flt='./')
        keep = [0,1,3,5]
        for i in range(len(asn.exposures))[::-1]:
            if i not in keep:
                p = asn.exposures.pop(i)
                p = sf.images.pop(i)
                p = sf.xshift.pop(i)
                p = sf.yshift.pop(i)
                p = sf.scale.pop(i)
                p = sf.rotate.pop(i)
        #
        ### Image is combination of G141, F140W but need Multidrizzle outputs
        asn.product = 'PRIMO-'+d+'-F140W'
        sf.nrows = 4
        asn.write('%s_asn.fits' %(asn.product), clobber=True)
        sf.write('%s_shifts.txt' %(asn.product))
        threedhst.prep_flt_files.startMultidrizzle(asn.product + '_asn.fits',
                     use_shiftfile=True, skysub=False,
                     final_scale=0.06, pixfrac=0.8, driz_cr=False,
                     updatewcs=False, clean=True, median=False)
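    # Illustrative aside (not part of the original pipeline): "interlacing" four
    # half-pixel-dithered exposures just places them onto a grid sampled twice as
    # finely in each direction instead of drizzling, schematically:
    #
    #     out = np.zeros((2*ny, 2*nx))
    #     for img, (dx, dy) in zip(exposures, [(0, 0), (1, 1), (0, 1), (1, 0)]):
    #         out[dy::2, dx::2] = img
    #
    # which is why exactly four exposures (EE, OO, OE, EO offsets) are kept above.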
    
    #####################
    #### Deep F160W reference for interlaced reductions
    #####################
    
    wht = pyfits.open('xdfh_wht.fits')
    sci = pyfits.open('xdfh_sci.fits')
    texp = 3.e3
    f = 10**(-0.4*(33.4549980163574-25.96))
    #wht[0].data = wht[0].data*1000.-
    wht[0].data = (1.e5*wht[0].data**2+1./((sci[0].data*f+0.5)/texp))*f**2
    wht.writeto('xdfh_VAR.fits', clobber=True)
    wht[0].data = 1./np.sqrt(wht[0].data)
    wht.writeto('xdfh_SIG.fits', clobber=True)
    sci[0].data *= f
    sci.writeto('xdfh_sci_scaled.fits')
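    # The block above builds a variance image from the XDF weight map plus an
    # (assumed) Poisson term from the science counts scaled to a ~3 ks exposure,
    # writes SIG = 1/sqrt(VAR), and saves a copy of the science image rescaled
    # from ZP = 33.455 to the ZP ~ 25.96 system used as the blot reference below.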
    
    ## Make catalog
    os.chdir("/research/HST/GRISM/3DHST/UDF/XDF")
    se = threedhst.sex.SExtractor()
    se.aXeParams()
    se.copyConvFile()
    se.overwrite = True
    se.options['CATALOG_NAME']    = 'xdf.cat'
    se.options['CHECKIMAGE_NAME'] = 'xdf_seg.fits'
    se.options['CHECKIMAGE_TYPE'] = 'SEGMENTATION'
    se.options['WEIGHT_TYPE']     = 'MAP_WEIGHT'
    se.options['WEIGHT_IMAGE']    = 'xdfh_VAR.fits'
    se.options['FILTER']    = 'Y'
    se.options['DETECT_THRESH']    = '1.5'
    se.options['ANALYSIS_THRESH']  = '1.5'
    se.options['MAG_ZEROPOINT'] = '33.45499801'
    se.options['DEBLEND_NTHRESH'] = '64'
    se.options['DEBLEND_MINCONT'] = '0.00005'
    
    status = se.sextractImage('xdfh_sci.fits')
    
    threedhst.sex.sexcatRegions('xdf.cat', 'xdf.reg', format=2)
    
    ## Prep blot drizzle images
    REF_ROOT = 'XDF-F160W'
    CATALOG = '../XDF/xdf.cat'
    unicorn.reduce.prepare_blot_reference(REF_ROOT=REF_ROOT, filter='F160W', REFERENCE = '../XDF/xdfh_sci_scaled.fits', SEGM = '../XDF/xdf_seg.fits', sci_extension=0)
    
    REF_ROOT = 'HUDF12-F160W'
    CATALOG = '../HUDF12/hudf12.cat'
    unicorn.reduce.prepare_blot_reference(REF_ROOT=REF_ROOT, filter='F160W', REFERENCE = '../HUDF12/hlsp_hudf12_hst_wfc3ir_udfmain_f160w_v1.0_drz.fits', SEGM = '../HUDF12/hudf12_seg.fits', sci_extension=0)
    
    ## Use new F140W image
    REF_ROOT = 'HUDF12-F140W'
    CATALOG = '../F140W/HUDF12-F140W.cat'
    unicorn.reduce.prepare_blot_reference(REF_ROOT=REF_ROOT, filter='F140W', REFERENCE = '../F140W/HUDF12-F140W_drz_sci.fits', SEGM = '../F140W/HUDF12-F140W_seg.fits', sci_extension=0)
    
    ### Generate DRZ images
    files=glob.glob('*F*asn.fits')
    for file in files: 
        threedhst.prep_flt_files.startMultidrizzle(file,
                     use_shiftfile=True, skysub=False,
                     final_scale=0.06, pixfrac=0.8, driz_cr=False,
                     updatewcs=False, clean=True, median=False)
    
        
    NGROW=125
    ROOT = 'PRIMO-1101'
    ROOT = 'GOODS-SOUTH-34'
    for p in [34, 36, 37, 38]:
        ROOT = 'GOODS-SOUTH-%d' %(p)
        unicorn.reduce.blot_from_reference(REF_ROOT=REF_ROOT, DRZ_ROOT = ROOT+'-F140W', NGROW=NGROW, verbose=True)
        unicorn.reduce.interlace_combine_blot(root=ROOT+'-F140W', view=False, pad=60+200*(ROOT=='PRIMO-1026'), REF_ROOT=REF_ROOT, CATALOG=CATALOG,  NGROW=NGROW, verbose=True, auto_offsets=True, NSEGPIX=3)
        # seg = pyfits.open(ROOT+'_inter_seg.fits', mode='update')
        # seg[0].data[seg[0].data < 0] = 0
        # seg.flush()
        #
        unicorn.reduce.interlace_combine(root=ROOT+'-G141', view=False, pad=60+200*(ROOT=='PRIMO-1026'),  NGROW=NGROW, auto_offsets=True)
        unicorn.reduce.interlace_combine(root=ROOT+'-F140W', view=False, pad=60+200*(ROOT=='PRIMO-1026'),  NGROW=NGROW, auto_offsets=True)
        #
        ref = pyfits.open(ROOT+'_ref_inter.fits', mode='update')
        ref[0].header['FILTER'] = 'F140W'
        ref.flush()
    
    ### Shift 1026-G141 image right by 130 pixels 
    #xo, yo = unicorn.reduce.get_interlace_offsets('PRIMO-1026-F140W_asn.fits', verbose=1, path_to_flt='./')
    im = pyfits.open('PRIMO-1026-G141_inter.fits', mode='update')
    fill = im[1].data*0
    fill[:,480:2580] = im[1].data[:,350:2450]
    im[1].data = fill*1
    fill = im[2].data*0
    fill[:,480:2580] = im[2].data[:,350:2450]
    im[2].data = fill*1
    im.flush()
    
    #### Shift 1101-G141 image down by 1 pixel
    im = pyfits.open('PRIMO-1101-G141_inter.fits', mode='update')
    fill = im[1].data*0
    fill[100:-100,:] = im[1].data[101:-99,:]
    im[1].data = fill*1
    fill = im[2].data*0
    fill[100:-100,:] = im[2].data[101:-99,:]
    im[2].data = fill*1
    im.flush()
    
    if ROOT.startswith('PRIMO'):
        ref = pyfits.open(ROOT+'_ref_inter.fits')
        im140 = pyfits.open(ROOT+'-F140W_inter.fits', mode='update')
        im140[0].header['FILTER'] = 'F140W'
        im140[1].data = ref[1].data #/ 10**(-0.4*(26.46-25.96))
        #im140[2].data = im140[2].data # / 10**(-0.4*(26.46-25.96))
        im140.flush()
    
    #
    files = glob.glob('*ref_inter.fits')
    for file in files:
        ROOT=file.split('_ref_inter')[0]
        model = unicorn.reduce.process_GrismModel(root=ROOT, MAG_LIMIT=28, REFINE_MAG_LIMIT=23, make_zeroth_model=False, grism='G141')
        model.make_wcs_region_file()
        
    #model = unicorn.reduce.GrismModel(root=ROOT, MAG_LIMIT=30, grism='G141')
    ### Force use F160W as detection image
    #if ROOT.startswith('PRIMO'):
    #model.direct[1].data = model.im[1].data*1./10**(-0.4*(26.46-25.96))
    
    model.get_corrected_wcs()
    model.make_wcs_region_file()
    
    #for p in [34,36,37,38]:
    #    ROOT = 'GOODS-SOUTH-%d' %(p)
    
    
    ##### Extract all objects
    c = threedhst.sex.mySexCat('../F140W/HUDF12-F140W.cat')
    c.write(c.filename.replace('.cat','.reform.cat'), reformat_header=True)
    c = catIO.Readfile('../F140W/HUDF12-F140W.reform.cat')
    ok = c.mag_auto < 26
    hudf.extract_all(c.number[ok], miny=-80)
    
    ### For some reason, some 2D files weren't extracted with 80 pix.  Redo those
    ### (assumed completion of a loop that is truncated in the source)
    bad = []
    files = glob.glob('*2D.fits')
    for file in files:
        if pyfits.getdata(file, 'SCI').shape[0] < 80:
            bad.append(file)
Code example #15
File: bg_ISR.py  Project: gbrammer/wfc3
def orbit_tracks(FILTER='F105W',
                 PATH='/Users/brammer/WFC3/Backgrounds/BroadBand/F105W',
                 exposures=['ibp329iq', 'ibp329is'],
                 axes=None,
                 axlabels=[1, 1, 1]):
    """
    Make a figure showing the orbit tracks / illuminated or not and the Term angle
    and compare to the observed background level (and predicted zodi).
    """

    ### F105W (all HUDF-DEEP-WFC3)
    # FILTER = 'F105W'
    # PATH = '/Users/brammer/WFC3/Backgrounds/BroadBand/F105W'
    # exposures = ['ibp329iq', 'ibp329is']

    # FILTER = 'F125W'
    # PATH = '/Users/brammer/WFC3/Backgrounds/BroadBand/Others'
    # exposures = ['ib5x19tm', 'ib5x19tq']
    #
    # FILTER = 'F160W'
    # PATH = '/Users/brammer/WFC3/Backgrounds/BroadBand/Others'
    # exposures = ['ib5x21ht', 'ib5x21hw']

    # FILTER = 'G102'
    # PATH = '/Users/brammer/WFC3/GrismPrograms/Koekemoer/RAW/'
    # exposures = ['ibl003ac', 'ibl003ae', 'ibl003af', 'ibl003ag']
    # PATH = '/Users/brammer/WFC3/GrismPrograms/Stanford/RAW/'
    # exposures = ['ibkn06dh', 'ibkn06dm', 'ibkn06dp', 'ibkn06dt']

    # FILTER = 'G141'
    # PATH = '/Users/brammer/WFC3/GrismPrograms/Koekemoer/RAW/'
    # exposures = ['ibl003a7', 'ibl003a9', 'ibl003aa', 'ibl003ab']

    # PATH = '/Users/brammer/WFC3/GrismPrograms/CANDELS-SNe/RAW/'
    # exposures = ['ibfug1je', 'ibfug1jh']

    if axes is None:
        fig = unicorn.plotting.plot_init(xs=3,
                                         aspect=1 / 0.5,
                                         left=0.1,
                                         top=0.05,
                                         bottom=0.1,
                                         hspace=0.02,
                                         use_tex=True,
                                         NO_GUI=True)
        ax = fig.add_subplot(212)
        ax_limb = fig.add_subplot(211)
    else:
        ax, ax_limb = axes

    N = len(exposures)

    ax.plot([-10, 100], [200, 200],
            linestyle='--',
            label='LimbAng = 20',
            color='black')  ## dummy for label

    for i, exp in enumerate(exposures):
        dat = catIO.Readfile('%s/%sj_%s_orbit.dat' % (PATH, exp, FILTER),
                             save_fits=False)
        raw = pyfits.open(gzfile('%s/%sq_raw.fits' % (PATH, exp)))
        #bg_min = mywfc3.zodi.flt_zodi(raw.filename(), verbose=False)
        bg_min = dat.zodi
        #
        spt = pyfits.getheader(gzfile('%s/%sq_spt.fits' % (PATH, exp)), 0)
        #### Start/stop times
        pstr = astropy.time.Time(spt['PSTRTIME'].replace('.', ':'),
                                 format='yday',
                                 in_subfmt='date_hms',
                                 scale='utc')
        if i == 0:
            tstr = pstr

        NSAMP = raw[0].header['NSAMP']
        times = np.zeros(NSAMP - 1)
        for j in range(NSAMP - 1):
            times[j] = raw['SCI', j + 1].header['SAMPTIME']

        times = (times[::-1][1:] + (pstr - tstr).sec) / 60.
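        # SAMPTIME is seconds since the start of the exposure, and the reads in the
        # raw file are stored last-first; reversing, dropping the zeroth read, and
        # adding this exposure's offset from the first start time gives a common
        # time axis in minutes for the whole orbit.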

        ax.plot(times, dat.bg, color='black', linewidth=2)
        l40 = dat.limbang > 40
        ax.plot(times[l40],
                dat.bg[l40],
                color='red',
                alpha=0.4,
                linewidth=4,
                zorder=-10,
                label=r'LimbAng $>$ 40' * (i == 0))

        shadow = dat.shadow == 1
        ax.plot(times[shadow],
                dat.bg[shadow],
                color='black',
                alpha=0.2,
                linewidth=7,
                zorder=-20,
                label=r'SHADOW' * (i == 0))
        ax_limb.plot(times[shadow],
                     dat.limbang[shadow],
                     color='black',
                     alpha=0.2,
                     linewidth=7,
                     zorder=-20,
                     label=r'SHADOW' * (i == 0))

        day = dat.brightlimb > 0
        ax.plot(times[day],
                dat.bg[day],
                color='blue',
                alpha=0.4,
                linewidth=7,
                zorder=-10,
                label='BrightLimb = 1' * (i == 0))

        #
        ax.plot(times,
                dat.bg * 0. + bg_min,
                color='black',
                linestyle=':',
                linewidth=2,
                label='Predicted zodi' * (i == 0))
        ax.text(times[0] + 0.2,
                0.04 + (N == 4) * ((i % 2) * 0.2 - 0.1),
                exp + 'q',
                ha='left',
                va='bottom',
                fontsize=6)
        #
        ax_limb.plot(times, dat.limbang, color='black', linewidth=2)
        ax_limb.plot(times[day],
                     dat.limbang[day],
                     color='blue',
                     alpha=0.4,
                     linewidth=7,
                     zorder=-10)
        ax_limb.plot(times[l40],
                     dat.limbang[l40],
                     color='red',
                     alpha=0.4,
                     linewidth=4,
                     zorder=-10)
        #ax_limb.plot(times, dat.termang, color='green', alpha=0.5, linewidth=2, label=['TermAng','','',''][i])

    ax.set_ylim(-0.3, 3.4)
    ax.set_xlim(-5, 55)
    ax.set_xlabel(r'$\Delta\,t$ (minutes)')

    ax_limb.plot([-10, 100], [20, 20], linestyle='--', color='black')

    ax_limb.set_ylim(2, 100)
    ax_limb.set_xticklabels([])
    ax_limb.set_xlim(-5, 55)
    ax_limb.set_title(FILTER, size=10)

    if axlabels[0] == 0:
        ax_limb.set_xticklabels([])

    if axlabels[1] == 0:
        ax_limb.set_yticklabels([])
        ax.set_yticklabels([])
    else:
        ax_limb.set_ylabel('LimbAng')
        ax.set_ylabel('Background (e- / s)')

    if axlabels[2]:
        ax.legend(loc='upper left', prop={'size': 7})
        #ax_limb.legend(loc='lower left', prop={'size':8})

    if axes is None:
        unicorn.plotting.savefig(fig, 'track_%s.pdf' % (FILTER))
Code example #16
File: bg_ISR.py  Project: gbrammer/wfc3
def excess_statistics():
    """
    Show cumulative distribution of (read) excess backgrounds
    """

    master = {}
    master['F105W'] = catIO.Readfile(
        '/user/brammer/WFC3_Backgrounds/F105W/master.dat')
    master['G141'] = catIO.Readfile(
        '/user/brammer/WFC3_Backgrounds/GrismPrograms/master_G141.dat')
    master['G102'] = catIO.Readfile(
        '/user/brammer/WFC3_Backgrounds/GrismPrograms/master_G102.dat')

    la, ra, ta, ba = 0.06, 0.06, 0.07, 0.13

    NX, NY = 1, 1
    aspect = (NY * (1 + ta + ba)) / ((3. * NX) * (1 + la + ra))
    dx = (1 - la - ra) / 3.

    fig = unicorn.plotting.plot_init(xs=8,
                                     aspect=aspect,
                                     left=0,
                                     right=0,
                                     top=0,
                                     bottom=0,
                                     hspace=0.0,
                                     wspace=0,
                                     use_tex=True,
                                     NO_GUI=True)

    for i in range(3):
        bgf = master[master.keys()[i]]
        ax = fig.add_axes((la + dx * i, ba, dx, 1 - ba - ta))
        #
        bg_ratio, xr = bgf.bg - bgf.zodi, (-0.5, 6)
        bg_ratio, xr = bgf.bg / bgf.zodi, (0.5, 6.8)
        #
        yh1, xh1, nn = ax.hist(bg_ratio[bgf.shadow == 1],
                               range=xr,
                               bins=100,
                               alpha=0.2,
                               color='black',
                               histtype='stepfilled',
                               log=True)
        yhc1 = np.cumsum(yh1[::-1])
        #
        yh0, xh0, nn = ax.hist(bg_ratio[bgf.shadow == 0],
                               range=xr,
                               bins=100,
                               alpha=0.2,
                               color='red',
                               histtype='stepfilled',
                               log=True)
        yhc0 = np.cumsum(yh0[::-1])
        #
        axr = ax.twinx()
        #
        if i == 0:
            ax.plot([100, 110], [1.e4, 1.e4],
                    linewidth=8,
                    color='black',
                    alpha=0.2,
                    label='SHADOW = True')
            ax.plot([100, 110], [1.e4, 1.e4],
                    linewidth=8,
                    color='red',
                    alpha=0.2,
                    label='SHADOW = False')
            ax.plot([100, 110], [1.e4, 1.e4],
                    linewidth=1.2,
                    color='red',
                    alpha=1,
                    label=r'$f(>X)$')
            ax.legend(loc='right',
                      prop={'size': 9},
                      scatterpoints=1,
                      frameon=True)

        #
        yi = np.interp(2, xh0[1:], yhc0[::-1] * 1. / bgf.N)
        axr.scatter([2], [yi],
                    color='red',
                    marker='o',
                    label=r'$f(X>2)$ = %.1f' % (yi * 100) + '\%')
        axr.plot(xh1[1:][::-1],
                 yhc1 * 1. / bgf.N,
                 color='black',
                 linewidth=1.2)
        axr.plot(xh0[1:][::-1], yhc0 * 1. / bgf.N, color='red', linewidth=1.2)
        #
        ### efficiency = 1/X
        eff = 1 / (np.clip(bgf.bg / bgf.zodi, 1, 10))
        axr.scatter([100, 110], [-1, -1],
                    linewidth=1,
                    color='white',
                    alpha=0.2,
                    label='eff. = %d' % (np.sum(eff) / bgf.N * 100) + '\%')

        axr.legend(loc='upper right',
                   prop={'size': 9},
                   scatterpoints=1,
                   frameon=False)
        ax.set_xlim(xr)
        #ax.set_ylim(5,yh1.max()*1.2)
        ax.set_ylim(5, 3000)
        axr.set_ylim(0, 0.65)
        if i > 0:
            ax.set_yticklabels([])
        else:
            ax.set_ylabel(r'$N_\mathrm{read}$')
        if i < 2:
            axr.set_yticklabels([])
        else:
            axr.set_ylabel(r'$f(>X)$')

        ax.set_title(master.keys()[i], size=10)
        if i == 1:
            ax.set_xlabel(r'$X$ = background observed / predicted zodi')
    #
    unicorn.plotting.savefig(fig, '/tmp/excess_statistics.pdf')
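
The reversed-cumulative-sum bookkeeping above turns histogram counts into the survival fraction f(>X). A compact standalone version on synthetic ratios (variable names illustrative):

import numpy as np

ratio = np.random.lognormal(mean=0.2, sigma=0.4, size=5000)  # fake bg / zodi ratios

counts, edges = np.histogram(ratio, bins=100, range=(0.5, 6.8))
frac_above = np.cumsum(counts[::-1])[::-1] / float(ratio.size)  # f(>X) at each left edge

# e.g. fraction of reads with background more than twice the predicted zodi
print(np.interp(2.0, edges[:-1], frac_above))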
Code example #17
def z2_galaxies(zrange=(1.6, 2.4)):
    os.chdir('/research/HST/GRISM/3DHST/ANALYSIS/GALFIT/TEST/COSMOS/Z2_COSMOS')

    root_path = '/research/HST/GRISM/3DHST/COSMOS/HTML_v1.0/'
    root = 'orient1'

    co = catIO.Readfile('../orient1_galfit.cat')
    co.re *= np.sqrt(1. / co.bovera)

    #### Select z~2
    use = (co.z_peak >= zrange[0]) & (co.z_peak <= zrange[1]) & (
        co.logm > 10.9) & (co.r_match < 1) & (co.re > 0)
    idx = np.arange(len(use))
    use = idx[use]
    use = use[np.argsort(co.re[use])]

    #### Angular scale
    zgrid = np.array([
        0.2, 0.3, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7, 0.8, 0.9, 1.0, 1.1,
        1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.0, 2.1, 2.2, 2.3, 2.4, 2.5
    ])
    scale = np.array([
        3.268, 4.421, 5.343, 5.733, 6.082, 6.394, 6.673, 6.922, 7.144, 7.518,
        7.812, 8.041, 8.216, 8.346, 8.439, 8.502, 8.539, 8.556, 8.555, 8.540,
        8.512, 8.475, 8.430, 8.377, 8.320, 8.257, 8.192
    ])
    sint = np.interp(co.z_peak, zgrid, scale)

    co.re_kpc = co.re * 0.06 * sint
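    # The hard-coded zgrid/scale table above is the proper kpc/arcsec scale,
    # consistent with a flat (H0~70, Om0~0.3) cosmology; it could equivalently
    # be computed with astropy, e.g.:
    #     from astropy.cosmology import FlatLambdaCDM
    #     sint = FlatLambdaCDM(H0=70, Om0=0.3).kpc_proper_per_arcmin(co.z_peak).value / 60.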

    fp = open('z2.html', 'w')
    fp.write("""
    <html>
    <head>
    <link rel="stylesheet" href="http://localhost/~gbrammer/COSMOS/scripts/style.css" type="text/css" id="" media="print, projection, screen" /> 
    
    <script type="text/javascript" src="http://localhost/~gbrammer/COSMOS/scripts/jquery-1.4.2.min.js"></script>
    
    <script type="text/javascript" src="http://localhost/~gbrammer/COSMOS/scripts/jquery.tablesorter.min.js"></script> 
    
    <script type="text/javascript" id="js">
    
    // Add ability to sort the table
    $(document).ready(function() {
        $.tablesorter.defaults.sortList = [[4,4]]; 
        $("table").tablesorter({
                // pass the headers argument and assign an object
                headers: {
                        // disable sorting on the image columns (counting from zero)
                        4: {
                                sorter: false
                        },
                        5: {
                                sorter: false
                        },
                }
        });        
    });
    </script>
    
    </head>
    <body>
    <table id="myTable" cellspacing="1" class="tablesorter"> 
    <thead>
        <th> Grism id </th>
        <th> z </th>
        <th> logM </th>
        <th> r_e </th>
        <th> Thumb </th>
        <th> SED </th>
    </thead>
    <tbody>
    """)

    old = 0
    for i in use:
        if co.id_f140w[i] == old:
            root = 'orient2'
        else:
            root = 'orient1'
        old = co.id_f140w[i]

        file = "%s_%05d" % (root, co.id_f140w[i])

        if not os.path.exists('%s/SED/%s_SED.png' % (root_path, file)):
            root = 'orient2'

        file = "%s_%05d" % (root, co.id_f140w[i])

        fp.write("""
    <tr>
        <td> %s </td>
        <td> %5.2f </td>
        <td> %5.2f </td>
        <td> %5.2f </td>
        <td> <img src=../%s_galfit.png height=180px> </td>
        <td> <img src=%s/SED/%s_SED.png height=180px> </td>
    </tr>
    """ % (file, co.z_peak[i], co.logm[i], co.re_kpc[i], file, root_path,
           file))

    fp.write("</tbody></table></body></html>")
    fp.close()
Code example #18
def check_redshifts():
    
    import astropy.cosmology as cc
    cosmo = cc.LambdaCDM(H0=70, Om0=0.3, Ode0=0.7)
    
    c = catIO.Readfile('../Catalog/SN-MARSHALL-F160W.reform.cat')
    c_ok = c.number < -100
    zc = catIO.Readfile('marshall_redshifts.dat')
    for id in zc.id:
        idi = int(id[-5:])
        #print idi, c_ok.sum()
        c_ok = c_ok | (c.number == idi)
    #
    star = (c.mag_auto[c_ok] < 24) & (c.flux_radius[c_ok] < 2.6)
    
    cat, zout, fout = unicorn.analysis.read_catalogs('UDS')
    mat = catIO.CoordinateMatcher(cat)
    dr, idx = mat.match_list(c.x_world, c.y_world)
    logm, logm_z, z_phot = fout.lmass[idx][c_ok], fout.z[idx][c_ok], zout.z_peak[idx][c_ok]
    
    ok = (zc.z_max > 0) & ~star
    dz95 = (zc.u95-zc.l95)/(1+zc.z_max)
    dz68 = (zc.u68-zc.l68)/(1+zc.z_max)
    best = ok & (dz68 < 0.015)
    
    #plt.scatter(zc.z_max[best], zc.mag[best], alpha=0.5)
    #plt.scatter(zc.z_max[best], logm[best], alpha=0.5)
    
    zr = [0,4]
    zrange = np.log(1+np.array(zr))
    dz = 0.01
    nbins = int(np.round(zrange[1]/dz))
    h = np.histogram(np.log(1+zc.z_max[best]), bins=nbins, range=zrange)
    h_idx = np.digitize(np.log(1+zc.z_max), h[1])
    z = np.exp(h[1][:-1]*0.5+h[1][1:]*0.5)-1
    cmv = h[1]*0.
    for i in range(len(h[1])):
        ## bin edges are in log(1+z): convert back to z before computing the volume
        cmv[i] = cosmo.comoving_volume(np.exp(h[1][i])-1).value/(4*np.pi*(360./2/np.pi)**2)
        
    survey_area = (2.2/60.)**2 #sq deg
    dcmv = np.diff(cmv)*survey_area
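    # 4*pi*(180/pi)**2 ~ 41253 deg^2 is the whole sky, so dcmv is the comoving
    # volume (Mpc^3) sampled by the ~2.2'x2.2' field in each log(1+z) bin.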
    
    nh = np.maximum(h[0], 0.01)
    plt.plot(z, nh, linestyle='steps')
        
    peak = np.abs(np.log(1+zc.z_max)-np.log(1+0.904)) < 0.005

    peak = np.abs(np.log(1+zc.z_max)-np.log(1+1.912)) < 0.005

    peak = np.abs(np.log(1+zc.z_max)-np.log(1+2.310)) < 0.005
    
    zbin = 0.904; bin_id = np.arange(len(h[1]))[z >= zbin][0]+1
    zbin = 1.912; bin_id = np.arange(len(h[1]))[z >= zbin][0]+1
    zbin = 1.522; bin_id = np.arange(len(h[1]))[z >= zbin][0]+1
    zbin = 2.310; bin_id = np.arange(len(h[1]))[z >= zbin][0]+1
    
    sel = (h_idx == bin_id) & best
    
    fp = open('/tmp/view_list','w')
    for id in zc.id[sel]:
        fp.write('%s.new_zfit.png\n' %(id))
        fp.write('%s.new_zfit.2D.png\n' %(id))
        fp.write('%s_stack.png\n' %(id))
    
    fp.close()
    os.system('open `cat /tmp/view_list`')
    
    plt.scatter(c.x_image, c.y_image, color='black', alpha=0.1)
    plt.scatter(c.x_image[c_ok][sel], c.y_image[c_ok][sel], color='red', alpha=0.8)
    
    
    
    
Code example #19
File: background.py  Project: gbrammer/unicorn
def make_g141_bg():
    """
    Make average background images with object masks
    """
    from pyraf import iraf

    os.chdir("/3DHST/Spectra/Work/Background")

    field = 'COSMOS'

    PATHS = []
    files = []

    for field in ['COSMOS', 'GOODS-N', 'GOODS-S', 'AEGIS', 'UDS']:
        info = catIO.Readfile('/3DHST/Spectra/Work/%s/PREP_FLT/files.info' %
                              (field))
        field_files = info.file[info.filter == 'G141']
        files.extend(field_files)
        PATHS.extend(['/3DHST/Spectra/Work/%s/RAW/' % (field)] *
                     len(info.file[info.filter == 'G141']))

    field = 'ALL'

    #files = glob.glob('ibhm*flt.seg.fits')
    #PATH = ('/3DHST/Spectra/Work/%s/RAW/' %(field))*len(files)

    # #### Direct flat-field
    flat = flat_g141[1].data[5:1019, 5:1019] / pyfits.open(
        'COSMOS_f140w_flat.fits')[1].data[5:-5, 5:-5]
    flat[flat <= 0] = 5
    flat[flat > 5] = 5

    NF = len(files)
    idx = np.arange(NF)
    nxpix, nypix = 1014, 1014

    #nxpix, nypix = 507, 507

    X = np.zeros((NF, nxpix * nypix))

    ## Otherwise get it from "show_profile" above
    test = idx > -10

    for j, i in enumerate(idx):
        if ~test[i]:
            continue
        #
        fi = files[i]
        if not os.path.exists(fi.replace('flt', 'flt.seg')):
            continue
        #
        if os.path.exists(fi.replace('.gz', '') + '.mask.reg'):
            continue
        #
        flt = pyfits.open(PATHS[i] + files[i])
        flt[1].data *= flat
        print unicorn.noNewLine + '%d %s %s' % (i, files[i],
                                                flt[0].header['PFLTFILE'])
        #
        ### Segmentation mask
        masked = pyfits.open(fi.replace('flt', 'flt.seg'))[0].data == 0
        ### DQ mask, hot pixels and the "death star"
        dq_ok = (flt[3].data & (4 + 32 + 16)) == 0
        #
        ok = masked & np.isfinite(flt[1].data) & (dq_ok)
        #flt[1].data /= np.median(flt[1].data[ok])
        flt[1].data /= threedhst.utils.biweight(flt[1].data[ok], mean=True)
        flt[1].data[(ok == False)] = 0
        X[j, :] = flt[1].data[0:nypix, 0:nxpix].flatten()
        #
        #pyfits.writeto(files[i].replace('flt','msk').replace('.gz',''), flt[1].data, clobber=True, header=flt[1].header)

    #### Average
    #nsum = np.sum(X != 0, axis=0).reshape(1014,1014)
    #avg = np.sum(X, axis=0).reshape(1014,1014)/nsum

    for field in ['COSMOS', 'GOODS-N', 'GOODS-S', 'AEGIS', 'UDS']:
        info = catIO.Readfile('/3DHST/Spectra/Work/%s/PREP_FLT/files.info' %
                              (field))
        field_files = info.file[info.filter == 'G141']
        fp = open(field + '.g141.list', 'w')
        for ff in field_files:
            msk = ff.replace('flt.fits.gz', 'msk.fits')
            if os.path.exists(msk):
                fp.write('%s\n' % (msk))
        fp.close()
        #
        iraf.imcombine(input='@%s.g141.list' % (field),
                       output='combined_g141_%s' % (field),
                       headers='',
                       bpmasks='',
                       rejmasks='',
                       nrejmasks='',
                       expmasks='',
                       sigmas='',
                       logfile='STDOUT',
                       combine='average',
                       reject='minmax',
                       project=iraf.no,
                       outtype='real',
                       outlimits='',
                       offsets='none',
                       masktype='none',
                       maskvalue='0',
                       blank=0.0,
                       scale='none',
                       zero='none',
                       weight='none',
                       statsec='',
                       expname='',
                       lthreshold=0.02,
                       hthreshold=20.0,
                       nlow=3,
                       nhigh=3,
                       nkeep=1,
                       mclip=iraf.yes,
                       lsigma=3.0,
                       hsigma=3.0,
                       rdnoise='0.',
                       gain='1.',
                       snoise='0.',
                       sigscale=0.1,
                       pclip=-0.5)

    fp = open('msk_list', 'w')
    for file in files:
        fp.write(file + '\n')
    fp.close()

    iraf.imcombine(input='@msk_list',
                   output='combine_masked',
                   headers='',
                   bpmasks='',
                   rejmasks='',
                   nrejmasks='',
                   expmasks='',
                   sigmas='',
                   logfile='STDOUT',
                   combine='average',
                   reject='minmax',
                   project=iraf.no,
                   outtype='real',
                   outlimits='',
                   offsets='none',
                   masktype='none',
                   maskvalue='0',
                   blank=0.0,
                   scale='none',
                   zero='none',
                   weight='none',
                   statsec='',
                   expname='',
                   lthreshold=1e-06,
                   hthreshold=100.0,
                   nlow=5,
                   nhigh=5,
                   nkeep=1,
                   mclip=iraf.yes,
                   lsigma=3.0,
                   hsigma=3.0,
                   rdnoise='0.',
                   gain='1.',
                   snoise='0.',
                   sigscale=0.1,
                   pclip=-0.5)

    sky = pyfits.open('combine_COSMOS.fits')[0].data

    # #### Average
    # nsum = np.sum(X != 0, axis=0).reshape(nypix,nxpix)
    # avg = np.sum(X, axis=0).reshape(nypix,nxpix)/nsum
    #
    # ### Fill empty pixels with no input images
    # sky = avg
    x, y = np.where((np.isfinite(sky) == False) | (sky == 0))
    NX = len(x)
    pad = 1
    for i in range(NX):
        xi = x[i]
        yi = y[i]
        sub = sky[xi - pad:xi + pad + 2, yi - pad:yi + pad + 2]
        if (np.sum(sub) != 0.0):
            sky[xi, yi] = np.median(sub[np.isfinite(sub)])

    still_bad = (np.isfinite(sky) == False) | (sky <= 0.01)
    sky[still_bad] = flat[0:nypix, 0:nxpix][still_bad]

    # bad_flat = (flat < 0.5)
    # sky[bad_flat] = flat[bad_flat]

    nsum = np.sum(X != 0, axis=0).reshape(nypix, nxpix)  # exposures contributing per pixel
    im_sky = pyfits.PrimaryHDU(data=sky)
    im_n = pyfits.ImageHDU(data=nsum)
    im = pyfits.HDUList([im_sky, im_n])
    im.writeto('sky.fits', clobber=True)

    #### for DIRECT flat
    flatim = pyfits.open('/3DHST/Spectra/Work/CONF/sky_cosmos.fits')
    flatim[0].data = sky
    flatim[1].data = sky
    #flatim[3].data[5:-5,5:-5] = nsum
    flatim.writeto('%s_g141_flat.fits' % (field), clobber=True)
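
For reference, a minimal numpy stand-in for the masked, minmax-rejected average that iraf.imcombine performs above, assuming a stack of already-normalized msk frames in which masked pixels are zero; the function and its defaults are illustrative:

import numpy as np

def combine_masked(stack, nlow=3, nhigh=3, lo=0.02, hi=20.0):
    """Pixelwise average of a (Nexp, ny, nx) stack, ignoring out-of-range values
    and rejecting the nlow lowest / nhigh highest valid values at each pixel."""
    data = np.array(stack, dtype=float)
    data[(data < lo) | (data > hi)] = np.nan
    order = np.sort(data, axis=0)                      # NaNs sort to the end
    nvalid = np.isfinite(order).sum(axis=0)            # valid exposures per pixel
    rank = np.arange(order.shape[0])[:, None, None]
    keep = (rank >= nlow) & (rank < np.maximum(nvalid - nhigh, nlow + 1))
    keep &= np.isfinite(order)
    summed = np.where(keep, order, 0.0).sum(axis=0)
    return summed / np.maximum(keep.sum(axis=0), 1)

This only mirrors the reject='minmax' averaging used here; imcombine's full scaling and masking options are much richer.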