Code example #1
File: shifts.py Project: kewhitaker/threedhst
def checkShiftfile(asn_direct):
    """
checkShiftfile(asn_direct)
    
    Make sure that there is a line in the shiftfile for each exposure 
    in the ASN table.  Also check that no scales are zero.
    """
    from threedhst.utils import ASNFile

    asn = ASNFile(asn_direct)

    sf_file = asn_direct.split("_asn.fits")[0] + "_shifts.txt"
    sf = ShiftFile(sf_file)
    flag = False
    for exp in asn.exposures:
        if exp + "_flt.fits" not in sf.images:
            flag = True
            print "Exposure, %s, not in %s" % (exp, sf_file)
            # print sf.nrows
            sf.append(exp + "_flt.fits")
            # print sf.nrows

    #### Check if scales are zero in the shiftfile
    if 0.0 in sf.scale:
        flag = True
        print "Found scale=0 in the shiftfile, setting to default no shift/rotation\n"
        for i in range(len(sf.scale)):
            sf.xshift[i], sf.yshift[i], sf.rotate[i], sf.scale[i] = 0.0, 0.0, 0.0, 1.0

    if flag:
        sf.write(sf_file)
    else:
        # print "\n3DHST.shifts.checkShiftfile: %s looks OK.\n" %sf_file
        threedhst.showMessage("Shiftfile, %s, looks OK" % sf_file)
Code example #2
    def write_fits(self):
        """
        Save the ascii catalog data into a FITS bintable.
        
        The modification date of the ascii catalog is saved in the 'MODTIME'
        keyword of the FITS file
        """
        import time

        formats = {}
        formats['bool'] = 'L'
        formats['int16'] = 'I'
        formats['int32'] = 'J'
        formats['int64'] = 'K'
        formats['float32'] = 'E'
        formats['float64'] = 'D'

        formats['>i8'] = 'K'
        formats['>f8'] = 'D'

        #### Make the table columns, translating numpy data types to "TFORM"
        coldefs = []
        for column in self.columns:
            dtype = str(self.__getitem__(column).dtype)
            #print column, dtype
            if dtype in list(formats.keys()):
                TFORM = formats[dtype]
            else:
                if 'S' not in dtype:
                    threedhst.showMessage('Unrecognized data type in %s: %s' %
                                          (self.filename, dtype),
                                          warn=True)
                    return False
                #
                TFORM = 'A' + dtype.split('S')[1]
            #
            data = self.__getitem__(column)
            if '>' in dtype:
                cast_types = {'>i8': np.int64, '>f8': np.float64}
                data = np.cast[cast_types[dtype]](data)
            #
            coldefs.append(pyfits.Column(name=column, array=data,
                                         format=TFORM))

        #### Done, now make the binary table
        tbhdu = pyfits.new_table(coldefs)

        #### Primary HDU
        hdu = pyfits.PrimaryHDU()
        thdulist = pyfits.HDUList([hdu, tbhdu])

        #### Add modification time of "infile" to FITS header
        infile_mod_time = time.strftime(
            "%m/%d/%Y %I:%M:%S %p",
            time.localtime(os.path.getmtime(self.filename)))

        thdulist[1].header.update('MODTIME', infile_mod_time)

        thdulist.writeto(self.filename + '.FITS', clobber=True)
        return True
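This listing uses the old pyfits calling conventions (pyfits.new_table, header.update(key, value), clobber=True) that later astropy releases dropped. A minimal sketch of the same numpy-dtype-to-TFORM translation on current astropy, written as my own adaptation rather than threedhst code:

# Sketch only: writes a dict of 1-D numpy arrays to a FITS bintable the way write_fits() does.
import os
import time
import numpy as np
from astropy.io import fits

TFORM = {'bool': 'L', 'int16': 'I', 'int32': 'J', 'int64': 'K',
         'float32': 'E', 'float64': 'D', '>i8': 'K', '>f8': 'D'}

def write_fits_sketch(columns, ascii_path):
    """columns: {name: array}; ascii_path: catalog whose modification time is recorded."""
    coldefs = []
    for name, data in columns.items():
        dtype = str(np.asarray(data).dtype)
        if dtype in TFORM:
            form = TFORM[dtype]
        elif 'S' in dtype:                      # byte strings, e.g. '|S8' -> 'A8'
            form = 'A' + dtype.split('S')[1]
        else:
            raise ValueError('Unrecognized data type: %s' % dtype)
        coldefs.append(fits.Column(name=name, array=np.asarray(data), format=form))

    tbhdu = fits.BinTableHDU.from_columns(coldefs)
    mtime = time.strftime("%m/%d/%Y %I:%M:%S %p",
                          time.localtime(os.path.getmtime(ascii_path)))
    tbhdu.header['MODTIME'] = mtime             # modification date of the ascii catalog
    fits.HDUList([fits.PrimaryHDU(), tbhdu]).writeto(ascii_path + '.FITS', overwrite=True)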
Code example #3
File: pab.py Project: gbrammer/pabeta
def run_lacosmic(flc='j9cv13pcq_flc.fits', split=1024, pssl=0.0, sigclip=4.0, objlim=3.8, sigfrac=0.3):
    """
    
    drizzlepac.astrodrizzle.AstroDrizzle('ESO550-IG02-13-083-F814W_asn.fits', skysub=True, clean=True, final_wcs=True, final_scale=0.05, final_pixfrac=0.8, context=False, resetbits=0, final_bits=576, preserve=False)
    
    """
    
    import cosmics
    import threedhst
    
    im = pyfits.open(flc, mode='update')
    h = im[0].header
    
    nx, ny = 4096/split, 2048/split
    
    for ext in [1,2]:
        if flc.startswith('i'):
            dq = np.zeros((2051, 4096), dtype=int)
        else:
            dq = np.zeros((2048, 4096), dtype=int)
            
        for i in range(nx):
            for j in range(ny):
                threedhst.showMessage('ext: %d, (i,j)=%d,%d' %(ext, i,j))
                subim = im['sci',ext].data[j*split:(j+1)*split, i*split:(i+1)*split]
                c = cosmics.cosmicsimage(subim, pssl=pssl, gain=1, readnoise=h['READNSEA'], sigclip=objlim, sigfrac=sigfrac, objlim=objlim, satlevel=84700.0, verbose=True)
                c.run(maxiter=4)
                dq[j*split:(j+1)*split, i*split:(i+1)*split] += c.mask*1
        
        im['dq',ext].data |= dq*4096
        
        pyfits.writeto(im.filename().replace('.fits', '_lac_%d.fits' %(ext)), data=dq*4096, header=im['dq', ext].header, clobber=True)
        
    im.flush()
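One detail worth a note: the detected cosmic rays are folded into the DQ extension as bit 4096, the usual MultiDrizzle/AstroDrizzle cosmic-ray flag, using a bitwise OR so existing DQ flags are preserved. A self-contained sketch of just that step with synthetic arrays (my illustration, not code from pab.py):

import numpy as np

dq = np.zeros((2048, 4096), dtype=int)          # stand-in for one ACS chip's DQ extension
cr_mask = np.random.random(dq.shape) < 1e-4     # True where a cosmic ray was detected

dq |= cr_mask * 4096                            # set bit 4096 without touching other flags
print(((dq & 4096) > 0).sum(), 'pixels flagged as cosmic-ray hits')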
Code example #4
def scatter_annotate(x,
                     y,
                     labels,
                     xtol=None,
                     ytol=None,
                     ax=None,
                     *args,
                     **kwargs):
    if ax is None:
        axi = plt
    else:
        axi = ax

    if len(labels) != len(x):
        threedhst.showMessage(
            '`x`, `y`, and `labels` inputs must be same size', warn=True)
        return False

    if not isinstance(labels, list):
        labels = list(labels)

    plt.scatter(x, y, *args, **kwargs)
    af = AnnoteFinder(x, y, labels, xtol=xtol, ytol=ytol, axis=ax)
    plt.connect('button_press_event', af)

    return af
Code example #5
File: shifts.py Project: themiyan/threedhst
def checkShiftfile(asn_direct):
    """
checkShiftfile(asn_direct)
    
    Make sure that there is a line in the shiftfile for each exposure 
    in the ASN table.  Also check that no scales are zero.
    """
    from threedhst.utils import ASNFile
    asn = ASNFile(asn_direct)

    sf_file = asn_direct.split('_asn.fits')[0] + '_shifts.txt'
    sf = ShiftFile(sf_file)
    flag = False
    for exp in asn.exposures:
        if exp + '_flt.fits' not in sf.images:
            flag = True
            print 'Exposure, %s, not in %s' % (exp, sf_file)
            #print sf.nrows
            sf.append(exp + '_flt.fits')
            #print sf.nrows

    #### Check if scales are zero in the shiftfile
    if 0.0 in sf.scale:
        flag = True
        print 'Found scale=0 in the shiftfile, setting to default no shift/rotation\n'
        for i in range(len(sf.scale)):
            sf.xshift[i], sf.yshift[i], sf.rotate[i], sf.scale[
                i] = 0.0, 0.0, 0.0, 1.0

    if flag:
        sf.write(sf_file)
    else:
        #print "\n3DHST.shifts.checkShiftfile: %s looks OK.\n" %sf_file
        threedhst.showMessage('Shiftfile, %s, looks OK' % sf_file)
Code example #6
def copy_adriz_headerlets(direct_asn='GOODS-S-15-F140W_asn.fits', grism_asn='GOODS-S-15-G141_asn.fits', order=None, force=False, ACS=False):
    """
    Copy the tweaked WCS solution in the direct images to the paired grism exposures.
    
    If there are the same number of grism and direct exposures, match the
    WCS headers directly.  If not, just get the overall shift from the first
    direct exposure and apply that to the grism exposures.
    """
    import stwcs
    from stwcs import updatewcs
    import drizzlepac
    
    direct = threedhst.utils.ASNFile(direct_asn)
    grism = threedhst.utils.ASNFile(grism_asn)
    
    Nd = len(direct.exposures)
    Ng = len(grism.exposures)
    
    if ACS:
        NCHIP=2
        sci_ext = [1,4]
        ext = 'flc'
    else:
        NCHIP=1
        sci_ext = [1]
        ext = 'flt'
        
    if Nd == Ng:
        if order is None:
            order = range(Nd)
            
        for i in range(Nd):
            imd = pyfits.open('%s_%s.fits' %(direct.exposures[i], ext))
            #img = pyfits.open('%s_%s.fits' %(grism.exposures[i]))
            #
            for sci in sci_ext:
                #sci_ext=1
                direct_WCS = stwcs.wcsutil.HSTWCS(imd, ext=sci)
                #
                drizzlepac.updatehdr.update_wcs('%s_%s.fits' %(grism.exposures[order[i]], ext), sci, direct_WCS, verbose=True)    
    else:
        #### Get overall shift from a shift-file and apply it to the 
        #### grism exposures
        sf = threedhst.shifts.ShiftFile(direct_asn.replace('_asn.fits', '_shifts.txt'))
        imd = pyfits.open(direct_asn.replace('asn','wcs'))
        print imd.filename()
        direct_WCS = stwcs.wcsutil.HSTWCS(imd, ext='wcs')
        #
        for i in range(Ng):
            img = pyfits.open('%s_%s.fits' %(grism.exposures[i], ext))
            if 'WCSNAME' in img[1].header:
                if img[1].header['WCSNAME'] == 'TWEAK':
                    if force is False:
                        threedhst.showMessage('"TWEAK" WCS already found in %s_flt.fits.\nRun copy_adriz_headerlets with force=True to force update the shifts' %(grism.exposures[i]), warn=True)
                        continue
            #
            updatewcs.updatewcs('%s_%s.fits' %(grism.exposures[i], ext))
            drizzlepac.updatehdr.updatewcs_with_shift('%s_%s.fits' %(grism.exposures[i], ext), direct_WCS, rot=sf.rotate[0], scale=sf.scale[0], xsh=sf.xshift[0], ysh=sf.yshift[0], wcsname='TWEAK')
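A hedged usage sketch: the prep_direct_grism_pair listing further down calls this function through threedhst.prep_flt_astrodrizzle (imported there as prep), so under that assumption a WFC3/IR visit would be handled as:

# File names are this function's own defaults; the module path follows the later listing.
import threedhst.prep_flt_astrodrizzle as prep

prep.copy_adriz_headerlets(direct_asn='GOODS-S-15-F140W_asn.fits',
                           grism_asn='GOODS-S-15-G141_asn.fits',
                           ACS=False)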
Code example #7
File: catIO.py Project: kewhitaker/threedhst
    def write_fits(self):
        """
        Save the ascii catalog data into a FITS bintable.
        
        The modification date of the ascii catalog is saved in the 'MODTIME'
        keyword of the FITS file
        """
        import time
        
        formats = {}
        formats['bool'] = 'L'
        formats['int16'] = 'I'
        formats['int32'] = 'J'
        formats['int64'] = 'K'
        formats['float32'] = 'E'
        formats['float64'] = 'D'
        
        formats['>i8'] = 'K'
        formats['>f8'] = 'D'
        
        #### Make the table columns, translating numpy data types to "TFORM"
        coldefs = []
        for column in self.columns:
            dtype = str(self.__getitem__(column).dtype)
            #print column, dtype
            if dtype in formats.keys():
                TFORM=formats[dtype]
            else:
                if 'S' not in dtype:
                    threedhst.showMessage('Unrecognized data type in %s: %s' %(self.filename, dtype), warn=True)
                    return False
                #
                TFORM = 'A'+dtype.split('S')[1]
            #
            data = self.__getitem__(column)
            if '>' in dtype:
                cast_types = {'>i8':np.int64, '>f8':np.float64}
                data = np.cast[cast_types[dtype]](data)
            #
            coldefs.append(pyfits.Column(name=column, array=data, format=TFORM))
        
        #### Done, now make the binary table
        tbhdu = pyfits.new_table(coldefs)

        #### Primary HDU
        hdu = pyfits.PrimaryHDU()
        thdulist = pyfits.HDUList([hdu,tbhdu])

        #### Add modification time of "infile" to FITS header
        infile_mod_time = time.strftime("%m/%d/%Y %I:%M:%S %p",
                            time.localtime(os.path.getmtime(self.filename)))
        
        thdulist[1].header.update('MODTIME',infile_mod_time)

        thdulist.writeto(self.filename+'.FITS', clobber=True)
        return True
Code example #8
File: regions.py Project: kewhitaker/threedhst
def asn_region(asn_file, path_to_flt="./"):
    """
asn_region(asn_file)
    
Create a DS9 region file for the exposures defined in an ASN file.
    
    """
    ##### Output file
    output_file = asn_file.split(".fits")[0] + ".pointing.reg"
    fp = open(output_file, "w")
    fp.write("fk5\n")  ### WCS coordinates
    ##### Read ASN file
    asn = threedhst.utils.ASNFile(asn_file)
    NEXP = len(asn.exposures)
    RAcenters = np.zeros(NEXP)
    DECcenters = np.zeros(NEXP)
    ##### Loop through exposures and get footprints
    for i, exp_root in enumerate(asn.exposures):
        flt_file = threedhst.utils.find_fits_gz(path_to_flt + "/" + exp_root.lower() + "_flt.fits", hard_break=True)

        # head = pyfits.getheader(exp_root.lower()+'_flt.fits')
        head = pyfits.getheader(flt_file)
        if head.get("INSTRUME") == "ACS":
            extensions = [1, 4]
        else:
            extensions = [1]

        for ext in extensions:
            regX, regY = wcs_polygon(flt_file, extension=ext)
            line = "polygon(%10.6f,%10.6f,%10.6f,%10.6f,%10.6f,%10.6f,%10.6f,%10.6f)" % (
                regX[0],
                regY[0],
                regX[1],
                regY[1],
                regX[2],
                regY[2],
                regX[3],
                regY[3],
            )

            RAcenters[i] += np.mean(regX) / len(extensions)
            DECcenters[i] += np.mean(regY) / len(extensions)

            fp.write(line + " # color=magenta\n")

    ##### Text label with ASN filename
    fp.write(
        '# text(%10.6f,%10.6f) text={%s} font="Helvetica 14 normal" color=magenta\n'
        % (np.mean(RAcenters), np.mean(DECcenters), asn_file.split("_asn.fits")[0])
    )
    fp.close()
    # print '3D-HST / ASN_REGION: %s\n' %(output_file)
    threedhst.showMessage("Create region file, %s." % output_file)
Code example #9
File: pab.py Project: gbrammer/pabeta
def run_lacosmic(flc='j9cv13pcq_flc.fits',
                 split=1024,
                 pssl=0.0,
                 sigclip=4.0,
                 objlim=3.8,
                 sigfrac=0.3):
    """
    
    drizzlepac.astrodrizzle.AstroDrizzle('ESO550-IG02-13-083-F814W_asn.fits', skysub=True, clean=True, final_wcs=True, final_scale=0.05, final_pixfrac=0.8, context=False, resetbits=0, final_bits=576, preserve=False)
    
    """

    import cosmics
    import threedhst

    im = pyfits.open(flc, mode='update')
    h = im[0].header

    nx, ny = 4096 / split, 2048 / split

    for ext in [1, 2]:
        if flc.startswith('i'):
            dq = np.zeros((2051, 4096), dtype=int)
        else:
            dq = np.zeros((2048, 4096), dtype=int)

        for i in range(nx):
            for j in range(ny):
                threedhst.showMessage('ext: %d, (i,j)=%d,%d' % (ext, i, j))
                subim = im['sci', ext].data[j * split:(j + 1) * split,
                                            i * split:(i + 1) * split]
                c = cosmics.cosmicsimage(subim,
                                         pssl=pssl,
                                         gain=1,
                                         readnoise=h['READNSEA'],
                                         sigclip=objlim,
                                         sigfrac=sigfrac,
                                         objlim=objlim,
                                         satlevel=84700.0,
                                         verbose=True)
                c.run(maxiter=4)
                dq[j * split:(j + 1) * split,
                   i * split:(i + 1) * split] += c.mask * 1

        im['dq', ext].data |= dq * 4096

        pyfits.writeto(im.filename().replace('.fits', '_lac_%d.fits' % (ext)),
                       data=dq * 4096,
                       header=im['dq', ext].header,
                       clobber=True)

    im.flush()
Code example #10
    def read_bd_templates(self):
        import glob
        temps = glob.glob(self.template_path + '/spex*txt')
        if len(temps) == 0:
            threedhst.showMessage('No BD templates found in %s' %
                                  (self.template_path),
                                  warn=True)

        list = []
        for temp in temps:
            list.append(BD_template(temp))

        self.templates = list
        self.NTEMP = len(self.templates)
Code example #11
File: regions.py Project: kewhitaker/threedhst
def fits_regions(fits_list, output_file="list.reg", force_extension=None):
    ##### Output file
    fp = open(output_file, "w")
    fp.write("fk5\n")  ### WCS coordinates

    NEXP = len(fits_list)

    ##### Loop through exposures and get footprints
    for i, exp_root in enumerate(fits_list):
        flt_file = threedhst.utils.find_fits_gz(exp_root, hard_break=True)

        # head = pyfits.getheader(exp_root.lower()+'_flt.fits')
        head = pyfits.getheader(flt_file)
        if (head.get("INSTRUME") == "ACS") | ("UVIS" in head.get("APERTURE")):
            extensions = [1, 4]
        else:
            extensions = [1]

        if force_extension is not None:
            extensions = force_extension

        RAcenters, DECcenters = 0.0, 0.0

        for ext in extensions:
            regX, regY = wcs_polygon(flt_file, extension=ext)
            line = "polygon(%10.6f,%10.6f,%10.6f,%10.6f,%10.6f,%10.6f,%10.6f,%10.6f)" % (
                regX[0],
                regY[0],
                regX[1],
                regY[1],
                regX[2],
                regY[2],
                regX[3],
                regY[3],
            )
            fp.write(line + " # color=magenta\n")

            RAcenters += np.mean(regX) / len(extensions)
            DECcenters += np.mean(regY) / len(extensions)

            ##### Text label with ASN filename
            fp.write(
                '# text(%10.6f,%10.6f) text={%s} font="Helvetica 9 normal" color=magenta\n'
                % (RAcenters, DECcenters, exp_root.split(".fits")[0])
            )

    fp.close()
    threedhst.showMessage("Create region file, %s." % output_file)
Code example #12
File: regions.py Project: themiyan/threedhst
def asn_region(asn_file, path_to_flt='./'):
    """
asn_region(asn_file)
    
Create a DS9 region file for the exposures defined in an ASN file.
    
    """
    ##### Output file
    output_file = asn_file.split('.fits')[0] + '.pointing.reg'
    fp = open(output_file, 'w')
    fp.write('fk5\n')  ### WCS coordinates
    ##### Read ASN file
    asn = threedhst.utils.ASNFile(asn_file)
    NEXP = len(asn.exposures)
    RAcenters = np.zeros(NEXP)
    DECcenters = np.zeros(NEXP)
    ##### Loop through exposures and get footprints
    for i, exp_root in enumerate(asn.exposures):
        flt_file = threedhst.utils.find_fits_gz(path_to_flt + '/' +
                                                exp_root.lower() + '_flt.fits',
                                                hard_break=True)

        #head = pyfits.getheader(exp_root.lower()+'_flt.fits')
        head = pyfits.getheader(flt_file)
        if head.get('INSTRUME') == 'ACS':
            extensions = [1, 4]
        else:
            extensions = [1]

        for ext in extensions:
            regX, regY = wcs_polygon(flt_file, extension=ext)
            line = "polygon(%10.6f,%10.6f,%10.6f,%10.6f,%10.6f,%10.6f,%10.6f,%10.6f)" % (
                regX[0], regY[0], regX[1], regY[1], regX[2], regY[2], regX[3],
                regY[3])

            RAcenters[i] += np.mean(regX) / len(extensions)
            DECcenters[i] += np.mean(regY) / len(extensions)

            fp.write(line + ' # color=magenta\n')

    ##### Text label with ASN filename
    fp.write('# text(%10.6f,%10.6f) text={%s} font="Helvetica 14 normal" color=magenta\n' \
        %(np.mean(RAcenters),np.mean(DECcenters),
          asn_file.split('_asn.fits')[0]))
    fp.close()
    #print '3D-HST / ASN_REGION: %s\n' %(output_file)
    threedhst.showMessage('Create region file, %s.' % output_file)
Code example #13
File: shifts.py Project: kewhitaker/threedhst
def make_grism_shiftfile(asn_direct, asn_grism):
    """
make_grism_shiftfile(asn_direct, asn_grism)
    
    Make a shiftfile for grism exposures to match
    corresponding direct images
    """
    from threedhst.utils import ASNFile

    ROOT_DIRECT = asn_direct.split("_asn.fits")[0]  # .lower()
    ROOT_GRISM = asn_grism.split("_asn.fits")[0]  # .lower()
    #### Read shiftfile and ASN table
    sf = ShiftFile(ROOT_DIRECT + "_shifts.txt")
    asn = ASNFile(asn_grism)

    if sf.nrows == len(asn.exposures):
        #### Assume one direct image for each grism images, so just
        #### change the image names in the shiftfile to the grism exposures
        for i, exp in enumerate(asn.exposures):
            sf.images[i] = exp + "_flt.fits"
    else:
        #### Have different number of grism and direct images.  Just use the
        #### shifts/rotations for the first direct image
        xs = sf.xshift[0]
        ys = sf.yshift[0]
        rot = sf.rotate[0]
        scl = sf.scale[0]
        sf.images = []
        sf.xshift = []
        sf.yshift = []
        sf.rotate = []
        sf.scale = []
        for i, exp in enumerate(asn.exposures):
            sf.images.append(exp + "_flt.fits")
            sf.xshift.append(xs)
            sf.yshift.append(ys)
            sf.rotate.append(rot)
            sf.scale.append(scl)

        sf.nrows = len(asn.exposures)

    #### Write the new shiftfile
    sf.write(ROOT_GRISM + "_shifts.txt")

    # print "\n3DHST.shifts.make_grism_shiftfile: %s_shifts.txt\n" %ROOT_GRISM
    threedhst.showMessage("Making grism shiftfile, %s_shifts.txt" % ROOT_GRISM)
Code example #14
File: catIO.py Project: kewhitaker/threedhst
    def __add__(self, newcat, prepend='x_'):
        """
        Append columns of 'newcat' gTable object to the table.  Add the
        "prepend" string to column names that already exist in the table.
        """
        if len(self) != len(newcat):
            threedhst.showMessage('Number of elements in %s and %s don\'t match.' %(self.filename, newcat.filename))

        cnew = self.copy()
        for column in newcat.columns:
            col = newcat[column]
            if column in cnew.columns:
                col.name = prepend + col.name

            cnew.add_column(col)

        return cnew
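The same column-merge idea can be shown with plain astropy tables; this is an illustration of the behavior, not the gTable class itself:

from astropy.table import Table

a = Table({'id': [1, 2, 3], 'mag': [21.0, 22.5, 19.8]})
b = Table({'id': [1, 2, 3], 'z': [0.7, 1.2, 0.4]})

merged = a.copy()
for name in b.colnames:
    out_name = ('x_' + name) if name in merged.colnames else name
    merged[out_name] = b[name]        # columns that already exist get the 'x_' prefix
print(merged.colnames)                # ['id', 'mag', 'x_id', 'z']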
Code example #15
File: shifts.py Project: themiyan/threedhst
def make_grism_shiftfile(asn_direct, asn_grism):
    """
make_grism_shiftfile(asn_direct, asn_grism)
    
    Make a shiftfile for grism exposures to match
    corresponding direct images
    """
    from threedhst.utils import ASNFile
    ROOT_DIRECT = asn_direct.split('_asn.fits')[0]  #.lower()
    ROOT_GRISM = asn_grism.split('_asn.fits')[0]  #.lower()
    #### Read shiftfile and ASN table
    sf = ShiftFile(ROOT_DIRECT + '_shifts.txt')
    asn = ASNFile(asn_grism)

    if sf.nrows == len(asn.exposures):
        #### Assume one direct image for each grism images, so just
        #### change the image names in the shiftfile to the grism exposures
        for i, exp in enumerate(asn.exposures):
            sf.images[i] = exp + '_flt.fits'
    else:
        #### Have different number of grism and direct images.  Just use the
        #### shifts/rotations for the first direct image
        xs = sf.xshift[0]
        ys = sf.yshift[0]
        rot = sf.rotate[0]
        scl = sf.scale[0]
        sf.images = []
        sf.xshift = []
        sf.yshift = []
        sf.rotate = []
        sf.scale = []
        for i, exp in enumerate(asn.exposures):
            sf.images.append(exp + '_flt.fits')
            sf.xshift.append(xs)
            sf.yshift.append(ys)
            sf.rotate.append(rot)
            sf.scale.append(scl)

        sf.nrows = len(asn.exposures)

    #### Write the new shiftfile
    sf.write(ROOT_GRISM + '_shifts.txt')

    #print "\n3DHST.shifts.make_grism_shiftfile: %s_shifts.txt\n" %ROOT_GRISM
    threedhst.showMessage('Making grism shiftfile, %s_shifts.txt' % ROOT_GRISM)
Code example #16
File: plotting.py Project: gbrammer/unicorn
def scatter_annotate(x, y, labels, xtol=None, ytol=None, ax=None,*args, **kwargs):
    if ax is None:
        axi = plt
    else:
        axi = ax
        
    if len(labels) != len(x):
        threedhst.showMessage('`x`, `y`, and `labels` inputs must be same size', warn=True)
        return False
    
    if not isinstance(labels, list):
        labels = list(labels)
        
    plt.scatter(x, y, *args, **kwargs)
    af = AnnoteFinder(x, y, labels, xtol=xtol, ytol=ytol, axis=ax)
    plt.connect('button_press_event', af)
    
    return af
Code example #17
File: regions.py Project: themiyan/threedhst
def fits_regions(fits_list, output_file='list.reg', force_extension=None):
    ##### Output file
    fp = open(output_file, 'w')
    fp.write('fk5\n')  ### WCS coordinates

    NEXP = len(fits_list)

    ##### Loop through exposures and get footprints
    for i, exp_root in enumerate(fits_list):
        flt_file = threedhst.utils.find_fits_gz(exp_root, hard_break=True)

        #head = pyfits.getheader(exp_root.lower()+'_flt.fits')
        head = pyfits.getheader(flt_file)
        if (head.get('INSTRUME') == 'ACS') | ('UVIS' in head.get('APERTURE')):
            extensions = [1, 4]
        else:
            extensions = [1]

        if force_extension is not None:
            extensions = force_extension

        RAcenters, DECcenters = 0., 0.

        for ext in extensions:
            regX, regY = wcs_polygon(flt_file, extension=ext)
            line = "polygon(%10.6f,%10.6f,%10.6f,%10.6f,%10.6f,%10.6f,%10.6f,%10.6f)" % (
                regX[0], regY[0], regX[1], regY[1], regX[2], regY[2], regX[3],
                regY[3])
            fp.write(line + ' # color=magenta\n')

            RAcenters += np.mean(regX) / len(extensions)
            DECcenters += np.mean(regY) / len(extensions)

            ##### Text label with ASN filename
            fp.write('# text(%10.6f,%10.6f) text={%s} font="Helvetica 9 normal" color=magenta\n' \
                %(RAcenters,DECcenters, exp_root.split('.fits')[0]))

    fp.close()
    threedhst.showMessage('Create region file, %s.' % output_file)
Code example #18
def remove_visit_sky(asn_file='GDN12-G102_asn.fits',
                     list=['zodi_G102_clean.fits', 'excess_G102_clean.fits'],
                     add_constant=False,
                     column_average=True,
                     mask_grow=18,
                     flat_correct=True):
    """
    Require that all exposures in a visit have the same zodi component.
    """
    from scipy.linalg import lstsq
    import scipy.optimize
    import scipy.ndimage as nd
    import astropy.io.fits as pyfits

    import copy

    import threedhst.grism_sky as bg

    asn = threedhst.utils.ASNFile(asn_file)

    flt = pyfits.open('%s_flt.fits' % (asn.exposures[0]))
    bg.set_grism_flat(grism=flt[0].header['FILTER'], verbose=True)

    if flat_correct:
        flat = bg.flat * 1.
    else:
        flat = bg.flat * 0. + 1

    data = []
    whts = []
    masks = []
    for exp in asn.exposures:
        flt = pyfits.open('%s_flt.fits' % (exp))
        segfile = '%s_flt.seg.fits' % (exp)
        seg = pyfits.open(segfile)[0].data
        seg_mask = nd.maximum_filter((seg > 0), size=18) == 0
        dq_ok = (flt[3].data & (4 + 32 + 16 + 512 + 2048 + 4096)) == 0
        #
        flat_corr = flt[1].data * flat
        mask = seg_mask & dq_ok
        mask &= (flat_corr < np.percentile(flat_corr[mask], 98)) & (
            flt[2].data > 0) & (flat_corr > np.percentile(flat_corr[mask], 1))
        #
        data.append(flat_corr.flatten())
        whts.append(1 / flt[2].data.flatten()**2)
        masks.append(mask.flatten())

    data = np.array(data)
    whts = np.array(whts)
    masks = np.array(masks)

    #### Read in the master skies
    ims = []
    skies = copy.deepcopy(list)

    for sky in skies:
        ims.append(
            pyfits.open(os.getenv('THREEDHST') + '/CONF/' +
                        sky)[0].data.flatten())

    if add_constant:
        ims.append(flt[1].data.flatten() * 0. + 1)
        skies.append('Constant')

    ims = np.array(ims)

    #### Do the fit
    tol = 1.49e-8  # not sure what this controls

    p0 = np.ones((ims.shape[0] - 1) * len(asn.exposures) + 1)
    popt = scipy.optimize.leastsq(bg.obj_lstsq_visit,
                                  p0,
                                  args=(data, ims, whts, masks),
                                  full_output=True,
                                  ftol=tol / 1000.,
                                  xtol=tol / 1000.)
    xcoeff = popt[0]

    sh_temp = ims.shape
    logstr = 'Master grism sky: %s\n\n FLT   %s\n' % (asn_file,
                                                      '  '.join(skies))

    for i in range(len(asn.exposures)):
        coeff = np.zeros(sh_temp[0])
        coeff[0] = xcoeff[0]
        coeff[1:] = xcoeff[1 + i * (sh_temp[0] - 1):1 + (i + 1) *
                           (sh_temp[0] - 1)]
        bg_model = np.dot(coeff, ims).reshape((1014, 1014))
        logstr += '%s  %s\n' % (asn.exposures[i], ''.join(
            [' %9.4f' % (c) for c in coeff]))
        flt = pyfits.open('%s_flt.fits' % (asn.exposures[i]), mode='update')
        flt[1].data = flt[1].data * flat - bg_model
        for j in range(sh_temp[0]):
            if 'GSKY%02d' % (j) in flt[0].header:
                flt[0].header['GSKY%02d' % (j)] += coeff[j]
            else:
                flt[0].header['GSKY%02d' % (j)] = (coeff[j], 'Master sky: %s' %
                                                   (skies[j]))
        #
        flt[1].header['MDRIZSKY'] = 0.
        if 'SKYFLAT' in flt[0].header.keys():
            flt[0].header['SKYFLAT'] = (flat_correct
                                        | flt[0].header['SKYFLAT'],
                                        'Direct image flat applied')
        else:
            flt[0].header['SKYFLAT'] = (flat_correct,
                                        'Direct image flat applied')
        flt.flush()

    threedhst.showMessage(logstr)

    if column_average:
        #for iter in range(2):
        grism_sky_column_average(asn_file=asn_file, mask_grow=mask_grow)
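At its core the visit-sky step is a weighted least-squares fit of each masked exposure onto the set of master sky images, with the zodiacal coefficient tied across exposures via bg.obj_lstsq_visit. That tying is not reproduced here, but the underlying linear model can be sketched on its own with synthetic one-dimensional "images":

import numpy as np

npix = 10000
rng = np.random.default_rng(1)

# Two synthetic master-sky components and one exposure built from them plus noise.
zodi = np.ones(npix)
excess = np.linspace(0.0, 0.5, npix)
data = 1.3 * zodi + 0.4 * excess + rng.normal(0.0, 0.01, npix)

mask = np.ones(npix, dtype=bool)             # pixels surviving the DQ/segmentation cuts
A = np.vstack([zodi, excess]).T[mask]        # design matrix: one column per sky image
coeff, *_ = np.linalg.lstsq(A, data[mask], rcond=None)
print('fitted sky coefficients:', coeff)     # close to [1.3, 0.4]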
Code example #19
File: go_acs.py Project: gbrammer/unicorn
def process_acs_pair(asn_direct_file='ib3706050_asn.fits',
                     asn_grism_file='ib3706060_asn.fits',
                     field='COSMOS',
                     ALIGN_IMAGE='../ACS/h_nz_sect*img.fits',
                     ALIGN_EXTENSION=0,
                     SKIP_GRISM=False,
                     adjust_targname=True,
                     align_geometry='shift',
                     PATH_TO_RAW='../RAW',
                     get_shift=True,
                     TWEAKSHIFTS_ONLY=False,
                     FLC=True):

    """
    Does the basic processing for ACS F814W and G800L pointings: background subtraction, alignment, and drizzling.
    """

    import threedhst
    import threedhst.prep_flt_files
    from threedhst.prep_flt_files import make_targname_asn
    
    #### Copy corrected FLT files to . 
    asn = threedhst.utils.ASNFile(asn_direct_file)
    for exp in asn.exposures:
        print exp
        os.system('rm %s_flt.fits' %(exp))
        if FLC:
            os.system('cp ../RAW/%s_flc.fits %s_flt.fits' %(exp, exp))
        else:
            os.system('cp ../FIXED/%s_flt.fits . ' %(exp))
    #
    asn = threedhst.utils.ASNFile(asn_grism_file)
    for exp in asn.exposures:
        print exp
        os.system('rm %s_flt.fits' %(exp))
        if FLC:
            os.system('cp ../RAW/%s_flc.fits %s_flt.fits' %(exp, exp))
        else:
            os.system('cp ../FIXED/%s_flt.fits . ' %(exp))

    #DIRECT REDUCTION
    ROOT_DIRECT = asn_direct_file.split('_asn.fits')[0]
        
    from threedhst.prep_flt_files import make_targname_asn
    
    #this makes new asn.fits files but with ACS the names start with ANY
    #must add an optional tag to replace ANY with the field name
    if (asn_direct_file is not None) & adjust_targname:
        asn_direct_file = make_targname_asn(asn_direct_file,field=field, ext='flc')
    
    if (asn_grism_file is not None) & adjust_targname:
        asn_grism_file = make_targname_asn(asn_grism_file,field=field, ext='flc')

    #run = threedhst.prep_flt_files.MultidrizzleRun((asn_direct_file.split('_asn.fits')[0]).upper())
    threedhst.shifts.run_tweakshifts(asn_direct_file, verbose=True)
    threedhst.prep_flt_files.startMultidrizzle(asn_direct_file, use_shiftfile=True,
        skysub=True,
        final_scale=0.05, pixfrac=1, driz_cr=True,
        updatewcs=True, clean=True, median=True)

    for i,exp in enumerate(asn.exposures):
        asn_mask = asn.exposures[i]+'_flt.fits.mask.reg'
        print asn_mask
        if os.path.exists(asn_mask):
            threedhst.showMessage("Apply ASN mask: %s" %(asn_mask))
            threedhst.regions.apply_dq_mask(asn.exposures[i]+'_flt.fits', extension=3, 
                    mask_file = asn_mask)

    threedhst.shifts.refine_shifts(ROOT_DIRECT=asn_direct_file.split('_as')[0].upper(),
              ALIGN_IMAGE=ALIGN_IMAGE, 
              ALIGN_EXTENSION = ALIGN_EXTENSION,
              fitgeometry=align_geometry, clean=True)
    
    unicorn.go_acs.testing_f814w_background(asn_direct_file)

    SCALE = 0.06
    PIXFRAC=1.0

    threedhst.prep_flt_files.startMultidrizzle(asn_direct_file, use_shiftfile=True,
        skysub=True,
        final_scale=SCALE, pixfrac=PIXFRAC, driz_cr=False,
        updatewcs=True, clean=True, median=False)

    #GRISM REDUCTION

    threedhst.shifts.make_grism_shiftfile(asn_direct_file, asn_grism_file)

    threedhst.prep_flt_files.startMultidrizzle(asn_grism_file, use_shiftfile=True,
        skysub=True,
        final_scale=SCALE, pixfrac=PIXFRAC, driz_cr=True,
        updatewcs=True, clean=False, median=True)
        
    unicorn.go_acs.testing_g800l_background(asn_grism_file)

    threedhst.prep_flt_files.startMultidrizzle(asn_grism_file, use_shiftfile=True,
        skysub=True,
        final_scale=SCALE, pixfrac=PIXFRAC, driz_cr=True,
        updatewcs=True, clean=False, median=True)
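A hedged usage sketch, assuming go_acs.py above is importable as unicorn.go_acs (as the internal unicorn.go_acs.* calls suggest); the ASN names, field, and remaining keywords are the function's own defaults:

import unicorn.go_acs

unicorn.go_acs.process_acs_pair(asn_direct_file='ib3706050_asn.fits',
                                asn_grism_file='ib3706060_asn.fits',
                                field='COSMOS',
                                align_geometry='shift',
                                FLC=True)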
Code example #20
def prep_direct_grism_pair(direct_asn='goodss-34-F140W_asn.fits', grism_asn='goodss-34-G141_asn.fits', radec=None, raw_path='../RAW/', mask_grow=18, scattered_light=False, final_scale=None, skip_direct=False, ACS=False, jump=False, order=2, get_shift=True, align_threshold=20, column_average=True, sky_iter=3):
    """
    Process both the direct and grism observations of a given visit
    """
    import threedhst.prep_flt_astrodrizzle as prep
    import drizzlepac
    from stwcs import updatewcs
    
    import time
    
    t0 = time.time()
    
    #direct_asn='goodss-34-F140W_asn.fits'; grism_asn='goodss-34-G141_asn.fits'; radec=None; raw_path='../RAW/'
    #radec = os.getenv('THREEDHST') + '/ASTRODRIZZLE_FLT/Catalog/goodss_radec.dat'
    
    ################################
    #### Direct image processing
    ################################
    
    #### xx add astroquery 2MASS/SDSS workaround for radec=None
    
    if not skip_direct:

        #### Get fresh FLTS from ../RAW/
        asn = threedhst.utils.ASNFile(direct_asn)
        if ACS:
            for exp in asn.exposures:
                print 'cp %s/%s_flc.fits.gz .' %(raw_path, exp)
                os.system('cp %s/%s_flc.fits.gz .' %(raw_path, exp))
                os.system('gunzip %s_flc.fits.gz' %(exp))
        else:
            threedhst.process_grism.fresh_flt_files(direct_asn, from_path=raw_path)
        
        if (not ACS):
            #### Subtract WFC3/IR direct backgrounds
            prep.subtract_flt_background(root=direct_asn.split('_asn')[0], scattered_light=scattered_light, order=order)
            #### Flag IR CRs again within runTweakReg
        
        #### Run TweakReg
        if (radec is None) & (not ACS):
            drizzlepac.astrodrizzle.AstroDrizzle(direct_asn, clean=True, final_scale=None, final_pixfrac=0.8, context=False, final_bits=576, preserve=False, driz_cr_snr='5.0 4.0', driz_cr_scale = '2.5 0.7') # ,
        else:
            if get_shift:
                prep.runTweakReg(asn_file=direct_asn, master_catalog=radec, final_scale=None, ACS=ACS, threshold=align_threshold)
    
        #### Subtract background of direct ACS images
        if ACS:
            for exp in asn.exposures:
                flc = pyfits.open('%s_flc.fits' %(exp), mode='update')
                for ext in [1,4]:
                    threedhst.showMessage('Subtract background from %s_flc.fits[%d] : %.4f' %(exp, ext, flc[ext].header['MDRIZSKY']))
                    flc[ext].data -= flc[ext].header['MDRIZSKY']
                    flc[ext].header['MDRIZSK0'] = flc[ext].header['MDRIZSKY']
                    flc[ext].header['MDRIZSKY'] = 0.
                #
                flc.flush()
        else:
            pass
            #### Do this later, gives segfaults here???
            #prep.subtract_flt_background(root=direct_asn.split('_asn')[0], scattered_light=scattered_light)
            #### Flag CRs again on BG-subtracted image
            #drizzlepac.astrodrizzle.AstroDrizzle(direct_asn, clean=True, final_scale=None, final_pixfrac=0.8, context=False, final_bits=576, preserve=False, driz_cr_snr='5.0 4.0', driz_cr_scale = '2.5 0.7') # ,
        
    ################################
    #### Grism image processing
    ################################
    
    if grism_asn:
        asn = threedhst.utils.ASNFile(grism_asn)
        if ACS:
            for exp in asn.exposures:
                print 'cp %s/%s_flc.fits.gz .' %(raw_path, exp)
                os.system('cp %s/%s_flc.fits.gz .' %(raw_path, exp))
                os.system('gunzip %s_flc.fits.gz' %(exp))
                updatewcs.updatewcs('%s_flc.fits' %(exp))

            prep.copy_adriz_headerlets(direct_asn=direct_asn, grism_asn=grism_asn, ACS=True)
            prep.subtract_acs_grism_background(asn_file=grism_asn, final_scale=None)
        else:
            #### Remove the sky and flag CRs
            ## with mask from rough zodi-only subtraction
            prep.subtract_grism_background(asn_file=grism_asn, PATH_TO_RAW='../RAW/', final_scale=None, visit_sky=True, column_average=False, mask_grow=mask_grow, first_run=True)
            ## Redo making mask from better combined image
            prep.subtract_grism_background(asn_file=grism_asn, PATH_TO_RAW='../RAW/', final_scale=final_scale, visit_sky=True, column_average=column_average, mask_grow=mask_grow, first_run=False, sky_iter=sky_iter)
                        
            #### Copy headers from direct images
            if radec is not None:
                prep.copy_adriz_headerlets(direct_asn=direct_asn, grism_asn=grism_asn, ACS=False)
                #### Run CR rejection with final shifts
                drizzlepac.astrodrizzle.AstroDrizzle(grism_asn, clean=True, skysub=False, final_wcs=True, final_scale=final_scale, final_pixfrac=0.8, context=False, final_bits=576, driz_sep_bits=576, preserve=False, driz_cr_snr='8.0 5.0', driz_cr_scale='2.5 0.7') # driz_cr_snr='5.0 4.0', driz_cr_scale = '2.5 0.7')
                
    if not grism_asn:
        t1 = time.time()
        threedhst.showMessage('direct: %s\n\nDone (%d s).' %(direct_asn, int(t1-t0)))
    else:
        t1 = time.time()
        threedhst.showMessage('direct: %s\ngrism: %s\n\nDone (%d s).' %(direct_asn, grism_asn, int(t1-t0)))
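The ACS background branch is mostly header bookkeeping: subtract each chip's MDRIZSKY value from the science array, record it in MDRIZSK0, and zero MDRIZSKY so later drizzle runs do not subtract it again. A self-contained sketch with an in-memory HDU standing in for one _flc chip:

import numpy as np
from astropy.io import fits

# In-memory stand-in for one SCI extension of an ACS _flc file.
sci = fits.ImageHDU(data=np.full((64, 64), 51.7, dtype=np.float32), name='SCI')
sci.header['MDRIZSKY'] = 50.0

sci.data -= sci.header['MDRIZSKY']
sci.header['MDRIZSK0'] = sci.header['MDRIZSKY']   # remember what was removed
sci.header['MDRIZSKY'] = 0.0
print(round(float(sci.data.mean()), 3), sci.header['MDRIZSK0'])   # ~1.7, 50.0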
Code example #21
File: shifts.py Project: kewhitaker/threedhst
def refine_shifts(
    ROOT_DIRECT="f160w",
    ALIGN_IMAGE="../../ACS/h_sz*drz_img.fits",
    fitgeometry="shift",
    clean=True,
    ALIGN_EXTENSION=0,
    shift_params=None,
    toler=3,
    maxtoler=5,
    align_sdss_ds9=False,
    verbose=False,
):
    """
refine_shifts(ROOT_DIRECT='f160w',
              ALIGN_IMAGE='../../ACS/h_sz*drz_img.fits',
              fitgeometry='shift', clean=True)
                
    Refine shifts by catalog matching an input multidrizzle image, 
    ROOT_DIRECT+'_drz.fits' to one or more alignment images
    """

    run = threedhst.prep_flt_files.MultidrizzleRun(ROOT_DIRECT.upper())

    ## radius for match is 2**toler.  Make it larger if fit comes out bad
    # toler, maxtoler = 3, 5
    iter, MAXIT = 0, 5
    xrms, yrms = 100, 100
    if shift_params is not None:
        xshift, yshift, rot, scale = shift_params
        threedhst.showMessage("Using specified DRZ-frame shifts: %f %f %f %f" % (xshift, yshift, rot, scale))
    else:
        threedhst.showMessage("Aligning WCS to %s (%s)" % (threedhst.options["ALIGN_IMAGE"], fitgeometry))
        while ((xrms > 1) | (yrms > 1)) & (toler <= maxtoler) & (iter < MAXIT):
            iter = iter + 1
            xshift, yshift, rot, scale, xrms, yrms = threedhst.shifts.align_to_reference(
                ROOT_DIRECT,
                ALIGN_IMAGE,
                fitgeometry=fitgeometry,
                clean=clean,
                ALIGN_EXTENSION=ALIGN_EXTENSION,
                toler=toler,
                skip_swarp=(toler > 3),
                align_sdss_ds9=align_sdss_ds9,
                verbose=verbose,
            )
            toler += 1

    #### shifts measured in DRZ frame.  Translate to FLT frame
    drz = pyfits.open(ROOT_DIRECT + "_drz.fits")
    # alpha = (180.-drz[1].header['PA_APER'])/360.*2*np.pi
    #### Get reference angle from first image in the ASN file
    asn = threedhst.utils.ASNFile(ROOT_DIRECT + "_asn.fits")
    alpha = (180.0 - pyfits.getheader(asn.exposures[0] + "_flt.fits", 1)["PA_APER"]) / 360.0 * 2 * np.pi

    xsh = (xshift * np.cos(alpha) - yshift * np.sin(alpha)) * np.float(run.scl)
    ysh = (xshift * np.sin(alpha) + yshift * np.cos(alpha)) * np.float(run.scl)

    print "Final shift:", xsh, ysh, drz[1].header["PA_APER"]
    fp = open(ROOT_DIRECT + "_align.info", "w")
    fp.write("%s %8.3f %8.3f %8.3f\n" % (ALIGN_IMAGE, xsh, ysh, rot))
    fp.close()

    #### Read the shiftfile
    shiftF = threedhst.shifts.ShiftFile(ROOT_DIRECT + "_shifts.txt")

    #### Apply the alignment shifts to the shiftfile
    shiftF.xshift = list(np.array(shiftF.xshift) - xsh)
    shiftF.yshift = list(np.array(shiftF.yshift) - ysh)
    shiftF.rotate = list((np.array(shiftF.rotate) + rot) % 360)
    shiftF.scale = list(np.array(shiftF.scale) * scale)

    shiftF.write(ROOT_DIRECT + "_shifts.txt")
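The frame change at the end is a plain two-dimensional rotation of the measured (xshift, yshift) by the PA_APER-derived angle, scaled by the drizzle-to-FLT pixel ratio. A numeric sketch of just that transform with made-up values:

import numpy as np

# Made-up alignment results: shift measured in the drizzled (DRZ) frame.
xshift, yshift = 0.85, -0.40     # DRZ pixels
pa_aper = 127.3                  # degrees, from the first FLT header in the ASN
scl = 1.2                        # run.scl in the listing above

alpha = (180.0 - pa_aper) / 360.0 * 2 * np.pi
xsh = (xshift * np.cos(alpha) - yshift * np.sin(alpha)) * scl
ysh = (xshift * np.sin(alpha) + yshift * np.cos(alpha)) * scl
print('FLT-frame shift: %8.3f %8.3f' % (xsh, ysh))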
Code example #22
    def helper_read(self,
                    filename,
                    format=None,
                    check_FITS=True,
                    save_FITS=False,
                    *args,
                    **kwargs):
        """
        Wrapper around `astropy.table.Table.read()` for better
        auto-sensing of ascii table formats.
        """
        if not os.path.exists(filename):
            threedhst.showMessage('File %s not found.' % (filename), warn=True)
            return False

        if format is not None:
            data = table_base.read(filename, format=format, *args, **kwargs)
            data.input_format = {'format': format}
            data.filename = filename
            return data

        if filename.lower().endswith('.fits'):
            format = 'fits'
        else:
            ### Try to read the ".FITS" version first
            if check_FITS:
                #threedhst.showMessage('read', warn=True)
                status = self.load_FITS(filename)
                if status:
                    return status

            if format is None:
                try:
                    t = pyfits.open(filename)
                    format = 'fits'
                except:
                    print('Try ascii:')
                    line = open(filename).readline()
                    if line.strip().startswith('#'):
                        if line.split()[1].isdigit():
                            format = 'ascii.sextractor'
                        else:
                            format = 'ascii.commented_header'
                    else:
                        format = 'ascii.basic'

                    data = self.read(filename, format=format, *args, **kwargs)
                    data.input_format = {'format': format}
                    data.filename = filename

                    #### Split "APER" columns
                    if format == 'ascii.sextractor':
                        combine_columns = [
                            'FLUX_APER', 'FLUXERR_APER', 'MAG_APER',
                            'MAGERR_APER', 'FLUX_RADIUS'
                        ]
                        for base_col in combine_columns:
                            if '%s2' % (base_col) in data.colnames:
                                continue

                            ncol = 0
                            for col in data.colnames:
                                if base_col in col:
                                    ncol += 1

                            if ncol == 0:
                                continue

                            dtype = data[base_col].dtype
                            out = np.zeros((len(data), ncol), dtype=dtype)
                            for i in range(ncol):
                                if i == 0:
                                    incol = base_col
                                else:
                                    incol = '%s_%d' % (base_col, i)

                                out[:, i] = data[incol]
                                data.remove_column(incol)

                            #data.remove_column(base_col)
                            data.add_column(Column(name=base_col, data=out))

                    if save_FITS:
                        #threedhst.showMessage('write', warn=True)
                        data.write_FITS()

        if format == 'fits':
            t = pyfits.open(filename)
            if t[0].header['EXTEND']:
                if 'EXTNAME' in t[1].header:
                    if t[1].header['EXTNAME'] == 'LDAC_IMHEAD':
                        hdu = 2
                    else:
                        hdu = 1
                else:
                    hdu = 1
            else:
                hdu = 0

            data = self.read(filename, format='fits', hdu=hdu, *args, **kwargs)
            data.input_format = {'format': 'fits', 'HDU': hdu}

        data.filename = filename
        return data
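The ascii auto-sensing comes down to peeking at the first line of the file: a '#' header whose second token is a number is taken as SExtractor output, a '#' header otherwise as a commented header, and anything else as a basic whitespace table. A tiny standalone restatement of that heuristic (my paraphrase of the branch above):

def guess_ascii_format(path):
    """Return an astropy.io.ascii format name guessed from the file's first line."""
    line = open(path).readline()
    if line.strip().startswith('#'):
        if line.split()[1].isdigit():
            return 'ascii.sextractor'       # e.g. "#   1 NUMBER  Running object number"
        return 'ascii.commented_header'     # e.g. "# id ra dec mag"
    return 'ascii.basic'                    # plain whitespace-delimited columns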
Code example #23
File: interp.py Project: gbrammer/unicorn
def test():

    import interp_c
    import time
    import scipy
    import threedhst
    import numpy as np
    
    N = int(1.e6)
    
    xfull = np.arange(0,N+1,1)*1.
    #yfull = np.sin(xfull/(N/1239.)*2*np.pi)+1
    yfull = np.sin(xfull/np.pi/2/20)+0.2

    # coeffs = np.random.random(size=12)*5
    # yfull = scipy.polyval(coeffs, xfull)
    
    xint = np.arange(0,N+1,N/100)*1.
    
    tstart = time.time()
    denom = np.trapz(yfull,xfull)
    
    tstart = time.time()
    yint_0 = np.interp(xint, xfull, yfull)
    t0 = time.time()
    print 'Linear           : %.3f   (%.4e)' %(t0-tstart, np.trapz(yint_0, xint)/denom-1)

    yint_x = interp_c.interp_c(xint, xfull, yfull)
    tx = time.time()
    print 'Linear(c)        : %.3f   (%.4e)' %(tx-t0, np.trapz(yint_x, xint)/denom-1)
    
    xreverse = xint[::-1]
    yint_y = interp_c.interp_c(xreverse, xfull, yfull, assume_sorted=0)
    ty = time.time()
    print 'Linear(c) rev    : %.3f   (%.4e)' %(ty-tx, np.trapz(yint_y, xint)/denom-1)
    
    yint_1 = threedhst.utils.interp_conserve(xint, xfull, yfull)
    t1 = time.time()
    print 'Conserve         : %.3f   (%.4e)' %(t1-ty, np.trapz(yint_1, xint)/denom-1)
    
    yint_2 = interp_c.interp_conserve(xint, xfull, yfull)
    t2 = time.time()
    print 'Conserve (Cython): %.3f   (%.4e)' %(t2-t1, np.trapz(yint_2, xint)/denom-1)

    yint_3 = interp_c.interp_conserve_c(xint, xfull, yfull)
    t3 = time.time()
    print 'Conserve (more c): %.3f   (%.4e)' %(t3-t2, np.trapz(yint_3, xint)/denom-1)

    yint_4 = threedhst.utils.interp_conserve_c(xint, xfull, yfull)
    t4 = time.time()
    print 'Inline c         : %.3f   (%.4e)' %(t4-t3, np.trapz(yint_4, xint)/denom-1)

    #### Test interpolation
    threedhst.showMessage('Interpolation')
    
    #### Faster while n(int)/n(full) < 1./50
    
    xint = xfull[1000:-1000:40]

    tstart = time.time()    
    yint = np.interp(xint, xfull, yfull, left=0., right=0.)
    t0 = time.time()
    print 'Python         : %.4f' %(t0-tstart)

    yint1 = interp_c.interp_c(xint, xfull, yfull, extrapolate=0.)
    t1 = time.time()
    print 'Cython rewrite : %.4f   (%.2e)' %(t1-t0, np.sum((yint1-yint)**2))
    
    #### Test midpoint definition --- slices work better than by hand

    threedhst.showMessage('Midpoint')
    xmid = xfull
    
    tstart = time.time()
    midpoint = (xmid[1:]+xmid[:-1])/2.
    midpoint = np.append(midpoint, np.array([xmid[0],xmid[-1]]))
    midpoint = midpoint[np.argsort(midpoint)]
    t0 = time.time()
    print 'Python      :  %.3f  %.2e'   %(t0-tstart, np.sum((midpoint-midpoint)**2))

    midpoint_c1 = interp_c.midpoint(xmid)
    t1 = time.time()
    print 'Cython      :  %.3f  %.2e'   %(t1-t0, np.sum((midpoint_c1-midpoint)**2))

    midpoint_c2 = interp_c.midpoint_c(xmid, N+1)
    t2 = time.time()
    print 'Cython (opt):  %.3f  %.2e'   %(t2-t1, np.sum((midpoint_c2-midpoint)**2))
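The midpoint block relies on a slicing trick: average neighboring grid points, then re-attach the two endpoints and sort. Stand-alone, with a small array:

import numpy as np

x = np.array([0., 1., 3., 6.])
mid = (x[1:] + x[:-1]) / 2.                    # [0.5, 2.0, 4.5]
mid = np.append(mid, [x[0], x[-1]])            # put the endpoints back
mid = mid[np.argsort(mid)]                     # [0.0, 0.5, 2.0, 4.5, 6.0]
print(mid)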
Code example #24
File: grism_sky.py Project: AyushYadav/threedhst
def grism_sky_column_average_GP(asn_file='GDN12-G102_asn.fits', mask_grow=8):
    """
    Remove column-averaged residuals from grism exposures, smooth with Gaussian Processes
    """
    import scipy.ndimage as nd
    import astropy.io.fits as pyfits
    from sklearn.gaussian_process import GaussianProcess
    
    asn = threedhst.utils.ASNFile(asn_file)
            
    for k in range(len(asn.exposures)):
        #### 1D column averages
        flt = pyfits.open('%s_flt.fits' %(asn.exposures[k]), mode='update')
        segfile = '%s_flt.seg.fits' %(asn.exposures[k])
        seg = pyfits.open(segfile)[0].data
        seg_mask = nd.maximum_filter((seg > 0), size=mask_grow) == 0
        dq_ok = (flt[3].data & (4+32+16+512+2048+4096)) == 0
        
        mask = seg_mask & dq_ok & (flt[2].data > 0)
        
        threedhst.showMessage('Remove column average (GP): %s' %(asn.exposures[k]))
        
        #### Iterative clips on percentile
        #mask &= (flt[1].data < np.percentile(flt[1].data[mask], 98)) & (flt[2].data > 0) & (flt[1].data > np.percentile(flt[1].data[mask], 2))
        #mask &= (flt[1].data < np.percentile(flt[1].data[mask], 84)) & (flt[2].data > 0) & (flt[1].data > np.percentile(flt[1].data[mask], 16))
                    
        xmsk = np.arange(1014)

        masked = flt[1].data*1
        masked[~mask] = np.nan
        yres = np.zeros(1014)
        yrms = yres*0.
        for i in range(1014):
            # ymsk = mask[:,i]
            # yres[i] = np.median(flt[1].data[ymsk,i])
            ymsk = masked[:,i]
            #ymsk = masked[:,np.maximum(i-10,0):i+10]
            #yres[i] = np.median(ymsk[np.isfinite(ymsk)])
            ok = np.isfinite(ymsk)
            ymsk[(ymsk > np.percentile(ymsk[ok], 84)) | (ymsk < np.percentile(ymsk[ok], 16))] = np.nan
            msk = np.isfinite(ymsk)
            yres[i] = np.mean(ymsk[msk])
            yrms[i] = np.std(ymsk[msk])/np.sqrt(msk.sum())
            
        #
        yok = np.isfinite(yres)
        if 'GSKY00' in flt[0].header.keys():
            bg_sky = flt[0].header['GSKY00']
        else:
            bg_sky = 1
            
        gp = GaussianProcess(regr='constant', corr='squared_exponential', theta0=8,
                             thetaL=7, thetaU=12,
                             nugget=(yrms/bg_sky)[yok][::1]**2,
                             random_start=10, verbose=True, normalize=True) #, optimizer='Welch')
        #
        gp.fit(np.atleast_2d(xmsk[yok][::1]).T, yres[yok][::1]+bg_sky)
        y_pred, MSE = gp.predict(np.atleast_2d(xmsk).T, eval_MSE=True)
        gp_sigma = np.sqrt(MSE)
        
        resid = threedhst.utils.medfilt(yres, 41)

        flt[1].data -= y_pred-bg_sky
        flt.flush()
        
        #flt.writeto(flt.filename(), clobber=True)
        
        #plt.plot(yres_sm)
        
        ### Make figure
        from matplotlib.figure import Figure
        from matplotlib.backends.backend_agg import FigureCanvasAgg
        
        fig = Figure(figsize=[6,4], dpi=100)

        fig.subplots_adjust(wspace=0.25,hspace=0.02,left=0.15,
                            bottom=0.08,right=0.97,top=0.92)

        ax = fig.add_subplot(111)
        ax.set_title(flt.filename())

        ax.plot(yres, color='black', alpha=0.3)
        ax.plot(y_pred-bg_sky, color='red', linewidth=2, alpha=0.7)
        ax.fill_between(xmsk, y_pred-bg_sky + gp_sigma, y_pred-bg_sky - gp_sigma, color='red', alpha=0.3)
        
        ax.set_xlim(0,1014)
        ax.set_xlabel('x pix'); ax.set_ylabel('BG residual (e/s)')
        fig.tight_layout(pad=0.2)
        
        canvas = FigureCanvasAgg(fig)
        canvas.print_figure(flt.filename().split('.fits')[0] + '.column.png', dpi=100, transparent=False)
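A note on dependencies: the GaussianProcess class imported here belongs to older scikit-learn releases; newer versions provide GaussianProcessRegressor instead. The per-column statistic that feeds the smoother, a clipped mean and its standard error for each detector column, can be sketched on its own with a synthetic masked image:

import numpy as np

rng = np.random.default_rng(2)
img = rng.normal(0.0, 0.01, size=(1014, 1014)) + 0.02    # synthetic residual image
img[rng.random(img.shape) < 0.2] = np.nan                # NaN out "masked" pixels

yres = np.zeros(img.shape[1])                            # clipped mean per column
yrms = np.zeros(img.shape[1])                            # standard error per column
for i in range(img.shape[1]):
    col = img[:, i].copy()
    ok = np.isfinite(col)
    lo, hi = np.percentile(col[ok], [16, 84])
    col[(col < lo) | (col > hi)] = np.nan                # clip the tails, as in the listing
    msk = np.isfinite(col)
    yres[i] = np.mean(col[msk])
    yrms[i] = np.std(col[msk]) / np.sqrt(msk.sum())
print('first five column means:', np.round(yres[:5], 4))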
Code example #25
File: grism_sky.py Project: irhamta/eazy-photoz
def grism_sky_column_average_GP(asn_file='GDN12-G102_asn.fits', mask_grow=8):
    """
    Remove column-averaged residuals from grism exposures, smooth with Gaussian Processes
    """
    import scipy.ndimage as nd
    import astropy.io.fits as pyfits
    from sklearn.gaussian_process import GaussianProcess
    
    asn = threedhst.utils.ASNFile(asn_file)
            
    for k in range(len(asn.exposures)):
        #### 1D column averages
        flt = pyfits.open('%s_flt.fits' %(asn.exposures[k]), mode='update')
        segfile = '%s_flt.seg.fits' %(asn.exposures[k])
        seg = pyfits.open(segfile)[0].data
        seg_mask = nd.maximum_filter((seg > 0), size=mask_grow) == 0
        dq_ok = (flt[3].data & (4+32+16+512+2048+4096)) == 0
        
        mask = seg_mask & dq_ok & (flt[2].data > 0)
        
        threedhst.showMessage('Remove column average (GP): %s' %(asn.exposures[k]))
        
        #### Iterative clips on percentile
        #mask &= (flt[1].data < np.percentile(flt[1].data[mask], 98)) & (flt[2].data > 0) & (flt[1].data > np.percentile(flt[1].data[mask], 2))
        #mask &= (flt[1].data < np.percentile(flt[1].data[mask], 84)) & (flt[2].data > 0) & (flt[1].data > np.percentile(flt[1].data[mask], 16))
                    
        xmsk = np.arange(1014)

        masked = flt[1].data*1
        masked[~mask] = np.nan
        yres = np.zeros(1014)
        yrms = yres*0.
        for i in range(1014):
            # ymsk = mask[:,i]
            # yres[i] = np.median(flt[1].data[ymsk,i])
            ymsk = masked[:,i]
            #ymsk = masked[:,np.maximum(i-10,0):i+10]
            #yres[i] = np.median(ymsk[np.isfinite(ymsk)])
            ok = np.isfinite(ymsk)
            ymsk[(ymsk > np.percentile(ymsk[ok], 84)) | (ymsk < np.percentile(ymsk[ok], 16))] = np.nan
            msk = np.isfinite(ymsk)
            yres[i] = np.mean(ymsk[msk])
            yrms[i] = np.std(ymsk[msk])/np.sqrt(msk.sum())
            
        #
        yok = np.isfinite(yres)
        if 'GSKY00' in list(flt[0].header.keys()):
            bg_sky = flt[0].header['GSKY00']
        else:
            bg_sky = 1
            
        gp = GaussianProcess(regr='constant', corr='squared_exponential', theta0=8,
                             thetaL=7, thetaU=12,
                             nugget=(yrms/bg_sky)[yok][::1]**2,
                             random_start=10, verbose=True, normalize=True) #, optimizer='Welch')
        #
        gp.fit(np.atleast_2d(xmsk[yok][::1]).T, yres[yok][::1]+bg_sky)
        y_pred, MSE = gp.predict(np.atleast_2d(xmsk).T, eval_MSE=True)
        gp_sigma = np.sqrt(MSE)
        
        resid = threedhst.utils.medfilt(yres, 41)

        flt[1].data -= y_pred-bg_sky
        flt.flush()
        
        #flt.writeto(flt.filename(), clobber=True)
        
        #plt.plot(yres_sm)
        
        ### Make figure
        from matplotlib.figure import Figure
        from matplotlib.backends.backend_agg import FigureCanvasAgg
        
        fig = Figure(figsize=[6,4], dpi=100)

        fig.subplots_adjust(wspace=0.25,hspace=0.02,left=0.15,
                            bottom=0.08,right=0.97,top=0.92)

        ax = fig.add_subplot(111)
        ax.set_title(flt.filename())

        ax.plot(yres, color='black', alpha=0.3)
        ax.plot(y_pred-bg_sky, color='red', linewidth=2, alpha=0.7)
        ax.fill_between(xmsk, y_pred-bg_sky + gp_sigma, y_pred-bg_sky - gp_sigma, color='red', alpha=0.3)
        
        ax.set_xlim(0,1014)
        ax.set_xlabel('x pix'); ax.set_ylabel('BG residual (e/s)')
        fig.tight_layout(pad=0.2)
        
        canvas = FigureCanvasAgg(fig)
        canvas.print_figure(flt.filename().split('.fits')[0] + '.column.png', dpi=100, transparent=False)
Code example #26
File: catIO.py Project: kewhitaker/threedhst
    def helper_read(self, filename, format=None, check_FITS=True, save_FITS=False, *args, **kwargs):
        """
        Wrapper around `astropy.table.Table.read()` for better
        auto-sensing of ascii table formats.
        """
        if not os.path.exists(filename):
            threedhst.showMessage('File %s not found.' %(filename), warn=True)
            return False
        
        if format is not None:
            data = table_base.read(filename, format=format, *args, **kwargs)
            data.input_format = {'format':format}
            data.filename = filename
            return data

        if filename.lower().endswith('.fits'):
            format = 'fits'
        else:
            ### Try to read the ".FITS" version first
            if check_FITS:
                #threedhst.showMessage('read', warn=True)
                status = self.load_FITS(filename)
                if status:
                    return status
                    
            if format is None:
                try:
                    t = pyfits.open(filename)
                    format = 'fits'
                except:
                    print('Try ascii:')
                    line = open(filename).readline()
                    if line.strip().startswith('#'):
                        if line.split()[1].isdigit():
                            format='ascii.sextractor'
                        else:
                            format='ascii.commented_header'
                    else:
                        format='ascii.basic'

                    data = self.read(filename, format=format, *args, **kwargs)
                    data.input_format = {'format':format}
                    data.filename = filename
                    
                    if save_FITS:
                        #threedhst.showMessage('write', warn=True)
                        data.write_FITS()
                        
        if format == 'fits':
            t = pyfits.open(filename)
            if t[0].header['EXTEND']:
                if 'EXTNAME' in t[1].header:
                    if t[1].header['EXTNAME'] == 'LDAC_IMHEAD':
                        hdu = 2
                    else:
                        hdu = 1
                else:
                    hdu = 1
            else:
                hdu = 0

            data = self.read(filename, format='fits', hdu=hdu, *args, **kwargs)            
            data.input_format = {'format':'fits','HDU':hdu}

        data.filename = filename
        return data
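A hedged usage sketch for the reader above; it assumes this method backs the `catIO.Table` interface used elsewhere in this document (e.g. in drzTweakReg below), and the file names are illustrative.
# Hedged usage sketch (file names are illustrative, not from the source):
from threedhst import catIO

cat = catIO.Table('my_catalog.cat')        # auto-senses 'ascii.sextractor' vs 'ascii.commented_header'
print(cat.input_format, cat.filename)
fits_cat = catIO.Table('my_catalog.fits')  # read directly as FITS; LDAC_IMHEAD tables handled via hdu=2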
Code example #27
def prep_direct_grism_pair(direct_asn='goodss-34-F140W_asn.fits', grism_asn='goodss-34-G141_asn.fits', radec=None, raw_path='../RAW/', mask_grow=18, scattered_light=False, final_scale=None, skip_direct=False, ACS=False, jump=False, order=2, get_shift=True, align_threshold=20, column_average=True, sky_iter=3, run_acs_lacosmic=False):
    """
    Process both the direct and grism observations of a given visit
    """
    import threedhst.prep_flt_astrodrizzle as prep
    import drizzlepac
    from stwcs import updatewcs
    
    import time
    
    t0 = time.time()
    
    #direct_asn='goodss-34-F140W_asn.fits'; grism_asn='goodss-34-G141_asn.fits'; radec=None; raw_path='../RAW/'
    #radec = os.getenv('THREEDHST') + '/ASTRODRIZZLE_FLT/Catalog/goodss_radec.dat'
    
    ################################
    #### Direct image processing
    ################################
    
    #### xx add astroquery 2MASS/SDSS workaround for radec=None
    
    if not skip_direct:

        #### Get fresh FLTS from ../RAW/
        asn = threedhst.utils.ASNFile(direct_asn)
        if ACS:
            for exp in asn.exposures:
                print('cp %s/%s_flc.fits.gz .' % (raw_path, exp))
                os.system('cp %s/%s_flc.fits.gz .' %(raw_path, exp))
                os.system('gunzip -f %s_flc.fits.gz' %(exp))
                
                if run_acs_lacosmic:
                    try:
                        import lacosmicx
                        status = True
                    except:
                        print('import lacosmicx failed!')
                        status = False
                    
                    if status:
                        im = pyfits.open('%s_flc.fits' %(exp), mode='update')
                        for ext in [1,2]:
                            indata = im['SCI',ext].data
                            #inmask = im['DQ',ext].data > 0

                            if im['SCI',ext].header['BUNIT'] == 'ELECTRONS':
                                gain = 1
                            else:
                                gain = 1./im[0].header['EXPTIME']

                            if 'MDRIZSK0' in im['SCI',ext].header:
                                pssl = im['SCI',ext].header['MDRIZSK0']
                            else:
                                pssl = 0.
                            
                            if 'FLASHLVL' in im[0].header:
                                pssl += im[0].header['FLASHLVL']
                                sig_scale = 1.8
                            else:
                                sig_scale = 1.
                                
                            out = lacosmicx.lacosmicx(indata, inmask=None, 
                                    sigclip=3.5*sig_scale, sigfrac=0.2,
                                    objlim=7.0, gain=gain,
                                    readnoise=im[0].header['READNSEA'], 
                                    satlevel=np.inf, pssl=pssl, niter=5,
                                    sepmed=True, cleantype='meanmask',
                                    fsmode='median', psfmodel='gauss',
                                    psffwhm=2.5,psfsize=7, psfk=None,
                                    psfbeta=4.765, verbose=True)
                        
                            crmask, cleanarr  = out
                            im['DQ',ext].data |= 16*crmask
                            
                            ### Low pixels
                            if im[0].header['INSTRUME'] == 'WFC3':
                                bad = im['SCI',ext].data < -4*im['ERR',ext].data
                                im['DQ',ext].data |= 16*bad
                                
                        im.flush()
                        
        else:
            threedhst.process_grism.fresh_flt_files(direct_asn, from_path=raw_path)
        
        if (not ACS):
            #### Subtract WFC3/IR direct backgrounds
            prep.subtract_flt_background(root=direct_asn.split('_asn')[0], scattered_light=scattered_light, order=order)
            #### Flag IR CRs again within runTweakReg
        
        #### Run TweakReg
        if (radec is None) & (not ACS):
            print(len(asn.exposures))
            
            if len(asn.exposures) > 1:
                drizzlepac.astrodrizzle.AstroDrizzle(direct_asn, clean=True, final_scale=None, final_pixfrac=0.8, context=False, final_bits=576, preserve=False, driz_cr_snr='5.0 4.0', driz_cr_scale = '2.5 0.7') 
            else:
                drizzlepac.astrodrizzle.AstroDrizzle(direct_asn, clean=True, final_scale=None, final_pixfrac=1, context=False, final_bits=576, preserve=False, driz_separate=False, driz_sep_wcs=False, median=False, blot=False, driz_cr=False, driz_cr_corr=False, driz_combine=True) 
        else:
            if get_shift:
                prep.runTweakReg(asn_file=direct_asn, master_catalog=radec, final_scale=None, ACS=ACS, threshold=align_threshold)
        
        #### Subtract background of direct ACS images
        if ACS:
            for exp in asn.exposures:
                flc = pyfits.open('%s_flc.fits' %(exp), mode='update')
                if 'SUB' in flc[0].header['APERTURE']:
                    extensions = [1]
                else:
                    extensions = [1,4]
                    
                for ext in extensions:
                    threedhst.showMessage('Subtract background from %s_flc.fits[%d] : %.4f' %(exp, ext, flc[ext].header['MDRIZSKY']))
                    flc[ext].data -= flc[ext].header['MDRIZSKY']
                    flc[ext].header['MDRIZSK0'] = flc[ext].header['MDRIZSKY']
                    flc[ext].header['MDRIZSKY'] = 0.
                #
                flc.flush()
        else:
            pass
            #### Do this later, gives segfaults here???
            #prep.subtract_flt_background(root=direct_asn.split('_asn')[0], scattered_light=scattered_light)
            #### Flag CRs again on BG-subtracted image
            #drizzlepac.astrodrizzle.AstroDrizzle(direct_asn, clean=True, final_scale=None, final_pixfrac=0.8, context=False, final_bits=576, preserve=False, driz_cr_snr='5.0 4.0', driz_cr_scale = '2.5 0.7') # ,
        
    ################################
    #### Grism image processing
    ################################
    
    if grism_asn:
        asn = threedhst.utils.ASNFile(grism_asn)
        if ACS:
            for exp in asn.exposures:
                print('cp %s/%s_flc.fits.gz .' % (raw_path, exp))
                os.system('cp %s/%s_flc.fits.gz .' %(raw_path, exp))
                os.system('gunzip -f %s_flc.fits.gz' %(exp))
                updatewcs.updatewcs('%s_flc.fits' %(exp))

            prep.copy_adriz_headerlets(direct_asn=direct_asn, grism_asn=grism_asn, ACS=True)
            prep.subtract_acs_grism_background(asn_file=grism_asn, final_scale=None)
        else:
            #### Remove the sky and flag CRs
            ## with mask from rough zodi-only subtraction
            prep.subtract_grism_background(asn_file=grism_asn, PATH_TO_RAW='../RAW/', final_scale=None, visit_sky=True, column_average=False, mask_grow=mask_grow, first_run=True)
            ## Redo making mask from better combined image
            prep.subtract_grism_background(asn_file=grism_asn, PATH_TO_RAW='../RAW/', final_scale=final_scale, visit_sky=True, column_average=column_average, mask_grow=mask_grow, first_run=False, sky_iter=sky_iter)
                        
            #### Copy headers from direct images
            if radec is not None:
                prep.copy_adriz_headerlets(direct_asn=direct_asn, grism_asn=grism_asn, ACS=False)
                #### Run CR rejection with final shifts
                drizzlepac.astrodrizzle.AstroDrizzle(grism_asn, clean=True, skysub=False, final_wcs=True, final_scale=final_scale, final_pixfrac=0.8, context=False, final_bits=576, driz_sep_bits=576, preserve=False, driz_cr_snr='8.0 5.0', driz_cr_scale='2.5 0.7') # driz_cr_snr='5.0 4.0', driz_cr_scale = '2.5 0.7')
                
    if not grism_asn:
        t1 = time.time()
        threedhst.showMessage('direct: %s\n\nDone (%d s).' %(direct_asn, int(t1-t0)))
    else:
        t1 = time.time()
        threedhst.showMessage('direct: %s\ngrism: %s\n\nDone (%d s).' %(direct_asn, grism_asn, int(t1-t0)))
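A hedged example call, built from the defaults in the signature above; the association names and the radec catalog path are illustrative, and the function is assumed to be importable from its parent module (not named in this excerpt).
# Hedged usage sketch (file names and paths illustrative):
prep_direct_grism_pair(direct_asn='goodss-34-F140W_asn.fits',
                       grism_asn='goodss-34-G141_asn.fits',
                       radec='../Catalog/goodss_radec.dat',
                       raw_path='../RAW/',
                       final_scale=0.06,
                       column_average=True,
                       sky_iter=3)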
Code example #28
File: catIO.py  Project: gbrammer/threedhst
    def helper_read(self, filename, format=None, check_FITS=True, save_FITS=False, *args, **kwargs):
        """
        Wrapper around `astropy.table.Table.read()` for better
        auto-sensing of ascii table formats
        """
        if not os.path.exists(filename):
            threedhst.showMessage('File %s not found.' %(filename), warn=True)
            return False
        
        if format is not None:
            data = table_base.read(filename, format=format, *args, **kwargs)
            data.input_format = {'format':format}
            data.filename = filename
            return data

        if filename.lower().endswith('.fits'):
            format = 'fits'
        else:
            ### Try to read the ".FITS" version first
            if check_FITS:
                #threedhst.showMessage('read', warn=True)
                status = self.load_FITS(filename)
                if status:
                    return status
                    
            if format is None:
                try:
                    t = pyfits.open(filename)
                    format = 'fits'
                except:
                    print('Try ascii:')
                    line = open(filename).readline()
                    if line.strip().startswith('#'):
                        if line.split()[1].isdigit():
                            format='ascii.sextractor'
                        else:
                            format='ascii.commented_header'
                    else:
                        format='ascii.basic'

                    data = self.read(filename, format=format, *args, **kwargs)
                    data.input_format = {'format':format}
                    data.filename = filename
                    
                    #### Merge the numbered SExtractor "APER" columns into array columns
                    if format == 'ascii.sextractor':
                        combine_columns = ['FLUX_APER', 'FLUXERR_APER', 'MAG_APER', 'MAGERR_APER', 'FLUX_RADIUS']
                        for base_col in combine_columns:
                            if '%s2'  %(base_col) in data.colnames:
                                continue
                            
                            ncol = 0
                            for col in data.colnames:
                                if base_col in col:
                                    ncol += 1
                            
                            if ncol == 0:
                                continue
                                
                            dtype = data[base_col].dtype
                            out = np.zeros((len(data), ncol), dtype=dtype)
                            for i in range(ncol):
                                if i == 0:
                                    incol = base_col
                                else:
                                    incol = '%s_%d' %(base_col, i)
                                
                                out[:,i] = data[incol]
                                data.remove_column(incol)
                            
                            #data.remove_column(base_col)
                            data.add_column(Column(name=base_col, data=out))
                            
                    if save_FITS:
                        #threedhst.showMessage('write', warn=True)
                        data.write_FITS()
                        
        if format == 'fits':
            t = pyfits.open(filename)
            if t[0].header['EXTEND']:
                if 'EXTNAME' in t[1].header:
                    if t[1].header['EXTNAME'] == 'LDAC_IMHEAD':
                        hdu = 2
                    else:
                        hdu = 1
                else:
                    hdu = 1
            else:
                hdu = 0

            data = self.read(filename, format='fits', hdu=hdu, *args, **kwargs)            
            data.input_format = {'format':'fits','HDU':hdu}

        data.filename = filename
        return data
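The "APER" handling above packs the numbered SExtractor aperture columns (FLUX_APER, FLUX_APER_1, ...) into single 2D columns. A standalone, hedged sketch of the same idea with plain astropy follows; the column names and values are illustrative.
# Hedged sketch of the aperture-column merging done above, using astropy only.
import numpy as np
from astropy.table import Table, Column

t = Table({'FLUX_APER': [1.0, 2.0], 'FLUX_APER_1': [1.5, 2.5], 'FLUX_APER_2': [1.8, 2.8]})
cols = ['FLUX_APER'] + ['FLUX_APER_%d' % i for i in [1, 2]]
stacked = np.vstack([np.asarray(t[c]) for c in cols]).T   # shape (nrows, napertures)
for c in cols:
    t.remove_column(c)
t.add_column(Column(name='FLUX_APER', data=stacked))
print(t['FLUX_APER'].shape)                               # (2, 3)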
Code example #29
def grism_sky_column_average(asn_file='GDN12-G102_asn.fits',
                             iter=2,
                             mask_grow=8):
    """
    Remove column-averaged residuals from grism exposures
    """
    import scipy.ndimage as nd
    import astropy.io.fits as pyfits

    asn = threedhst.utils.ASNFile(asn_file)

    for k in range(len(asn.exposures)):
        #### 1D column averages
        flt = pyfits.open('%s_flt.fits' % (asn.exposures[k]), mode='update')
        segfile = '%s_flt.seg.fits' % (asn.exposures[k])
        seg = pyfits.open(segfile)[0].data
        seg_mask = nd.maximum_filter((seg > 0), size=mask_grow) == 0
        dq_ok = (flt[3].data & (4 + 32 + 16 + 512 + 2048 + 4096)) == 0

        mask = seg_mask & dq_ok & (flt[2].data > 0)

        #### Iterative clips on percentile
        #mask &= (flt[1].data < np.percentile(flt[1].data[mask], 98)) & (flt[2].data > 0) & (flt[1].data > np.percentile(flt[1].data[mask], 2))
        #mask &= (flt[1].data < np.percentile(flt[1].data[mask], 84)) & (flt[2].data > 0) & (flt[1].data > np.percentile(flt[1].data[mask], 16))

        residuals = []
        for j in range(iter):
            masked = flt[1].data * 1
            masked[~mask] = np.nan
            yres = np.zeros(1014)
            for i in range(1014):
                # ymsk = mask[:,i]
                # yres[i] = np.median(flt[1].data[ymsk,i])
                ymsk = masked[:, i]
                #ymsk = masked[:,np.maximum(i-10,0):i+10]
                #yres[i] = np.median(ymsk[np.isfinite(ymsk)])
                ok = np.isfinite(ymsk)
                ymsk[(ymsk > np.percentile(ymsk[ok], 84)) |
                     (ymsk < np.percentile(ymsk[ok], 16))] = np.nan
                yres[i] = np.mean(ymsk[np.isfinite(ymsk)])

            #
            resid = threedhst.utils.medfilt(yres, 41)
            #
            #resid = np.dot(np.ones((1014,1)), yres_sm.reshape(1,1014))
            flt[1].data -= resid
            residuals.append(resid * 1)

        threedhst.showMessage('Remove column average: %s' % (asn.exposures[k]))
        flt.flush()
        #flt.writeto(flt.filename(), clobber=True)

        #plt.plot(yres_sm)

        ### Make figure
        from matplotlib.figure import Figure
        from matplotlib.backends.backend_agg import FigureCanvasAgg

        fig = Figure(figsize=[6, 4], dpi=100)

        fig.subplots_adjust(wspace=0.25,
                            hspace=0.02,
                            left=0.15,
                            bottom=0.08,
                            right=0.97,
                            top=0.92)

        ax = fig.add_subplot(111)
        ax.set_xlim(0, 1014)
        ax.set_title(flt.filename())

        ax.plot(yres, color='black')
        for resid in residuals:
            ax.plot(resid, color='red', linewidth=2, alpha=0.7)

        ax.set_xlim(0, 1014)

        canvas = FigureCanvasAgg(fig)
        canvas.print_figure(flt.filename().split('.fits')[0] + '.column.png',
                            dpi=100,
                            transparent=False)
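A hedged example call using the defaults from the signature above; the association name is illustrative.
# Hedged usage sketch (ASN name illustrative):
grism_sky_column_average(asn_file='GDN12-G102_asn.fits', iter=2, mask_grow=8)
# Each *_flt.fits in the ASN is modified in place and a *.column.png
# diagnostic figure is written next to it.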
Code example #30
def process_acs_pair(asn_direct_file='ib3706050_asn.fits',
                     asn_grism_file='ib3706060_asn.fits',
                     field='COSMOS',
                     ALIGN_IMAGE='../ACS/h_nz_sect*img.fits',
                     ALIGN_EXTENSION=0,
                     SKIP_GRISM=False,
                     adjust_targname=True,
                     align_geometry='shift',
                     PATH_TO_RAW='../RAW',
                     get_shift=True,
                     TWEAKSHIFTS_ONLY=False,
                     FLC=True):
    """
    Does the basic processing for ACS F814W and G800L pointings: background subtraction, alignment, and drizzling.
    """

    import threedhst
    import threedhst.prep_flt_files
    from threedhst.prep_flt_files import make_targname_asn

    #### Copy corrected FLT files to .
    asn = threedhst.utils.ASNFile(asn_direct_file)
    for exp in asn.exposures:
        print(exp)
        os.system('rm %s_flt.fits' % (exp))
        if FLC:
            os.system('cp ../RAW/%s_flc.fits %s_flt.fits' % (exp, exp))
        else:
            os.system('cp ../FIXED/%s_flt.fits . ' % (exp))
    #
    asn = threedhst.utils.ASNFile(asn_grism_file)
    for exp in asn.exposures:
        print(exp)
        os.system('rm %s_flt.fits' % (exp))
        if FLC:
            os.system('cp ../RAW/%s_flc.fits %s_flt.fits' % (exp, exp))
        else:
            os.system('cp ../FIXED/%s_flt.fits . ' % (exp))

    #DIRECT REDUCTION
    ROOT_DIRECT = asn_direct_file.split('_asn.fits')[0]

    from threedhst.prep_flt_files import make_targname_asn

    #this makes new asn.fits files but with ACS the names start with ANY
    #must add an optional tag to replace ANY with the field name
    if (asn_direct_file is not None) & adjust_targname:
        asn_direct_file = make_targname_asn(asn_direct_file,
                                            field=field,
                                            ext='flc')

    if (asn_grism_file is not None) & adjust_targname:
        asn_grism_file = make_targname_asn(asn_grism_file,
                                           field=field,
                                           ext='flc')

    #run = threedhst.prep_flt_files.MultidrizzleRun((asn_direct_file.split('_asn.fits')[0]).upper())
    threedhst.shifts.run_tweakshifts(asn_direct_file, verbose=True)
    threedhst.prep_flt_files.startMultidrizzle(asn_direct_file,
                                               use_shiftfile=True,
                                               skysub=True,
                                               final_scale=0.05,
                                               pixfrac=1,
                                               driz_cr=True,
                                               updatewcs=True,
                                               clean=True,
                                               median=True)

    for i, exp in enumerate(asn.exposures):
        asn_mask = asn.exposures[i] + '_flt.fits.mask.reg'
        print(asn_mask)
        if os.path.exists(asn_mask):
            threedhst.showMessage("Apply ASN mask: %s" % (asn_mask))
            threedhst.regions.apply_dq_mask(asn.exposures[i] + '_flt.fits',
                                            extension=3,
                                            mask_file=asn_mask)

    threedhst.shifts.refine_shifts(
        ROOT_DIRECT=asn_direct_file.split('_as')[0].upper(),
        ALIGN_IMAGE=ALIGN_IMAGE,
        ALIGN_EXTENSION=ALIGN_EXTENSION,
        fitgeometry=align_geometry,
        clean=True)

    unicorn.go_acs.testing_f814w_background(asn_direct_file)

    SCALE = 0.06
    PIXFRAC = 1.0

    threedhst.prep_flt_files.startMultidrizzle(asn_direct_file,
                                               use_shiftfile=True,
                                               skysub=True,
                                               final_scale=SCALE,
                                               pixfrac=PIXFRAC,
                                               driz_cr=False,
                                               updatewcs=True,
                                               clean=True,
                                               median=False)

    #GRISM REDUCTION

    threedhst.shifts.make_grism_shiftfile(asn_direct_file, asn_grism_file)

    threedhst.prep_flt_files.startMultidrizzle(asn_grism_file,
                                               use_shiftfile=True,
                                               skysub=True,
                                               final_scale=SCALE,
                                               pixfrac=PIXFRAC,
                                               driz_cr=True,
                                               updatewcs=True,
                                               clean=False,
                                               median=True)

    unicorn.go_acs.testing_g800l_background(asn_grism_file)

    threedhst.prep_flt_files.startMultidrizzle(asn_grism_file,
                                               use_shiftfile=True,
                                               skysub=True,
                                               final_scale=SCALE,
                                               pixfrac=PIXFRAC,
                                               driz_cr=True,
                                               updatewcs=True,
                                               clean=False,
                                               median=True)
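A hedged example call with illustrative file names, following the defaults in the signature above.
# Hedged usage sketch (file names illustrative):
process_acs_pair(asn_direct_file='ib3706050_asn.fits',
                 asn_grism_file='ib3706060_asn.fits',
                 field='COSMOS',
                 ALIGN_IMAGE='../ACS/h_nz_sect*img.fits',
                 align_geometry='shift',
                 FLC=True)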
Code example #31
def subtract_flt_background(root='GOODN-N1-VBA-F105W', scattered_light=False, sex_background=False, order=2):
    """
    Subtract polynomial background
    """
    import scipy.optimize
    
    import astropy.units as u
    
    from astropy.table import Table as table
    
    import stwcs
    from stwcs import updatewcs
    
    import drizzlepac
    from drizzlepac import astrodrizzle, tweakreg, tweakback
    
    import threedhst
    
    asn = threedhst.utils.ASNFile(root+'_asn.fits')
    for exp in asn.exposures:
        updatewcs.updatewcs('%s_%s.fits' %(exp, 'flt'))

    if not os.path.exists('%s_drz_sci.fits' %(root)):        
        if len(asn.exposures) == 1:
            drizzlepac.astrodrizzle.AstroDrizzle(root+'_asn.fits', clean=False, context=False, preserve=False, skysub=True, driz_separate=False, driz_sep_wcs=False, median=False, blot=False, driz_cr=False, driz_cr_corr=False, driz_combine=True)
        else:
            drizzlepac.astrodrizzle.AstroDrizzle(root+'_asn.fits', clean=False, context=False, preserve=False, skysub=True, driz_separate=True, driz_sep_wcs=True, median=True, blot=True, driz_cr=True, driz_cr_corr=True, driz_combine=True)
    
    se = threedhst.sex.SExtractor()
    se.options['WEIGHT_IMAGE'] = '%s_drz_wht.fits' %(root)
    se.options['WEIGHT_TYPE'] = 'MAP_WEIGHT'
    se.options['CHECKIMAGE_TYPE'] = 'SEGMENTATION,BACKGROUND'
    se.options['CHECKIMAGE_NAME'] = '%s_drz_seg.fits,%s_drz_bkg.fits' %(root, root)
    se.options['BACK_TYPE'] = 'AUTO'
    se.options['BACK_SIZE'] = '256'
    #
    se.params['X_IMAGE'] = True; se.params['Y_IMAGE'] = True
    se.params['MAG_AUTO'] = True
    #
    se.options['CATALOG_NAME'] = '%s_drz_sci.cat' %(root)
    se.options['FILTER'] = 'Y'
    se.copyConvFile()
    se.options['FILTER_NAME'] = 'gauss_4.0_7x7.conv'
    se.options['DETECT_THRESH'] = '0.8'
    se.options['ANALYSIS_THRESH'] = '0.8'
    #
    se.options['MEMORY_OBJSTACK'] = '30000'
    se.options['MEMORY_PIXSTACK'] = '3000000'
    se.options['MEMORY_BUFSIZE'] = '2048'
    
    se.sextractImage('%s_drz_sci.fits' %(root))
    #threedhst.sex.sexcatRegions('%s_flt.cat' %(exp), '%s_flt.reg' %(exp), format=1)
    
    #### Blot segmentation map to FLT images for object mask
    asn = threedhst.utils.ASNFile('%s_asn.fits' %(root))
    
    #print 'Read files...'
    ref = pyfits.open('%s_drz_sci.fits' %(root))
    ref_wcs = stwcs.wcsutil.HSTWCS(ref, ext=0)

    seg = pyfits.open('%s_drz_seg.fits' %(root))    
    #### Fill ref[0].data with zeros for seg mask
    #seg_data = ref[0].data
    #seg_data[seg[0].data == 0] = 0
    seg_data = np.cast[np.float32](seg[0].data)
    
    bkg_data = pyfits.open('%s_drz_bkg.fits' %(root))[0].data
      
    yi, xi = np.indices((1014,1014))
    if scattered_light:        
        bg_components = np.ones((4,1014,1014))
        bg_components[1,:,:] = xi/1014.*2
        bg_components[2,:,:] = yi/1014.*2
        bg_components[3,:,:] = pyfits.open(os.getenv('THREEDHST') + '/CONF/G141_scattered_light.fits')[0].data
        #### Use flat-field itself for images affected by full-field 
        #### persistence from the tungsten lamp
        if scattered_light == 2:
            bg_components[3,:,:] = pyfits.open(os.getenv('iref') + 'flat_UDF_F140W_v0.fits')[1].data[5:-5,5:-5]
            
        NCOMP=4
    else:
        # bg_components = np.ones((3,1014,1014))
        # bg_components[1,:,:] = xi/1014.*2
        # bg_components[2,:,:] = yi/1014.*2
        # NCOMP=3
        #
        if order == 2:
            NCOMP=6
            bg_components = np.ones((NCOMP,1014,1014))
            bg_components[1,:,:] = (xi-507)/507.
            bg_components[2,:,:] = (yi-507)/507.
            bg_components[3,:,:] = ((xi-507)/507.)**2
            bg_components[4,:,:] = ((yi-507)/507.)**2
            bg_components[5,:,:] = (xi-507)*(yi-507)/507.**2
        else:
            NCOMP=3
            bg_components = np.ones((NCOMP,1014,1014))
            bg_components[1,:,:] = (xi-507)/507.
            bg_components[2,:,:] = (yi-507)/507.
            
    bg_flat = bg_components.reshape((NCOMP,1014**2))
    
    #### Loop through FLTs, blotting reference and segmentation
    models = []
    for exp in asn.exposures:
        flt = pyfits.open('%s_flt.fits' %(exp)) #, mode='update')
        flt_wcs = stwcs.wcsutil.HSTWCS(flt, ext=1)
        
        ### segmentation        
        print('Segmentation image: %s_blot.fits' % (exp))
        blotted_seg = astrodrizzle.ablot.do_blot(seg_data, ref_wcs, flt_wcs, 1, coeffs=True, interp='nearest', sinscl=1.0, stepsize=10, wcsmap=None)
        
        blotted_bkg = 0.
        if sex_background:
            blotted_bkg = astrodrizzle.ablot.do_blot(bkg_data, ref_wcs, flt_wcs, 1, coeffs=True, interp='nearest', sinscl=1.0, stepsize=10, wcsmap=None)
            flt[1].data -= blotted_bkg
            
        mask = (blotted_seg == 0) & (flt['DQ'].data == 0) & (flt[1].data > -1) & (xi > 10) & (yi > 10) & (xi < 1004) & (yi < 1004)
        mask &= (flt[1].data < 5*np.median(flt[1].data[mask]))
        data_range = np.percentile(flt[1].data[mask], [2.5, 97.5])
        mask &= (flt[1].data >= data_range[0]) & (flt[1].data <= data_range[1])
        data_range = np.percentile(flt[2].data[mask], [0.5, 99.5])
        mask &= (flt[2].data >= data_range[0]) & (flt[2].data <= data_range[1])
        
        ### Least-sq fit for component normalizations
        data = flt[1].data[mask].flatten()
        wht = (1./flt[2].data[mask].flatten())**2
        templates = bg_flat[:, mask.flatten()]
        p0 = np.zeros(NCOMP)
        p0[0] = np.median(data)
        obj_fun = threedhst.grism_sky.obj_lstsq
        popt = scipy.optimize.leastsq(obj_fun, p0, args=(data, templates, wht), full_output=True, ftol=1.49e-8/1000., xtol=1.49e-8/1000.)
        xcoeff = popt[0]
        model = np.dot(xcoeff, bg_flat).reshape((1014,1014))
        models.append(model)
        
        # add header keywords of the fit components
        flt = pyfits.open('%s_flt.fits' %(exp), mode='update')
        flt[1].data -= model+blotted_bkg
        for i in range(NCOMP):
            if 'BGCOMP%d' %(i+1) in flt[0].header:
                flt[0].header['BGCOMP%d' %(i+1)] += xcoeff[i]
            else:
                flt[0].header['BGCOMP%d' %(i+1)] = xcoeff[i]                
        
        flt.flush()
        coeff_str = '  '.join(['%.4f' %c for c in xcoeff])
        threedhst.showMessage('Background subtraction, %s_flt.fits:\n\n  %s' %(exp, coeff_str))
        
    return models
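A hedged example call; the root name is illustrative. The returned list holds the fitted 1014x1014 background model for each exposure.
# Hedged usage sketch (root name illustrative):
models = subtract_flt_background(root='GOODN-N1-VBA-F105W', scattered_light=False, order=2)
# Each *_flt.fits has the polynomial model subtracted in place, with
# BGCOMP1..BGCOMPn header keywords recording the fit coefficients.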
Code example #32
File: sex.py  Project: aragilar/threedhst
 def swarpImage(self,inputImage, mode='direct', verbose=True):
     """
      swarpImage(self, inputImage, mode='direct', verbose=True)
     
     Writes configuration files and runs swarp on the input image
     
     mode can be:
     
      * 'waiterror': waits for swarp to finish, and raises an
        SExtractorError if it does not complete successfully. stdout
        and stderr are saved to self.lastout and self.lasterr (returns 0)
      * 'wait': waits for swarp to finish and returns the return code.
        stdout and stderr are saved to self.lastout and self.lasterr
      * 'proc': starts the process but does not wait - returns the Popen
        instance of the process
      * 'direct': runs swarp with stdout/stderr inherited from the caller
        and waits for it to finish (the default here)
     """
     from subprocess import Popen, PIPE
     from os.path import exists
     
     self.swarpInputImage = inputImage
     
     fnbase = self.name
     if not self.overwrite:
         fnbase = fnbase.replace('.swarp','')
         if exists(fnbase+'.swarp'):
             fns = fnbase.split('-')
             try:
                 i = int(fns[-1])
                 i+=1
             except ValueError:
                 i = 2
             if len(fns)<2:
                 fns.append(str(i))
             else:
                 fns[-1] = str(i)
             fnbase = '-'.join(fns)
         self.name = fnbase
             
     self._saveFiles(fnbase)
     if isinstance(inputImage,list):
         imgList = ' '.join(inputImage)
     else:
         imgList = inputImage
     
     clstr = 'swarp %s -c %s' %(imgList,self.name+'.swarp')
     
     #print "\n3DHST.sex.swarp.swarpImage:\n\n %s\n" %clstr
     if verbose:
         threedhst.showMessage('Running swarp: %s' %clstr)
     
     if mode == 'waiterror' or mode =='wait':
         # proc = Popen(clstr.split(),
         #              executable='swarp',stdout=PIPE,stderr=PIPE)
         #### Send STDERR output to temporary file because 
         #### swarp seems to spawn additional processed for large files
         #### and proc.wait() never finishes
         fp = open('sex_stderr','w')
         proc = Popen(clstr.split(),executable='swarp', stdout=PIPE,
                      stderr=fp)
         res = proc.wait()
         fp.close()
         
         if verbose: 
             print('Done.\n')
         
         sout, serr = proc.communicate()
         
         ## Read stderr output
         fp = open('sex_stderr','r')
         serr = ' '.join(fp.readlines())
         fp.close()
         
         self.lastout = sout
         self.lasterr = serr
         
         if res!=0 and mode == 'waiterror' :
             raise SError(serr,sout)
         return res
     elif mode == 'proc':
         return proc
     elif mode == 'direct':
         proc = Popen(clstr.split()) #,executable='swarp' #,stdout=PIPE,stderr=PIPE)
         res = proc.wait()
     else:
         raise ValueError('unrecognized mode argument '+str(mode))
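A hedged usage sketch; the enclosing wrapper class is assumed here to be `threedhst.sex.SWarp` (not shown in this excerpt), and the image names are illustrative.
# Hedged usage sketch (class name and file names are assumptions):
import threedhst.sex
sw = threedhst.sex.SWarp()
sw.swarpImage(['img1_drz.fits', 'img2_drz.fits'], mode='wait', verbose=True)
print(sw.lastout)   # stdout captured in 'wait' / 'waiterror' modes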
Code example #33
def drzTweakReg(sci='goodss-34-F140W_drz_sci.fits', master_catalog='goodss_radec.dat', threshold=20, apply=True):
    import drizzlepac
    from drizzlepac import tweakback
    from stwcs import updatewcs
    from threedhst import catIO
    
    se = threedhst.sex.SExtractor()
    se.options['WEIGHT_IMAGE'] = sci.replace('sci','wht')
    se.options['WEIGHT_TYPE'] = 'MAP_WEIGHT'
    #
    se.params['X_IMAGE'] = True; se.params['Y_IMAGE'] = True
    se.params['X_WORLD'] = True; se.params['Y_WORLD'] = True
    se.params['MAG_AUTO'] = True
    #
    se.options['CATALOG_NAME'] = sci+'.align.cat'
    se.options['FILTER'] = 'N'
    se.options['DETECT_THRESH'] = '%f' %(threshold)
    se.options['ANALYSIS_THRESH'] = '%f' %(threshold)
    #
    se.sextractImage(sci)
    threedhst.sex.sexcatRegions(sci+'.align.cat', sci+'.align.reg', format=2)
    
    c = catIO.Table(sci+'.align.cat', format='ascii.sextractor')
    #c.ra = c['X_WORLD']
    #c.dec = c['Y_WORLD']
    m = catIO.CoordinateMatcher(c, ra_column='X_WORLD', dec_column='Y_WORLD')
    r0, d0 = np.loadtxt(master_catalog, unpack=True)
    
    ### clip list to nearby objects
    rmed, dmed = np.median(c['X_WORLD']), np.median(c['Y_WORLD'])
    delta = np.sqrt((r0-rmed)**2/np.cos(dmed/180*np.pi)**2+(d0-dmed)**2)*60.
    nearby = delta < 8 # arcmin
    r0, d0 = r0[nearby], d0[nearby]
    
    dr, idx = m.match_list(r0, d0)
    
    dx = (c['X_WORLD'][idx]-r0)*np.cos(d0/180*np.pi)*3600
    dy = (c['Y_WORLD'][idx]-d0)*3600

         
    x0 = (c['X_WORLD'][idx]-np.median(c['X_WORLD']))*np.cos(d0/180*np.pi)*3600
    y0 = (c['Y_WORLD'][idx]-np.median(c['Y_WORLD']))*3600
    
    ok = dr < 1.5 
    if ok.sum() == 0:
        threedhst.showMessage('No matches found within 1.5".')
        return False
    
    # plt.scatter(x0[ok], y0[ok], color='black')
    # for i in np.arange(len(ok))[ok]:
    #     plt.plot(x0[i]+np.array([0, dx[i]*20]), y0[i]+np.array([0, dy[i]*20]), color='black')
     
    dra = (c['X_WORLD'][idx]-r0)
    dde = (c['Y_WORLD'][idx]-d0) 
    rshift, dshift = np.median(dra[ok]), np.median(dde[ok])
    
    fp = open(sci.split('.fits')[0]+'.align.dat','w')
    lines = ['# dx dy xrms yrms N\n# %s %s\n' %(sci, master_catalog), '%f %f %f %f %d\n' %(np.median(dx[ok]), np.median(dy[ok]), np.std(dx[ok]), np.std(dy[ok]), ok.sum())]
    fp.writelines(lines)
    fp.close()
    threedhst.showMessage(''.join(lines))
    
    if not apply:
        print('Not applying shifts.  Re-run with apply=True to apply them.')
        return rshift, dshift
        
    for fits in [sci.replace('sci','wht'), sci]:
        print('Update WCS: %s' % (fits))
        im = pyfits.open(fits, mode='update')
        im[0].header['CRVAL1'] -= rshift
        im[0].header['CRVAL2'] -= dshift
        im.flush()
    
    im = pyfits.open(sci)
    for i in range(im[0].header['NDRIZIM']):
        flt_im = im[0].header['D%03dDATA' %(i+1)].split('[')[0]
        print('Update WCS: %s' % (flt_im))
        flt = pyfits.open(flt_im, mode='update')
        for ext in [1,2]:
            flt[ext].header['CRVAL1'] -= rshift
            flt[ext].header['CRVAL2'] -= dshift

        flt.flush()
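A hedged example call; running with apply=False first reports the median offsets without touching any WCS keywords.
# Hedged usage sketch (file names illustrative):
rshift, dshift = drzTweakReg(sci='goodss-34-F140W_drz_sci.fits',
                             master_catalog='goodss_radec.dat',
                             threshold=20, apply=False)
# Re-run with apply=True to push the CRVAL shifts into the DRZ, WHT and FLT files.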
Code example #34
def subtract_flt_background(root='GOODN-N1-VBA-F105W',
                            scattered_light=False,
                            sex_background=False,
                            order=2):
    """
    Subtract polynomial background
    """
    import scipy.optimize

    import astropy.units as u

    from astropy.table import Table as table

    import stwcs
    from stwcs import updatewcs

    import drizzlepac
    from drizzlepac import astrodrizzle, tweakreg, tweakback

    import threedhst

    asn = threedhst.utils.ASNFile(root + '_asn.fits')
    for exp in asn.exposures:
        updatewcs.updatewcs('%s_%s.fits' % (exp, 'flt'))

    if not os.path.exists('%s_drz_sci.fits' % (root)):
        if len(asn.exposures) == 1:
            drizzlepac.astrodrizzle.AstroDrizzle(root + '_asn.fits',
                                                 clean=False,
                                                 context=False,
                                                 preserve=False,
                                                 skysub=True,
                                                 driz_separate=False,
                                                 driz_sep_wcs=False,
                                                 median=False,
                                                 blot=False,
                                                 driz_cr=False,
                                                 driz_cr_corr=False,
                                                 driz_combine=True)
        else:
            drizzlepac.astrodrizzle.AstroDrizzle(root + '_asn.fits',
                                                 clean=False,
                                                 context=False,
                                                 preserve=False,
                                                 skysub=True,
                                                 driz_separate=True,
                                                 driz_sep_wcs=True,
                                                 median=True,
                                                 blot=True,
                                                 driz_cr=True,
                                                 driz_cr_corr=True,
                                                 driz_combine=True)

    se = threedhst.sex.SExtractor()
    se.options['WEIGHT_IMAGE'] = '%s_drz_wht.fits' % (root)
    se.options['WEIGHT_TYPE'] = 'MAP_WEIGHT'
    se.options['CHECKIMAGE_TYPE'] = 'SEGMENTATION,BACKGROUND'
    se.options['CHECKIMAGE_NAME'] = '%s_drz_seg.fits,%s_drz_bkg.fits' % (root,
                                                                         root)
    se.options['BACK_TYPE'] = 'AUTO'
    se.options['BACK_SIZE'] = '256'
    #
    se.params['X_IMAGE'] = True
    se.params['Y_IMAGE'] = True
    se.params['MAG_AUTO'] = True
    #
    se.options['CATALOG_NAME'] = '%s_drz_sci.cat' % (root)
    se.options['FILTER'] = 'Y'
    se.copyConvFile()
    se.options['FILTER_NAME'] = 'gauss_4.0_7x7.conv'
    se.options['DETECT_THRESH'] = '0.8'
    se.options['ANALYSIS_THRESH'] = '0.8'
    #
    se.options['MEMORY_OBJSTACK'] = '30000'
    se.options['MEMORY_PIXSTACK'] = '3000000'
    se.options['MEMORY_BUFSIZE'] = '2048'

    se.sextractImage('%s_drz_sci.fits' % (root))
    #threedhst.sex.sexcatRegions('%s_flt.cat' %(exp), '%s_flt.reg' %(exp), format=1)

    #### Blot segmentation map to FLT images for object mask
    asn = threedhst.utils.ASNFile('%s_asn.fits' % (root))

    #print 'Read files...'
    ref = pyfits.open('%s_drz_sci.fits' % (root))
    ref_wcs = stwcs.wcsutil.HSTWCS(ref, ext=0)

    seg = pyfits.open('%s_drz_seg.fits' % (root))
    #### Fill ref[0].data with zeros for seg mask
    #seg_data = ref[0].data
    #seg_data[seg[0].data == 0] = 0
    seg_data = np.cast[np.float32](seg[0].data)

    bkg_data = pyfits.open('%s_drz_bkg.fits' % (root))[0].data

    yi, xi = np.indices((1014, 1014))
    if scattered_light:
        bg_components = np.ones((4, 1014, 1014))
        bg_components[1, :, :] = xi / 1014. * 2
        bg_components[2, :, :] = yi / 1014. * 2
        bg_components[3, :, :] = pyfits.open(
            os.getenv('THREEDHST') + '/CONF/G141_scattered_light.fits')[0].data
        #### Use flat-field itself for images affected by full-field
        #### persistence from the tungsten lamp
        if scattered_light == 2:
            bg_components[3, :, :] = pyfits.open(
                os.getenv('iref') + 'flat_UDF_F140W_v0.fits')[1].data[5:-5,
                                                                      5:-5]

        NCOMP = 4
    else:
        # bg_components = np.ones((3,1014,1014))
        # bg_components[1,:,:] = xi/1014.*2
        # bg_components[2,:,:] = yi/1014.*2
        # NCOMP=3
        #
        if order == 2:
            NCOMP = 6
            bg_components = np.ones((NCOMP, 1014, 1014))
            bg_components[1, :, :] = (xi - 507) / 507.
            bg_components[2, :, :] = (yi - 507) / 507.
            bg_components[3, :, :] = ((xi - 507) / 507.)**2
            bg_components[4, :, :] = ((yi - 507) / 507.)**2
            bg_components[5, :, :] = (xi - 507) * (yi - 507) / 507.**2
        else:
            NCOMP = 3
            bg_components = np.ones((NCOMP, 1014, 1014))
            bg_components[1, :, :] = (xi - 507) / 507.
            bg_components[2, :, :] = (yi - 507) / 507.

    bg_flat = bg_components.reshape((NCOMP, 1014**2))

    #### Loop through FLTs, blotting reference and segmentation
    models = []
    for exp in asn.exposures:
        flt = pyfits.open('%s_flt.fits' % (exp))  #, mode='update')
        flt_wcs = stwcs.wcsutil.HSTWCS(flt, ext=1)

        ### segmentation
        print('Segmentation image: %s_blot.fits' % (exp))
        blotted_seg = astrodrizzle.ablot.do_blot(seg_data + 0,
                                                 ref_wcs,
                                                 flt_wcs,
                                                 1,
                                                 coeffs=True,
                                                 interp='nearest',
                                                 sinscl=1.0,
                                                 stepsize=10,
                                                 wcsmap=None)

        blotted_bkg = 0.
        if sex_background:
            blotted_bkg = astrodrizzle.ablot.do_blot(bkg_data + 0,
                                                     ref_wcs,
                                                     flt_wcs,
                                                     1,
                                                     coeffs=True,
                                                     interp='nearest',
                                                     sinscl=1.0,
                                                     stepsize=10,
                                                     wcsmap=None)
            flt[1].data -= blotted_bkg

        mask = (blotted_seg == 0) & (flt['DQ'].data
                                     == 0) & (flt[1].data > -1) & (xi > 10) & (
                                         yi > 10) & (xi < 1004) & (yi < 1004)
        mask &= np.isfinite(flt[1].data) & np.isfinite(flt[2].data)
        mask &= (flt[1].data < 5 * np.median(flt[1].data[mask]))
        data_range = np.percentile(flt[1].data[mask], [2.5, 97.5])
        mask &= (flt[1].data >= data_range[0]) & (flt[1].data <= data_range[1])
        data_range = np.percentile(flt[2].data[mask], [0.5, 99.5])
        mask &= (flt[2].data >= data_range[0]) & (flt[2].data <= data_range[1])

        ### Least-sq fit for component normalizations
        data = flt[1].data[mask].flatten()
        wht = (1. / flt[2].data[mask].flatten())**2
        templates = bg_flat[:, mask.flatten()]
        p0 = np.zeros(NCOMP)
        p0[0] = np.median(data)
        obj_fun = threedhst.grism_sky.obj_lstsq
        print('XXX: %d' % (mask.sum()))
        popt = scipy.optimize.leastsq(obj_fun,
                                      p0,
                                      args=(data, templates, wht),
                                      full_output=True,
                                      ftol=1.49e-8 / 1000.,
                                      xtol=1.49e-8 / 1000.)
        xcoeff = popt[0]
        model = np.dot(xcoeff, bg_flat).reshape((1014, 1014))
        models.append(model)

        # add header keywords of the fit components
        flt = pyfits.open('%s_flt.fits' % (exp), mode='update')
        flt[1].data -= model + blotted_bkg
        for i in range(NCOMP):
            if 'BGCOMP%d' % (i + 1) in flt[0].header:
                flt[0].header['BGCOMP%d' % (i + 1)] += xcoeff[i]
            else:
                flt[0].header['BGCOMP%d' % (i + 1)] = xcoeff[i]

        flt.flush()
        coeff_str = '  '.join(['%.4f' % c for c in xcoeff])
        threedhst.showMessage('Background subtraction, %s_flt.fits:\n\n  %s' %
                              (exp, coeff_str))

    return models
Code example #35
def subtract_grism_background(asn_file='GDN1-G102_asn.fits', PATH_TO_RAW='../RAW/', final_scale=0.06, visit_sky=True, column_average=True, mask_grow=18, first_run=True, sky_iter=1):
    """
    Subtract master grism sky from FLTs
    """
    import os
    import scipy.ndimage as nd
    import pyregion
    
    from drizzlepac import astrodrizzle
    import drizzlepac
    
    from stwcs import updatewcs
    import stwcs
    
    import threedhst.grism_sky as bg
    
    asn = threedhst.utils.ASNFile(asn_file)
    root = asn_file.split('_asn')[0]
            
    sky_images = {'G141':['zodi_G141_clean.fits', 'excess_lo_G141_clean.fits', 'G141_scattered_light.fits'],
                  'G102':['zodi_G102_clean.fits', 'excess_G102_clean.fits']}
    #
    # sky_images = {'G141':['zodi_G141_clean.fits', 'excess_lo_G141_clean.fits', 'G141_scattered_light_v2.fits'],
    #               'G102':['zodi_G102_clean.fits', 'excess_G102_clean.fits']}
    
    # ### Don't use scattered light
    # sky_images = {'G141':['zodi_G141_clean.fits', 'excess_lo_G141_clean.fits'],
    #               'G102':['zodi_G102_clean.fits', 'excess_G102_clean.fits']}
    # 
    # ## Use aXe images
    # sky_images = {'G141':['WFC3.IR.G141.sky.V1.0.flat.fits', 'WFC3.IR.G141.sky.V1.0.flat.fits'],
    #               'G102':['zodi_G102_clean.fits', 'excess_G102_clean.fits']}
    
    if first_run:
        ### Rough background subtraction
        threedhst.process_grism.fresh_flt_files(asn_file, from_path=PATH_TO_RAW, preserve_dq=False)
        flt = pyfits.open('%s_flt.fits' %(asn.exposures[0]))
        GRISM = flt[0].header['FILTER']
        bg.set_grism_flat(grism=GRISM, verbose=True)
    
        zodi = pyfits.open(os.getenv('THREEDHST')+'/CONF/%s' %(sky_images[GRISM][0]))[0].data
    
        for exp in asn.exposures:
            updatewcs.updatewcs('%s_flt.fits' %(exp))
            flt = pyfits.open('%s_flt.fits' %(exp), mode='update')
            #flt = pyfits.open('%s_flt.fits' %(exp))
            flt[1].data *= bg.flat
            #
            mask = (flt['DQ'].data == 0)
            data_range = np.percentile(flt[1].data[mask], [20, 80])
            mask &= (flt[1].data >= data_range[0]) & (flt[1].data <= data_range[1]) & (flt[2].data != 0) & np.isfinite(flt[1].data) & np.isfinite(flt[2].data)
            ### Least-sq fit for component normalizations
            data = flt[1].data[mask].flatten()
            wht = (1./flt[2].data[mask].flatten())**2
            zodi_mask = zodi[mask].flatten()
            coeff_zodi = np.sum(data*zodi_mask*wht)/np.sum(zodi_mask**2*wht)
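            # This is the closed-form weighted least-squares amplitude for a
            # single template: the c that minimizes sum(wht*(data - c*zodi)**2)
            # is c = sum(data*zodi*wht) / sum(zodi**2*wht).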
            flt[1].data -= zodi*coeff_zodi
            flt.flush()
            threedhst.showMessage('Rough background for %s (zodi): %0.4f' %(exp, coeff_zodi))
            #templates = bg_flat[:, mask.flatten()]
        
        ### Run astrodrizzle to make DRZ mosaic, grism-SExtractor mask
        drizzlepac.astrodrizzle.AstroDrizzle(asn_file, clean=True, context=False, preserve=False, skysub=True, driz_separate=True, driz_sep_wcs=True, median=True, blot=True, driz_cr=True, driz_combine=True, final_wcs=False, resetbits=4096, final_bits=576, driz_sep_bits=576, driz_cr_snr='8.0 5.0', driz_cr_scale = '2.5 0.7')
                
    else:
        flt = pyfits.open('%s_flt.fits' %(asn.exposures[0]))
        GRISM = flt[0].header['FILTER']
        bg.set_grism_flat(grism=GRISM, verbose=True)
    
        
    se = threedhst.sex.SExtractor()
    se.options['WEIGHT_IMAGE'] = '%s_drz_wht.fits' %(root)
    se.options['WEIGHT_TYPE'] = 'MAP_WEIGHT'
    se.options['CHECKIMAGE_TYPE'] = 'SEGMENTATION'
    se.options['CHECKIMAGE_NAME'] = '%s_drz_seg.fits' %(root)
    #
    se.params['X_IMAGE'] = True; se.params['Y_IMAGE'] = True
    se.params['MAG_AUTO'] = True
    #
    se.options['CATALOG_NAME'] = '%s_drz_sci.cat' %(root)
    se.options['FILTER'] = 'Y'
    se.copyConvFile(grism=True)
    se.options['FILTER_NAME'] = 'grism.conv'
    se.options['DETECT_THRESH'] = '0.7'
    se.options['ANALYSIS_THRESH'] = '0.7'
    #
    se.sextractImage('%s_drz_sci.fits' %(root))
    
    #### Blot segmentation map to FLT images for object mask
    ref = pyfits.open('%s_drz_sci.fits' %(root))
    ref_wcs = stwcs.wcsutil.HSTWCS(ref, ext=0)

    seg = pyfits.open('%s_drz_seg.fits' %(root))
    seg_data = np.cast[np.float32](seg[0].data)
            
    #### Loop through FLTs, blotting reference and segmentation
    threedhst.showMessage('%s: Blotting grism segmentation masks.' %(root))
        
    for exp in asn.exposures:
        flt = pyfits.open('%s_flt.fits' %(exp))
        flt_wcs = stwcs.wcsutil.HSTWCS(flt, ext=1)
        ### segmentation
        #print 'Segmentation image: %s_blot.fits' %(exp)
        blotted_seg = astrodrizzle.ablot.do_blot(seg_data, ref_wcs, flt_wcs, 1, coeffs=True, interp='nearest', sinscl=1.0, stepsize=10, wcsmap=None)
        seg_grow = nd.maximum_filter((blotted_seg > 0)*1, size=8)
        pyfits.writeto('%s_flt.seg.fits' %(exp), header=flt[1].header, data=seg_grow, clobber=True)
        
    if first_run:
        ### Run background subtraction scripts
        threedhst.process_grism.fresh_flt_files(asn_file, from_path=PATH_TO_RAW, preserve_dq=False)
        for exp in asn.exposures:
            updatewcs.updatewcs('%s_flt.fits' %(exp))
            #threedhst.grism_sky.remove_grism_sky(flt=exp+'_flt.fits', list=sky_images[GRISM], path_to_sky=os.getenv('THREEDHST')+'/CONF/', verbose=True, second_pass=True, overall=True)
    
    if visit_sky:
        threedhst.grism_sky.remove_visit_sky(asn_file=asn_file, list=sky_images[GRISM], add_constant=False, column_average=(column_average) & (sky_iter == 1), mask_grow=mask_grow, flat_correct=first_run)
        if (sky_iter > 1) and (not first_run):
            for i in range(1, sky_iter):
                threedhst.grism_sky.remove_visit_sky(asn_file=asn_file, list=sky_images[GRISM], add_constant=False, column_average=column_average & (i == (sky_iter-1)), mask_grow=mask_grow, flat_correct=False)
    else:
        for exp in asn.exposures:
            threedhst.grism_sky.remove_grism_sky(flt='%s_flt.fits' %(exp), list=sky_images[GRISM],  path_to_sky = os.getenv('THREEDHST')+'/CONF/', out_path='./', verbose=False, plot=False, flat_correct=first_run, sky_subtract=True, second_pass=column_average, overall=True, combine_skies=False, sky_components=True, add_constant=False)
            
    ### Astrodrizzle again to reflag CRs and make cleaned mosaic
    drizzlepac.astrodrizzle.AstroDrizzle(asn_file, clean=True, skysub=False, skyuser='******', final_wcs=True, final_scale=final_scale, final_pixfrac=0.8, context=False, resetbits=4096, final_bits=576, driz_sep_bits=576, preserve=False, driz_cr_snr='8.0 5.0', driz_cr_scale='2.5 0.7') # , final_wcs=True, final_rot=0)
Code example #36
def get_vizier_cat(image='RXJ2248-IR_sci.fits', ext=0, catalog="II/246"):
    """
    Get a list of RA/Dec coords from a Vizier catalog that can be used
    for WCS alignment.
    
    `catalog` is any catalog ID recognized by Vizier, e.g.: 
        "II/328/allwise": WISE
        "II/246": 2MASS
        "V/139": SDSS DR9
    """
    import threedhst.dq
    import astropy.wcs as pywcs
    from astropy.table import Table as table
    import astropy.io.fits as pyfits
    
    import astroquery
    from astroquery.vizier import Vizier
    import astropy.coordinates as coord
    import astropy.units as u
    
    im = pyfits.open(image)
    
    wcs = pywcs.WCS(im[ext].header)
    #wcs = pywcs.WCS(pyfits.getheader('Q0821+3107-F140W_drz.fits', 1))

    Vizier.ROW_LIMIT = -1
            
    r0, d0 = wcs.wcs_pix2world([[im[ext].header['NAXIS1']/2., im[ext].header['NAXIS2']/2.]], 1)[0]
    foot = wcs.calc_footprint()
    
    corner_radius = np.sqrt((foot[:,0]-r0)**2/np.cos(d0/360.*2*np.pi)**2 + (foot[:,1]-d0)**2).max()*60*1.1

    try:
        c = coord.ICRS(ra=r0, dec=d0, unit=(u.deg, u.deg))
    except:
        c = coord.ICRSCoordinates(ra=r0, dec=d0, unit=(u.deg, u.deg))
        
    #### something with astropy.coordinates
    # c.icrs.ra.degree = c.icrs.ra.degrees
    # c.icrs.dec.degree = c.icrs.dec.degrees
    #
    vt = Vizier.query_region(c, radius=u.Quantity(corner_radius, u.arcminute), catalog=[catalog])
    if not vt:
        threedhst.showMessage('No matches found in Vizier %s @ (%.6f, %.6f).\n\nhttp://vizier.u-strasbg.fr/viz-bin/VizieR?-c=%.6f+%.6f&-c.rs=8' %(catalog, r0, d0, r0, d0), warn=True)
        return False
    
    vt = vt[0]
            
    #### Make a region file
    ra_list, dec_list = vt['RAJ2000'], vt['DEJ2000']
    print('Vizier, found %d objects in %s.' % (len(ra_list), catalog))
    
    fp = open('%s.vizier.radec' %(image.split('.fits')[0]), 'w')
    fpr = open('%s.vizier.reg' %(image.split('.fits')[0]), 'w')
    
    fp.write('# %s, r=%.1f\'\n' %(catalog, corner_radius))
    fpr.write('# %s, r=%.1f\'\nfk5\n' %(catalog, corner_radius))
    for ra, dec in zip(ra_list, dec_list):
        fp.write('%.7f %.7f\n' %(ra, dec))
        fpr.write('circle(%.6f, %.6f, 0.5")\n' %(ra, dec))
    
    fpr.close()
    fp.close()
    
    return True
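A hedged example call; any catalog ID recognized by Vizier can be substituted.
# Hedged usage sketch (image name illustrative):
ok = get_vizier_cat(image='RXJ2248-IR_sci.fits', ext=0, catalog='II/246')   # 2MASS
# On success this writes RXJ2248-IR_sci.vizier.radec (RA/Dec list for WCS
# alignment) and RXJ2248-IR_sci.vizier.reg (DS9 region file).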
Code example #37
File: shifts.py  Project: themiyan/threedhst
def refine_shifts(ROOT_DIRECT='f160w',
                  ALIGN_IMAGE='../../ACS/h_sz*drz_img.fits',
                  fitgeometry='shift',
                  clean=True,
                  ALIGN_EXTENSION=0,
                  shift_params=None,
                  toler=3,
                  maxtoler=5,
                  align_sdss_ds9=False,
                  verbose=False):
    """
refine_shifts(ROOT_DIRECT='f160w',
              ALIGN_IMAGE='../../ACS/h_sz*drz_img.fits',
              fitgeometry='shift', clean=True)
                
    Refine shifts by catalog matching an input multidrizzle image, 
    ROOT_DIRECT+'_drz.fits' to one or more alignment images
    """

    run = threedhst.prep_flt_files.MultidrizzleRun(ROOT_DIRECT.upper())

    ## radius for match is 2**toler.  Make it larger if fit comes out bad
    #toler, maxtoler = 3, 5
    iter, MAXIT = 0, 5
    xrms, yrms = 100, 100
    if shift_params is not None:
        xshift, yshift, rot, scale = shift_params
        threedhst.showMessage('Using specified DRZ-frame shifts: %f %f %f %f' %
                              (xshift, yshift, rot, scale))
    else:
        threedhst.showMessage('Aligning WCS to %s (%s)' %
                              (threedhst.options['ALIGN_IMAGE'], fitgeometry))
        while ((xrms > 1) | (yrms > 1)) & (toler <= maxtoler) & (iter < MAXIT):
            iter = iter + 1
            xshift, yshift, rot, scale, xrms, yrms = threedhst.shifts.align_to_reference(
                ROOT_DIRECT,
                ALIGN_IMAGE,
                fitgeometry=fitgeometry,
                clean=clean,
                ALIGN_EXTENSION=ALIGN_EXTENSION,
                toler=toler,
                skip_swarp=(toler > 3),
                align_sdss_ds9=align_sdss_ds9,
                verbose=verbose)
            toler += 1

    #### shifts measured in DRZ frame.  Translate to FLT frame
    drz = pyfits.open(ROOT_DIRECT + '_drz.fits')
    #alpha = (180.-drz[1].header['PA_APER'])/360.*2*np.pi
    #### Get reference angle from first image in the ASN file
    asn = threedhst.utils.ASNFile(ROOT_DIRECT + '_asn.fits')
    alpha = (180. - pyfits.getheader(asn.exposures[0] + '_flt.fits',
                                     1)['PA_APER']) / 360. * 2 * np.pi

    xsh = (xshift * np.cos(alpha) - yshift * np.sin(alpha)) * float(run.scl)
    ysh = (xshift * np.sin(alpha) + yshift * np.cos(alpha)) * float(run.scl)

    print('Final shift:', xsh, ysh, drz[1].header['PA_APER'])
    fp = open(ROOT_DIRECT + '_align.info', 'w')
    fp.write('%s %8.3f %8.3f %8.3f\n' % (ALIGN_IMAGE, xsh, ysh, rot))
    fp.close()

    #### Read the shiftfile
    shiftF = threedhst.shifts.ShiftFile(ROOT_DIRECT + '_shifts.txt')

    #### Apply the alignment shifts to the shiftfile
    shiftF.xshift = list(np.array(shiftF.xshift) - xsh)
    shiftF.yshift = list(np.array(shiftF.yshift) - ysh)
    shiftF.rotate = list((np.array(shiftF.rotate) + rot) % 360)
    shiftF.scale = list(np.array(shiftF.scale) * scale)

    shiftF.write(ROOT_DIRECT + '_shifts.txt')
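A hedged example call with illustrative arguments; the function expects ROOT_DIRECT+'_drz.fits', '_asn.fits' and '_shifts.txt' to already exist in the working directory.
# Hedged usage sketch (names illustrative):
refine_shifts(ROOT_DIRECT='f160w',
              ALIGN_IMAGE='../../ACS/h_sz*drz_img.fits',
              fitgeometry='shift', clean=True)
# The measured DRZ-frame offsets are rotated into the FLT frame, folded
# into f160w_shifts.txt, and summarized in f160w_align.info.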
Code example #38
File: catIO.py  Project: kewhitaker/threedhst
 def __init__(self, infile='files.info', force_lowercase = True,
              comment_char='#', verbose=False, save_fits = True):
     
     self.filename = infile
     self.verbose = verbose
     
     #### Load the FITS version of the catalog, if it exists
     status = self.load_fits()
     if status:
         return None
         
     self.comment_char = comment_char
     
     #### read the lines of the file
     fp = open(infile,'r')
     lines = fp.readlines()
     fp.close()
     
     if len(lines) < 2:
         threedhst.showMessage('Only %d lines in %s.' %(len(lines), infile), warn=True)
         self.status = None
         return None
     
     if not lines[0].startswith(comment_char):
         threedhst.showMessage('First line of %s doesn\'t start with \'%s\':\n%s' %(infile,
                                comment_char, lines[0]), warn=True)
         self.status = None
         return None
         
     #### get the column names from the first line
     header = lines[0]
     columns = header.replace(comment_char,'').split()
     NCOLUMNS = len(columns)
     
     #### parse column names, fixing characters.
     dict = {}
     for i in range(NCOLUMNS):
         if verbose > 1:
             print columns[i]
         col = columns[i].replace('-','_').replace('.','p')
         if force_lowercase:
             col = col.lower()
         for str in '()[]':
             col = col.replace(str,'')
         #
         if col[0].isdigit():
             col = '_'+col
         #    
         columns[i] = col
         dict[col] = []
     
     #### skip header lines
     ix=0
     line = lines[ix]
     while line.startswith(comment_char) & (ix < len(lines)-1):
         ix+=1
         line = lines[ix]
     
     if ix == len(lines):
         self.status = None
         return None
         
     #### Parse the lines into the data columns
     N=0
     for line in lines[ix:]:
         spl = line.split()
         if len(spl) == NCOLUMNS:
             N+=1
             for i in range(NCOLUMNS):
                 dict[columns[i]].append(spl[i])
     
     if verbose > 3:
         print dict
         
     #### Convert data to numpy arrays and change data types from
     #### strings to int and float as necessary
     for col in columns:
         dict[col] = np.array(dict[col])
         item = dict[col][0]
         try:
             fl = float(item)
             isNumber = True
         except:
             isNumber = False
         
         if isNumber:
             try:
                 dict[col] = np.cast[float](dict[col])
             except:
                 pass
         #int
         #print 'x '+item+' x'
         if item.isdigit():
             try:
                 dict[col] = np.cast[int](dict[col])
             except:
                 pass
                 
         str = 'self.%s = dict[col]' %(col)
         #print str
         exec(str)
         
     self.NCOLUMNS = NCOLUMNS
     self.columns = columns
     self.N = N
     self.status = True
     
     if save_fits:
         self.write_fits()
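
The constructor above reads a whitespace-delimited ascii table whose first line is a comment_char-prefixed header of column names. A self-contained sketch of the same convention, with a hypothetical helper and made-up catalog text, and none of the threedhst/FITS machinery:

import numpy as np

def read_ascii_catalog(text, comment_char='#'):
    """Parse '# col1 col2 ...' plus whitespace-delimited rows into arrays.

    Columns that can be cast to float are returned as float arrays,
    loosely mirroring the type handling in catIO above.
    """
    lines = text.strip().splitlines()
    columns = lines[0].replace(comment_char, '').split()
    data = {col: [] for col in columns}
    for line in lines[1:]:
        spl = line.split()
        if line.startswith(comment_char) or len(spl) != len(columns):
            continue
        for col, val in zip(columns, spl):
            data[col].append(val)
    for col in columns:
        arr = np.array(data[col])
        try:
            arr = arr.astype(float)
        except ValueError:
            pass
        data[col] = arr
    return data

#### Hypothetical catalog text, for illustration only
cat = read_ascii_catalog("# id mag flag\n1 21.3 ok\n2 22.8 bad\n")
print(cat['mag'].mean())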
コード例 #39
ファイル: sex.py プロジェクト: irhamta/eazy-photoz
    def swarpImage(self, inputImage, mode='direct', verbose=True):
        """
        swarpImage(self, inputImage, mode='direct', verbose=True)
        
        Writes configuration files and runs swarp on the input image.
        
        mode can be:
        
        * 'waiterror': waits for swarp to finish, and raises an 
          SExtractorError if it does not complete successfully. stdout 
          and stderr are saved to self.lastout and self.lasterr (returns 0)
        * 'wait': waits for swarp to finish and returns the return code.
          stdout and stderr are saved to self.lastout and self.lasterr
        * 'proc': starts the process but does not wait - returns the Popen 
          instance of the process
        * 'direct': runs swarp with output sent directly to the terminal 
          and waits for it to finish (the default)
        """
        from subprocess import Popen, PIPE
        from os.path import exists

        self.swarpInputImage = inputImage

        fnbase = self.name
        if not self.overwrite:
            fnbase = fnbase.replace('.swarp', '')
            if exists(fnbase + '.swarp'):
                fns = fnbase.split('-')
                try:
                    i = int(fns[-1])
                    i += 1
                except ValueError:
                    i = 2
                if len(fns) < 2:
                    fns.append(str(i))
                else:
                    fns[-1] = str(i)
                fnbase = '-'.join(fns)
            self.name = fnbase

        self._saveFiles(fnbase)
        if isinstance(inputImage, list):
            imgList = ' '.join(inputImage)
        else:
            imgList = inputImage

        clstr = 'swarp %s -c %s' % (imgList, self.name + '.swarp')

        #print "\n3DHST.sex.swarp.swarpImage:\n\n %s\n" %clstr
        if verbose:
            threedhst.showMessage('Running swarp: %s' % clstr)

        if mode == 'waiterror' or mode == 'wait':
            # proc = Popen(clstr.split(),
            #              executable='swarp',stdout=PIPE,stderr=PIPE)
            #### Send STDERR output to temporary file because
            #### swarp seems to spawn additional processes for large files
            #### and proc.wait() never finishes
            fp = open('sex_stderr', 'w')
            proc = Popen(clstr.split(),
                         executable='swarp',
                         stdout=PIPE,
                         stderr=fp)
            res = proc.wait()
            fp.close()

            if verbose:
                print('Done.\n')

            sout, serr = proc.communicate()

            ## Read stderr output
            fp = open('sex_stderr', 'r')
            serr = ' '.join(fp.readlines())
            fp.close()

            self.lastout = sout
            self.lasterr = serr

            if res != 0 and mode == 'waiterror':
                raise SError(serr, sout)
            return res
        elif mode == 'proc':
            #### Start the process without waiting for it to finish
            proc = Popen(clstr.split(), executable='swarp',
                         stdout=PIPE, stderr=PIPE)
            return proc
        elif mode == 'direct':
            proc = Popen(
                clstr.split())  #,executable='swarp' #,stdout=PIPE,stderr=PIPE)
            res = proc.wait()
        else:
            raise ValueError('unrecognized mode argument ' + str(mode))
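
The 'wait'/'waiterror' branch above streams stderr to a scratch file so that proc.wait() returns reliably while stdout is still captured through a pipe. A sketch of that pattern with a generic shell command standing in for swarp (swarp itself is not assumed to be installed):

from subprocess import Popen, PIPE

#### Stand-in command line; in swarpImage this would be 'swarp <images> -c <config>'
clstr = 'echo hello from a subprocess'

fp = open('cmd_stderr', 'w')
proc = Popen(clstr.split(), stdout=PIPE, stderr=fp)
res = proc.wait()                 # stderr already streamed to the file
sout, _ = proc.communicate()      # flush whatever remains on stdout
fp.close()

serr = open('cmd_stderr').read()
print(res, sout.decode().strip(), repr(serr))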
コード例 #40
ファイル: grism_sky.py プロジェクト: AyushYadav/threedhst
def remove_visit_sky(asn_file='GDN12-G102_asn.fits', list=['zodi_G102_clean.fits', 'excess_G102_clean.fits'], add_constant=False, column_average=True, mask_grow=18, flat_correct=True):
    """
    Require that all exposures in a visit have the same zodi component.
    """
    from scipy.linalg import lstsq
    import scipy.optimize
    import scipy.ndimage as nd
    import astropy.io.fits as pyfits
    
    import copy
    
    import threedhst.grism_sky as bg
    
    asn = threedhst.utils.ASNFile(asn_file)
    
    flt = pyfits.open('%s_flt.fits' %(asn.exposures[0]))
    bg.set_grism_flat(grism=flt[0].header['FILTER'], verbose=True)
    
    if flat_correct:
        flat = bg.flat*1.
    else:
        flat = bg.flat*0.+1
        
    data = []
    whts = []
    masks = []
    for exp in asn.exposures:
        flt = pyfits.open('%s_flt.fits' %(exp))
        segfile = '%s_flt.seg.fits' %(exp)
        seg = pyfits.open(segfile)[0].data
        seg_mask = nd.maximum_filter((seg > 0), size=18) == 0
        dq_ok = (flt[3].data & (4+32+16+512+2048+4096)) == 0
        #
        flat_corr = flt[1].data*flat
        mask = seg_mask & dq_ok 
        mask &= (flat_corr < np.percentile(flat_corr[mask], 98)) & (flt[2].data > 0) & (flat_corr > np.percentile(flat_corr[mask], 1))
        #
        data.append(flat_corr.flatten())
        whts.append(1/flt[2].data.flatten()**2)
        masks.append(mask.flatten())
    
    data = np.array(data)
    whts = np.array(whts)
    masks = np.array(masks)
    
    #### Read in the master skies    
    ims = []
    skies = copy.deepcopy(list)
    
    for sky in skies:
        ims.append(pyfits.open(os.getenv('THREEDHST') + '/CONF/' + sky)[0].data.flatten())
    
    if add_constant:
        ims.append(flt[1].data.flatten()*0.+1)
        skies.append('Constant')
    
    ims = np.array(ims)

    #### Do the fit
    tol = 1.49e-8  # default ftol/xtol of scipy.optimize.leastsq; tightened further below
    
    p0 = np.ones((ims.shape[0]-1)*len(asn.exposures)+1)
    popt = scipy.optimize.leastsq(bg.obj_lstsq_visit, p0, args=(data, ims, whts, masks), full_output=True, ftol=tol/1000., xtol=tol/1000.)
    xcoeff = popt[0]
    
    sh_temp = ims.shape
    logstr = 'Master grism sky: %s\n\n FLT   %s\n' %(asn_file, '  '.join(skies))
    
    for i in range(len(asn.exposures)):
        coeff = np.zeros(sh_temp[0])
        coeff[0] = xcoeff[0]
        coeff[1:] = xcoeff[1+i*(sh_temp[0]-1):1+(i+1)*(sh_temp[0]-1)]
        bg_model = np.dot(coeff, ims).reshape((1014,1014))
        logstr += '%s  %s\n' %(asn.exposures[i], ''.join([' %9.4f' %(c) for c in coeff]))
        flt = pyfits.open('%s_flt.fits' %(asn.exposures[i]), mode='update')
        flt[1].data = flt[1].data*flat - bg_model
        for j in range(sh_temp[0]):
            if 'GSKY%02d' %(j) in flt[0].header:
                flt[0].header['GSKY%02d' %(j)] += coeff[j]
            else:
                flt[0].header['GSKY%02d' %(j)] = (coeff[j], 'Master sky: %s' %(skies[j]))
        #
        flt[1].header['MDRIZSKY'] = 0.
        if 'SKYFLAT' in flt[0].header.keys():
            flt[0].header['SKYFLAT'] = (flat_correct | flt[0].header['SKYFLAT'], 'Direct image flat applied')
        else:
            flt[0].header['SKYFLAT'] = (flat_correct, 'Direct image flat applied')
        flt.flush()
        
    threedhst.showMessage(logstr)
    
    if column_average:
        #for iter in range(2):
        #grism_sky_column_average(asn_file=asn_file, mask_grow=mask_grow)
        grism_sky_column_average_GP(asn_file=asn_file, mask_grow=mask_grow)
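
The visit-level fit above solves for coefficients of a few master-sky templates, with the zodi term shared across exposures. A sketch of the core linear step for a single exposure, with synthetic arrays standing in for the FLT and master-sky images and the shared-coefficient constraint dropped:

import numpy as np

rng = np.random.default_rng(1)

npix = 5000                       # stand-in for the flattened 1014x1014 detector
templates = np.vstack([           # stand-in master-sky components
    np.ones(npix),                # zodiacal-like constant term
    rng.normal(1.0, 0.1, npix),   # structured excess component
])
truth = np.array([0.7, 0.3])
image = truth @ templates + rng.normal(0, 0.01, npix)
ivar = np.full(npix, 1.e4)        # inverse-variance weights, 1/ERR**2
mask = np.ones(npix, dtype=bool)  # segmentation/DQ mask would go here

#### Weighted linear least squares on the unmasked pixels
A = (templates[:, mask] * np.sqrt(ivar[mask])).T
b = image[mask] * np.sqrt(ivar[mask])
coeff, resid, rank, sv = np.linalg.lstsq(A, b, rcond=None)

bg_model = coeff @ templates      # analogous to np.dot(coeff, ims) above
print(coeff)                      # recovers ~[0.7, 0.3]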
コード例 #41
def runTweakReg(asn_file='GOODS-S-15-F140W_asn.fits', master_catalog='goodss_radec.dat', final_scale=0.06, ACS=False, threshold=5):
    """
    Wrapper around tweakreg that generates the source catalogs with 
    SExtractor rather than with tweakreg's internal `findpars` step.
    """
    import glob
    import shutil
    
    import drizzlepac
    from drizzlepac import tweakreg
    from stwcs import updatewcs
    
    import threedhst.prep_flt_astrodrizzle
    
    asn = threedhst.utils.ASNFile(asn_file)
    
    if ACS:
        NCHIP=2
        sci_ext = [1,4]
        wht_ext = [2,5]
        ext = 'flc'
        dext = 'crclean'
    else:
        NCHIP=1
        sci_ext = [1]
        wht_ext = [2]
        ext = 'flt'
        dext = 'flt'
        
    ### Generate CRCLEAN images
    for exp in asn.exposures:
        updatewcs.updatewcs('%s_%s.fits' %(exp, ext))
    
    has_crclean = True
    for exp in asn.exposures:
        has_crclean &= os.path.exists('%s_crclean.fits' %(exp))
    
    threedhst.showMessage('# exposures: %d' %(len(asn.exposures)))
    
    if not has_crclean: 
        if len(asn.exposures) == 1:
            drizzlepac.astrodrizzle.AstroDrizzle(asn_file, clean=False, context=False, preserve=False, skysub=True, driz_separate=False, driz_sep_wcs=False, median=False, blot=False, driz_cr=False, driz_cr_corr=False, driz_combine=True)
            shutil.copy('%s_%s.fits' %(asn.exposures[0], ext), '%s_crclean.fits' %(asn.exposures[0]))
        else:
            drizzlepac.astrodrizzle.AstroDrizzle(asn_file, clean=False, context=False, preserve=False, skysub=True, driz_separate=True, driz_sep_wcs=True, median=True, blot=True, driz_cr=True, driz_cr_corr=True, driz_combine=True)
        
    #### Make SExtractor source catalogs in *each* flt
    for exp in asn.exposures:
        #updatewcs.updatewcs('%s_%s.fits' %(exp, ext))
        for i in range(NCHIP):
            se = threedhst.sex.SExtractor()
            se.options['WEIGHT_IMAGE'] = '%s_%s.fits[%d]' %(exp, dext, wht_ext[i]-1)
            se.options['WEIGHT_TYPE'] = 'MAP_RMS'
            #
            se.params['X_IMAGE'] = True; se.params['Y_IMAGE'] = True
            se.params['MAG_AUTO'] = True
            #
            se.options['CATALOG_NAME'] = '%s_%s_%d.cat' %(exp, ext, sci_ext[i])
            se.options['FILTER'] = 'N'
            se.options['DETECT_THRESH'] = '%f' %(threshold)
            se.options['ANALYSIS_THRESH'] = '%f' %(threshold)
            #
            se.sextractImage('%s_%s.fits[%d]' %(exp, dext, sci_ext[i]-1))
            threedhst.sex.sexcatRegions('%s_%s_%d.cat' %(exp, ext, sci_ext[i]), '%s_%s_%d.reg' %(exp, ext, sci_ext[i]), format=1)
    
    #### TweakReg catfile
    asn_root = asn_file.split('_asn')[0]
    catfile = '%s.catfile' %(asn_root)
    fp = open(catfile,'w')
    for exp in asn.exposures:
        line = '%s_%s.fits' %(exp, ext)
        for i in range(NCHIP):
            line += ' %s_%s_%d.cat' %(exp, ext, sci_ext[i])
        
        fp.write(line + '\n')
    
    fp.close()
    
    #### First run AstroDrizzle mosaic
    #drizzlepac.astrodrizzle.AstroDrizzle(asn_file, clean=True, context=False, preserve=False, skysub=True, driz_separate=False, driz_sep_wcs=False, median=False, blot=False, driz_cr=False, driz_combine=True)
    
    #### Make room for TWEAK wcsname
    for exp in asn.exposures:
        threedhst.prep_flt_astrodrizzle.clean_wcsname(flt='%s_%s.fits' %(exp, ext), wcsname='TWEAK', ACS=ACS)
    
    #### Main run of TweakReg
    if ACS:
        refimage = '%s_drc_sci.fits' %(asn_root)
    else:
        refimage = '%s_drz_sci.fits' %(asn_root)
        
    tweakreg.TweakReg(asn_file, refimage=refimage, updatehdr=True, updatewcs=True, catfile=catfile, xcol=2, ycol=3, xyunits='pixels', refcat=master_catalog, refxcol=1, refycol=2, refxyunits='degrees', shiftfile=True, outshifts='%s_shifts.txt' %(asn_root), outwcs='%s_wcs.fits' %(asn_root), searchrad=5, tolerance=12, wcsname='TWEAK', interactive=False, residplot='No plot', see2dplot=False, clean=True, headerlet=True, clobber=True)
    
    #### Run AstroDrizzle again
    if ACS:
        drizzlepac.astrodrizzle.AstroDrizzle(asn_file, clean=True, final_scale=final_scale, final_pixfrac=0.8, context=False, resetbits=4096, final_bits=576, preserve=False)
    else:
        if len(asn.exposures) == 1:
            drizzlepac.astrodrizzle.AstroDrizzle(asn_file, clean=True, final_scale=final_scale, final_pixfrac=0.8, context=False, resetbits=4096, final_bits=576, driz_sep_bits=576, preserve=False, driz_cr_snr='8.0 5.0', driz_cr_scale = '2.5 0.7', driz_separate=False, driz_sep_wcs=False, median=False, blot=False, driz_cr=False, driz_cr_corr=False) # , final_wcs=True, final_rot=0)
        else:
            drizzlepac.astrodrizzle.AstroDrizzle(asn_file, clean=True, final_scale=final_scale, final_pixfrac=0.8, context=False, resetbits=4096, final_bits=576, driz_sep_bits=576, preserve=False, driz_cr_snr='8.0 5.0', driz_cr_scale = '2.5 0.7') # , final_wcs=True, final_rot=0)
        
    for exp in asn.exposures:
        files=glob.glob('%s*coo' %(exp))
        files.extend(glob.glob('%s*crclean.fits' %(exp)))
        for file in files:
            os.remove(file)
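
The catfile handed to TweakReg above is a plain-text file with one line per exposure: the FLT/FLC name followed by its per-chip SExtractor catalogs. A sketch of writing one, with hypothetical exposure rootnames and catalogs assumed to already exist:

#### Hypothetical exposure rootnames; in runTweakReg these come from the ASN file
exposures = ['ib3701s4q', 'ib3701skq']
ext, sci_ext = 'flt', [1]          # WFC3/IR case; ACS would use 'flc' and [1, 4]

fp = open('GOODS-S-15-F140W.catfile', 'w')
for exp in exposures:
    line = '%s_%s.fits' % (exp, ext)
    for chip in sci_ext:
        line += ' %s_%s_%d.cat' % (exp, ext, chip)
    fp.write(line + '\n')

fp.close()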
コード例 #42
ファイル: grism_sky.py プロジェクト: AyushYadav/threedhst
def grism_sky_column_average(asn_file='GDN12-G102_asn.fits', iter=2, mask_grow=8):
    """
    Remove column-averaged residuals from grism exposures
    """
    import scipy.ndimage as nd
    import astropy.io.fits as pyfits
    
    asn = threedhst.utils.ASNFile(asn_file)
            
    for k in range(len(asn.exposures)):
        #### 1D column averages
        flt = pyfits.open('%s_flt.fits' %(asn.exposures[k]), mode='update')
        segfile = '%s_flt.seg.fits' %(asn.exposures[k])
        seg = pyfits.open(segfile)[0].data
        seg_mask = nd.maximum_filter((seg > 0), size=mask_grow) == 0
        dq_ok = (flt[3].data & (4+32+16+512+2048+4096)) == 0
        
        mask = seg_mask & dq_ok & (flt[2].data > 0)
        
        #### Iterative clips on percentile
        #mask &= (flt[1].data < np.percentile(flt[1].data[mask], 98)) & (flt[2].data > 0) & (flt[1].data > np.percentile(flt[1].data[mask], 2))
        #mask &= (flt[1].data < np.percentile(flt[1].data[mask], 84)) & (flt[2].data > 0) & (flt[1].data > np.percentile(flt[1].data[mask], 16))
                    
        residuals = []
        for j in range(iter):
            masked = flt[1].data*1
            masked[~mask] = np.nan
            yres = np.zeros(1014)
            for i in range(1014):
                # ymsk = mask[:,i]
                # yres[i] = np.median(flt[1].data[ymsk,i])
                ymsk = masked[:,i]
                #ymsk = masked[:,np.maximum(i-10,0):i+10]
                #yres[i] = np.median(ymsk[np.isfinite(ymsk)])
                ok = np.isfinite(ymsk)
                ymsk[(ymsk > np.percentile(ymsk[ok], 84)) | (ymsk < np.percentile(ymsk[ok], 16))] = np.nan
                yres[i] = np.mean(ymsk[np.isfinite(ymsk)])
                
            #
            resid = threedhst.utils.medfilt(yres, 41)
            #
            #resid = np.dot(np.ones((1014,1)), yres_sm.reshape(1,1014))
            flt[1].data -= resid
            residuals.append(resid*1)
            
        threedhst.showMessage('Remove column average: %s' %(asn.exposures[k]))
        flt.flush()
        #flt.writeto(flt.filename(), clobber=True)
        
        #plt.plot(yres_sm)
        
        ### Make figure
        from matplotlib.figure import Figure
        from matplotlib.backends.backend_agg import FigureCanvasAgg
        
        fig = Figure(figsize=[6,4], dpi=100)

        fig.subplots_adjust(wspace=0.25,hspace=0.02,left=0.15,
                            bottom=0.08,right=0.97,top=0.92)

        ax = fig.add_subplot(111)
        ax.set_xlim(0,1014)
        ax.set_title(flt.filename())

        ax.plot(yres, color='black')
        for resid in residuals:
            ax.plot(resid, color='red', linewidth=2, alpha=0.7)
        
        ax.set_xlim(0,1014)

        canvas = FigureCanvasAgg(fig)
        canvas.print_figure(flt.filename().split('.fits')[0] + '.column.png', dpi=100, transparent=False)
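
The per-column residual above is an inner 16th-84th percentile mean evaluated column by column in a Python loop. The same statistic can be sketched with vectorized numpy on a synthetic masked image (the median-filter smoothing of the residual is omitted):

import numpy as np

rng = np.random.default_rng(2)
img = rng.normal(0., 1., size=(1014, 1014)) + np.linspace(-0.05, 0.05, 1014)
mask = rng.random(img.shape) > 0.2          # True = usable pixel

masked = np.where(mask, img, np.nan)

#### Clip each column to its inner 16-84 percentile range, then average the rest
lo = np.nanpercentile(masked, 16, axis=0)
hi = np.nanpercentile(masked, 84, axis=0)
with np.errstate(invalid='ignore'):
    inner = (masked >= lo) & (masked <= hi)

yres = np.nanmean(np.where(inner, masked, np.nan), axis=0)
print(yres.shape, yres[:3])                 # 1D residual, one value per column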
コード例 #43
ファイル: interp.py プロジェクト: gbrammer/unicorn
def test():

    import interp_c
    import time
    import scipy
    import threedhst
    import numpy as np

    N = int(1.e6)

    xfull = np.arange(0, N + 1, 1) * 1.
    #yfull = np.sin(xfull/(N/1239.)*2*np.pi)+1
    yfull = np.sin(xfull / np.pi / 2 / 20) + 0.2

    # coeffs = np.random.random(size=12)*5
    # yfull = scipy.polyval(coeffs, xfull)

    xint = np.arange(0, N + 1, N / 100) * 1.

    tstart = time.time()
    denom = np.trapz(yfull, xfull)

    tstart = time.time()
    yint_0 = np.interp(xint, xfull, yfull)
    t0 = time.time()
    print 'Linear           : %.3f   (%.4e)' % (
        t0 - tstart, np.trapz(yint_0, xint) / denom - 1)

    yint_x = interp_c.interp_c(xint, xfull, yfull)
    tx = time.time()
    print 'Linear(c)        : %.3f   (%.4e)' % (
        tx - t0, np.trapz(yint_x, xint) / denom - 1)

    xreverse = xint[::-1]
    yint_y = interp_c.interp_c(xreverse, xfull, yfull, assume_sorted=0)
    ty = time.time()
    print 'Linear(c) rev    : %.3f   (%.4e)' % (
        ty - tx, np.trapz(yint_y, xint) / denom - 1)

    yint_1 = threedhst.utils.interp_conserve(xint, xfull, yfull)
    t1 = time.time()
    print 'Conserve         : %.3f   (%.4e)' % (
        t1 - ty, np.trapz(yint_1, xint) / denom - 1)

    yint_2 = interp_c.interp_conserve(xint, xfull, yfull)
    t2 = time.time()
    print 'Conserve (Cython): %.3f   (%.4e)' % (
        t2 - t1, np.trapz(yint_2, xint) / denom - 1)

    yint_3 = interp_c.interp_conserve_c(xint, xfull, yfull)
    t3 = time.time()
    print 'Conserve (more c): %.3f   (%.4e)' % (
        t3 - t2, np.trapz(yint_3, xint) / denom - 1)

    yint_4 = threedhst.utils.interp_conserve_c(xint, xfull, yfull)
    t4 = time.time()
    print 'Inline c         : %.3f   (%.4e)' % (
        t4 - t3, np.trapz(yint_4, xint) / denom - 1)

    #### Test interpolation
    threedhst.showMessage('Interpolation')

    #### Faster while n(int)/n(full) < 1./50

    xint = xfull[1000:-1000:40]

    tstart = time.time()
    yint = np.interp(xint, xfull, yfull, left=0., right=0.)
    t0 = time.time()
    print 'Python         : %.4f' % (t0 - tstart)

    yint1 = interp_c.interp_c(xint, xfull, yfull, extrapolate=0.)
    t1 = time.time()
    print 'Cython rewrite : %.4f   (%.2e)' % (t1 - t0, np.sum(
        (yint1 - yint)**2))

    #### Test midpoint definition --- slices work better than by hand

    threedhst.showMessage('Midpoint')
    xmid = xfull

    tstart = time.time()
    midpoint = (xmid[1:] + xmid[:-1]) / 2.
    midpoint = np.append(midpoint, np.array([xmid[0], xmid[-1]]))
    midpoint = midpoint[np.argsort(midpoint)]
    t0 = time.time()
    print 'Python      :  %.3f  %.2e' % (t0 - tstart,
                                         np.sum((midpoint - midpoint)**2))

    midpoint_c1 = interp_c.midpoint(xmid)
    t1 = time.time()
    print 'Cython      :  %.3f  %.2e' % (t1 - t0,
                                         np.sum((midpoint_c1 - midpoint)**2))

    midpoint_c2 = interp_c.midpoint_c(xmid, N + 1)
    t2 = time.time()
    print 'Cython (opt):  %.3f  %.2e' % (t2 - t1,
                                         np.sum((midpoint_c2 - midpoint)**2))
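
For reference, the flux-conserving interpolation benchmarked above can be sketched in pure numpy: integrate the finely sampled curve between bin edges set by the midpoints of the output grid, then divide by the bin widths. This illustrates the idea only and is not the threedhst or Cython implementation:

import numpy as np

def interp_conserve_np(x, xp, fp):
    """Bin-averaged (flux-conserving) resampling of (xp, fp) onto the grid x."""
    #### Bin edges: midpoints of x, padded with the end points
    mid = 0.5 * (x[1:] + x[:-1])
    edges = np.concatenate([[x[0]], mid, [x[-1]]])
    #### Cumulative trapezoidal integral of fp along the fine grid
    cum = np.concatenate([[0.], np.cumsum(0.5 * (fp[1:] + fp[:-1]) * np.diff(xp))])
    #### Integral up to each bin edge, then difference and normalize by bin width
    cum_edges = np.interp(edges, xp, cum)
    return np.diff(cum_edges) / np.diff(edges)

xp = np.linspace(0., 10., 100001)
fp = np.sin(xp) + 2.
x = np.linspace(0.5, 9.5, 50)
print(interp_conserve_np(x, xp, fp)[:3])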