Example No. 1
def checkfile(filename):
    '''Print statistics and run the imexamine task'''

    iraf.imstatistics.unlearn()
    iraf.imexamine.unlearn()
    print 'Check output file:'
    iraf.imstatistics(filename)
    print ' Running "imexamine" task..'
    iraf.imexamine(filename, 1)
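A minimal usage sketch for the helper above, assuming PyRAF is installed, iraf has been imported in the surrounding module, and 'science.fits' is a hypothetical image (imexamine also expects an image display such as DS9 to be running):

from pyraf import iraf

# 'science.fits' is a placeholder file name; imexamine needs an open DS9 display.
checkfile('science.fits')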
Example No. 2
def checkfile(filename):
    '''Print statistics and run the imexamine task'''

    iraf.imstatistics.unlearn()
    iraf.imexamine.unlearn()
    print 'Check output file:'
    iraf.imstatistics(filename)
    print ' Running "imexamine" task..'
    iraf.imexamine(filename, 1)
Example No. 3
def makeNirc2mask(dark, flat, outDir):
    """Make the static bad pixel mask for NIRC2. This only needs to be
    run once. This creates a file called nirc2mask.fits which is
    subsequently used throughout the pipeline. The dark should be a long
    integration dark.
    
    @param dark: The full absolute path to a medianed dark file. This is 
        used to construct a hot pixel mask (4 sigma detection thresh).
    @type dark: str
    @param flat: The full absolute path to a medianed flat file. This is
         used to construct a dead pixel mask.
    @type flat: str
    @param outDir: full path to output directory with '/' at the end.
    @type outDir: str
    """
    _out = outDir + 'nirc2mask.fits'
    _dark = dark
    _flat = flat

    util.rmall([_out])

    # Make hot pixel mask
    text_output = ir.imstatistics(_dark, fields="mean,stddev", 
				  nclip=10, format=0, Stdout=1)
    values = text_output[0].split()
    hi = float(values[0]) + (15.0 * float(values[1]))

    img_dk = pyfits.getdata(_dark)
    hot = img_dk > hi
    print 'Found %d hot pixels' % (hot.sum())

    # Make dead pixel mask
    text_output = ir.imstatistics(_flat, fields="mean,stddev", 
				  nclip=10, format=0, Stdout=1)
    values = text_output[0].split()

    # Assuming flat is normalized, we don't want pixels with less
    # than 0.5 sensitivity
    #lo = float(values[0]) - (15.0 * float(values[1]))
    lo = 0.5    #mask = hot

    hi = float(values[0]) + (15.0 * float(values[1]))

    img_fl = pyfits.getdata(_flat)
    dead = logical_or(img_fl > hi, img_fl < lo)
    print 'Found %d dead pixels' % (dead.sum())

    # Combine into a final supermask
    file = pyfits.open(_flat)

    mask = hot + dead
    mask = (mask != 0)
    unmask = (mask == 0)
    file[0].data[unmask] = 0
    file[0].data[mask] = 1
    file[0].writeto(_out, output_verify='silentfix')
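A usage sketch, with hypothetical absolute paths for the medianed dark and flat; the routine writes nirc2mask.fits into the given output directory:

# Paths below are placeholders; note the trailing '/' required on outDir.
makeNirc2mask('/data/nirc2/calib/dark_60s.fits',
              '/data/nirc2/calib/flat_Kp_norm.fits',
              '/data/nirc2/calib/masks/')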
Example No. 4
def run_daofind(image, wht='NA', extension=0, outfile='default',dthreshold=3.0, fwhmpsf=2.5, backsigma=-1.0,rdnoise=-1.0):
	'''RUN DAOFIND ON INPUT IMAGE'''

	# Parse input parameters
	if outfile == 'default': outfile = image+'0.coo.1'

	# Read in fits header
	f = pyfits.open(image)
	fheader = f[0].header

	# Extract relevant info from the header (exposure, filter, input/output pixel scale)
	exptime = fheader['exptime']
	instr = fheader['INSTRUME']
	if instr == 'WFC3':
		filter = fheader['FILTER']
	else: #assuming ACS
		filter = fheader['FILTER1']
		if filter[0] == 'C': filter = fheader['FILTER2']
	opxscl=0.03962
	ipxscl=0.03962
	f.close()

	# Assign number of flt images (IR/calibration images only have 1 chip; NDRIZIM keyword includes both chips from single FLT)
	if (fheader['detector'] == 'IR'): nchips = 1.0						# IR
	elif (fheader['subarray'] == True) and (len(fheader['CCDAMP']) == 1): nchips = 1.0	# UVIS sub-array
	elif (fheader['detector'] == 'UVIS') and (fheader['subarray'] == False): nchips = 2.0	# UVIS full-frame
	else: raise Exception('Image type is not defined.')
	num_flts = 1.0


	# Perform read noise correction
	if rdnoise < 0.0:
		amps = fheader['CCDAMP']
		rdnoise = np.zeros(len(amps))
		for namp in xrange(len(amps)): rdnoise[namp] = fheader['READNSE'+amps[namp]]
	rdnoise_corr = np.sqrt(num_flts * (np.average(rdnoise) * opxscl/ipxscl)**2)


	# Perform background noise calculation
	if backsigma < 0.0:
                backstats=iraf.imstatistics(image+'[1]', fields='stddev', lower = -100, upper = 100, nclip=5, \
                                                lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)
		backsigma=float(backstats[0])


	# remove old daofind files
	file_query = os.access(outfile, os.R_OK)	
	if file_query == True: os.remove(outfile)
	iraf.daofind.unlearn()
	iraf.daofind(image=image+'[1]', interactive='no', verify='no',output=outfile, fwhmpsf=fwhmpsf, sigma=backsigma, \
	readnoise=rdnoise_corr, itime=exptime, threshold=dthreshold, datamin=-10, datamax=100000)


	# Display results of daofind (***WORK IN PROGRESS***)
	#os.system('ds9&')
	#tmp=image.split('_cnts')
	#iraf.display(tmp[0]+'.fits',1, zscale='no', zrange='no', z1=0, z2=100,ztrans='log')
	#iraf.tvmark(1,outfile,mark = 'circle', radii = 8, color = 205)

	return outfile		# return name of coordinate file
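A hedged usage sketch: the counts image below is a hypothetical file name, and leaving backsigma and rdnoise at their negative defaults lets the function estimate both from the image and its header:

coofile = run_daofind('ib1f01drz_cnts.fits', dthreshold=4.0, fwhmpsf=2.5)
print('daofind catalogue written to %s' % coofile)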
Example No. 5
def run_daofind(image, wht='NA', extension=0, outfile='default',dthreshold=3.0, fwhmpsf=2.5, backsigma=-1.0,rdnoise=-1.0):
	'''RUN DAOFIND ON INPUT IMAGE'''

	# Parse input parameters
	if outfile == 'default': outfile = image+'0.coo.1'

	# Read in fits header
	f = pyfits.open(image)
	fheader = f[0].header

	# Extract relevant info from the header (exposure, filter, input/output pixel scale)
	exptime = fheader['exptime']
	instr = fheader['INSTRUME']
	if instr == 'WFC3':
		filter = fheader['FILTER']
	else: #assuming ACS
		filter = fheader['FILTER1']
		if filter[0] == 'C': filter = fheader['FILTER2']
	opxscl=0.03962
	ipxscl=0.03962
	f.close()

	# Assign number of flt images (IR/calibration images only have 1 chip; NDRIZIM keyword includes both chips from single FLT)
	if (fheader['detector'] == 'IR'): nchips = 1.0						# IR
	elif (fheader['subarray'] == True) and (len(fheader['CCDAMP']) == 1): nchips = 1.0	# UVIS sub-array
	elif (fheader['detector'] == 'UVIS') and (fheader['subarray'] == False): nchips = 2.0	# UVIS full-frame
	else: raise Exception('Image type is not defined.')
	num_flts = 1.0


	# Perform read noise correction
	if rdnoise < 0.0:
		amps = fheader['CCDAMP']
		rdnoise = np.zeros(len(amps))
		for namp in xrange(len(amps)): rdnoise[namp] = fheader['READNSE'+amps[namp]]
	rdnoise_corr = np.sqrt(num_flts * (np.average(rdnoise) * opxscl/ipxscl)**2)


	# Perform background noise calculation
	if backsigma < 0.0:
                backstats=iraf.imstatistics(image+'[0]', fields='stddev', lower = -100, upper = 100, nclip=5, \
                                                lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)
		backsigma=float(backstats[0])


	# remove old daofind files
	file_query = os.access(outfile, os.R_OK)	
	if file_query == True: os.remove(outfile)
	iraf.daofind.unlearn()
	iraf.daofind(image=image+'[0]', interactive='no', verify='no',output=outfile, fwhmpsf=fwhmpsf, sigma=backsigma, \
	readnoise=rdnoise_corr, itime=exptime, threshold=dthreshold, datamin=-10, datamax=100000)


	# Display results of daofind (***WORK IN PROGRESS***)
	#os.system('ds9&')
	#tmp=image.split('_cnts')
	#iraf.display(tmp[0]+'.fits',1, zscale='no', zrange='no', z1=0, z2=100,ztrans='log')
	#iraf.tvmark(1,outfile,mark = 'circle', radii = 8, color = 205)

	return outfile		# return name of coordinate file
Example No. 6
def makeflats():
    filters = ['J', '1113', '1184']
    flatfiles = ['@FlatfilesJ', '@Flatfiles1113', '@Flatfiles1184']
    for i in range(len(filters)):
        filt = filters[i]
        files = flatfiles[i]
        #files='*'+filt+'*.fits'
        flat = 'flat' + filt
        iraf.imcombine(files,
                       output=flat,
                       combine="median",
                       reject="minmax",
                       scale='median',
                       weight='exposure',
                       statsec="[400:600,400:600]",
                       nlow=2,
                       nhigh=4)
        input = flat + '.fits[300:800,300:800]'
        stats = iraf.imstatistics(input,
                                  fields="mean",
                                  lower=1,
                                  format=0,
                                  Stdout=1)
        print 'stats = ', stats, stats[0]
        ave = float(stats[0])
        iraf.imarith(flat, "/", ave, flat)  #normalize flat
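After the imarith division, each flat should average close to 1.0. A quick sanity check on one of the outputs (here the hypothetical 'flatJ' produced for the 'J' filter), reusing the same imstatistics call:

check = iraf.imstatistics('flatJ', fields='mean', lower=1, format=0, Stdout=1)
print('normalized flatJ mean = %s' % check[0])  # expect a value near 1.0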
Example No. 7
 def getMaxValue(self, fn):
     result = iraf.imstatistics(fn, usigma=2.5, lsigma=2.5, nclip=3, \
                                format="no", field="max", Stdout=1)
     try:
         maxval=float(result[0])
     except:
         maxval=None
     return(maxval)
Example No. 8
def mask_image(imageFile, resolvedSources, maskSize=150):
    img = pyfits.getdata(imageFile)

    # Calculate the sky level for this image.
    text_output = ir.imstatistics(imageFile,
                                  fields='mean',
                                  nclip=10,
                                  lsigma=10,
                                  usigma=2,
                                  format=0,
                                  Stdout=1)
    backLevel = float(text_output[0])

    backImage = img * 0.0 + backLevel

    imgNoBack = img - backImage

    # Make a mask image.
    for rr in range(len(resolvedSources)):
        coords = resolvedSources[rr]

        if type(maskSize) == int:
            msize = maskSize
        elif len(maskSize) > 1:
            msize = maskSize[rr]

        mask1D = np.hanning(msize)
        mask = -1.0 * np.outer(mask1D, mask1D) + 1.0

        # Find the image borders
        m_xlo = int(coords[1] - msize / 2)
        m_xhi = m_xlo + msize
        m_ylo = int(coords[0] - msize / 2)
        m_yhi = m_ylo + msize
        i_xhi = imgNoBack.shape[0]
        i_yhi = imgNoBack.shape[1]

        if m_xlo < 0:
            mask = mask[abs(m_xlo):, :]
            m_xlo = 0
        if m_ylo < 0:
            mask = mask[:, abs(m_ylo):]
            m_ylo = 0
        if m_xhi > i_xhi:
            mask = mask[:i_xhi - m_xhi, :]
            m_xhi = i_xhi
        if m_yhi > i_yhi:
            mask = mask[:, :i_yhi - m_yhi]
            m_yhi = i_yhi

        imgNoBack[m_xlo:m_xhi, m_ylo:m_yhi] *= mask

        img = imgNoBack + backImage

    return img
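A usage sketch for the routine above; the file name and source positions are hypothetical, and the masked array is written back out with pyfits:

import pyfits

# Positions follow the ordering used in the loop above
# (coords[0] indexes the second array axis, coords[1] the first).
sources = [(430, 512), (881, 300)]
masked = mask_image('ngc1234_kp.fits', sources, maskSize=150)
pyfits.writeto('ngc1234_kp_masked.fits', masked, clobber=True)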
Example No. 9
 def statisticsAmongImages(self, fn_list):
     vals=[]
     for fn in fn_list:
         print('%s%s' % (fn, self.statarea))
         result = iraf.imstatistics('%s%s' % (fn, self.statarea), format="no", field="midpt", Stdout=1)
         print(SacraFile.str2float_iraf(result[0]))
         vals.append(SacraFile.str2float_iraf(result[0]))
     c_max = numpy.amax(vals)
     c_min = numpy.amin(vals)
     c_ave = numpy.average(vals)
     c_std = numpy.std(vals)
     print("debug:", c_max, c_min, c_ave, c_std)
     return(c_max, c_min, c_ave, c_std)
Example No. 10
    def printStats(frame, tint, sampmode, reads):
        files = range(frame, frame + 3)

        fileName = "dark_%ds_1ca_%d_%dsm.fits" % (tint, sampmode, reads)

        if skipcombo == False:
            makedark(files, fileName)

        text_output = ir.imstatistics("darks/" + fileName, fields="mean,stddev", nclip=10, format=0, Stdout=1)
        values = text_output[0].split()
        darkMean = float(values[0])
        darkStdv = float(values[1])

        return darkMean, darkStdv
Example No. 11
 def imgAddHeaderSkyLevelEstimate(self, fn_in, skyarea):
     #skyarea = '[x1:x2, y1:y2]'  ## IRAF region format
     try:
         skylevel = iraf.imstatistics('%s%s' % (fn_in, skyarea),
                                      format='no', field='midpt',
                                      Stdout=1)
         skycor = (-1.0) * float(skylevel[0])
         self.addFitsHeader(fn_in, 'SKYLVCOR', skycor, 
                            'Sky correction estimation in %s' % (skyarea))
         print('%s: SKYLVCOR, %f' % (fn_in, skycor))
     except:
         sys.stderr.write('SKYLVCOR measuring error in %s\n' % (fn_in))
         return(-1)
     return(0)
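The sky region is passed as an IRAF image-section string appended to the file name. A standalone sketch of the same measurement (file name and section are hypothetical), without the class machinery:

from pyraf import iraf

skyarea = '[600:900,600:900]'                 # IRAF section: [x1:x2,y1:y2]
result = iraf.imstatistics('object_0001.fits' + skyarea,
                           format='no', field='midpt', Stdout=1)
skycor = -1.0 * float(result[0])              # sign flipped, as in the method above
print('SKYLVCOR would be %f' % skycor)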
Example No. 12
def mask_image(imageFile, resolvedSources, maskSize=150):
    img = pyfits.getdata(imageFile)

    # Calculate the sky level for this image.
    text_output = ir.imstatistics(imageFile, fields='mean', nclip=10, 
                                  lsigma=10, usigma=2, format=0, Stdout=1)
    backLevel = float(text_output[0])
    
    backImage = img * 0.0 + backLevel
        
    imgNoBack = img - backImage

    # Make a mask image.
    for rr in range(len(resolvedSources)):
        coords = resolvedSources[rr]

        if type(maskSize) == int:
            msize = maskSize
        elif len(maskSize) > 1:
            msize = maskSize[rr]

        mask1D = np.hanning(msize)
        mask = -1.0 * np.outer(mask1D, mask1D) + 1.0
        
        # Find the image borders
        m_xlo = int(coords[1] - msize/2)
        m_xhi = m_xlo + msize
        m_ylo = int(coords[0] - msize/2)
        m_yhi = m_ylo + msize
        i_xhi = imgNoBack.shape[0]
        i_yhi = imgNoBack.shape[1]
        
        if m_xlo < 0:
            mask = mask[abs(m_xlo):,:]
            m_xlo = 0
        if m_ylo < 0:
            mask = mask[:,abs(m_ylo):]
            m_ylo = 0
        if m_xhi > i_xhi:
            mask = mask[:i_xhi - m_xhi,:]
            m_xhi = i_xhi
        if m_yhi > i_yhi:
            mask = mask[:,:i_yhi - m_yhi]
            m_yhi = i_yhi

        imgNoBack[m_xlo:m_xhi, m_ylo:m_yhi] *= mask

        img = imgNoBack + backImage

    return img
Example No. 13
def run_daofind(image, extension=0,outfile='default',dthreshold=3.0, fwhmpsf=2.5, backsigma=-1.0,rdnoise=5.2):

	'''THIS PROCEDURE RUNS DAOFIND ON INPUT IMAGE'''

	# Parse input parameters
	if outfile == 'default': outfile = image+'0.coo.1'

	# Read in fits header
	f = pyfits.open(image)
	fheader = f[0].header
	f.close()

	# Extract relevant info from the header
	exptime = fheader['texptime']
	instr = fheader['INSTRUME']
	if instr == 'WFC3':
		filter = fheader['FILTER']
	else: #assuming ACS
		filter = fheader['FILTER1']
		if filter[0] == 'C': filter = fheader['FILTER2']
	ipxscl = fheader['D001ISCL']
	opxscl = fheader['D001SCAL']
	num_flts = float(fheader['NDRIZIM'])/2.0
	#df_max = 10000000.	# upper limit for "good" pixels (sometimes used to ID bad pixels)---Not used here.

	# Perform read noise correction
	rdnoise_corr = np.sqrt(num_flts * (rdnoise * opxscl/ipxscl)**2)
		
	# Perform background noise calculation
	if backsigma < 0.0:
		backrms=iraf.imstatistics(image+'[0]', fields='stddev', nclip=10, lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)
		backsigma=float(backrms[0])


	'''Run daofind'''
	# remove old daofind files
	file_query = os.access(outfile, os.R_OK)	
	if file_query == True: os.remove(outfile)
	iraf.daofind.unlearn()
	iraf.daofind(image=image+'[0]', interactive='no', verify='no',output=outfile, fwhmpsf=fwhmpsf, sigma=backsigma, \
	readnoise=rdnoise_corr, itime=exptime, threshold=dthreshold)

	# Display results of daofind (***WORK IN PROGRESS***)
	#!ds9 &
	#iraf.display(file+'['+exten+']',1)
	#iraf.tvmark(1,ofile+'0.coo.1',mark = 'circle', radii = 8, color = 205)

	return outfile		# return name of coordinate file
Example No. 14
    def printStats(frame, tint, sampmode, reads):
        files = range(frame, frame + 3)

        fileName = 'dark_%ds_1ca_%d_%dsm.fits' % (tint, sampmode, reads)

        if (skipcombo == False):
            makedark(files, fileName)

        text_output = ir.imstatistics('darks/' + fileName,
                                      fields="mean,stddev",
                                      nclip=10,
                                      format=0,
                                      Stdout=1)
        values = text_output[0].split()
        darkMean = float(values[0])
        darkStdv = float(values[1])

        return darkMean, darkStdv
Example No. 15
def domask(files, invmask):
    current = files
    #for current in files:
    mfile = "m" + current
    #iraf.mask(current,mask=invmask,stat="[400:600,400:600]")
    mode = iraf.imstatistics(current,
                             fields="mode",
                             lower=1,
                             format=0,
                             Stdout=1)
    mode = float(mode[0])
    print "masking ", current, " with mode = ", mode
    iraf.imarith(1, "-", invmask, "tempmask")
    iraf.imarith(current, "*", "tempmask", mfile)
    iraf.imarith(invmask, "*", mode, "tempmask2")
    iraf.imarith(mfile, "+", "tempmask2", mfile)
    iraf.imdel("tempmask")
    iraf.imdel("tempmask2")
Example No. 16
def run_daophot(image, outfile='default', coordfile='NA', apertures='5.0,16.66', annulus=17.0, dannulus=3.0, calgorithm='centroid', salgorithm='median', fwhmpsf=2.5, backsigma=-1.0,rdnoise=5.2):

	'''THIS PROCEDURE RUNS DAOPHOT ON INPUT IMAGE'''

	# Parse input parameters
	if outfile == 'default': outfile = image + '0.mag.1'
	if coordfile == 'NA': coordfile = image + '0.coo.1'

	# Read in fits header
	f = pyfits.open(image)
	fheader = f[0].header
	f.close()

	# Extract relevant info from the header
	exptime = fheader['texptime']
	filter = fheader['FILTER2']
	ipxscl = fheader['D001ISCL']
	opxscl = fheader['D001SCAL']
	num_flts = float(fheader['NDRIZIM'])/2.0
	dp_zmag = -2.5 * np.log10(float(fheader['PHOTFLAM'])) + float(fheader['PHOTZPT'])       # zero-pt for each filter
	#df_max = 10000000.      # upper limit for "good" pixels (sometimes used to ID bad pixels). Not using this parameter right now.

	# Perform read noise correction
	rdnoise_corr = np.sqrt(num_flts * (rdnoise * opxscl/ipxscl)**2)

	# Perform background noise calculation
	if backsigma < 0.0:
		backrms=iraf.imstatistics(image+'[0]', fields='stddev', nclip=10, lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)
		backsigma=float(backrms[0])


	'''Run daophot'''
	# Remove old phot output files
	file_query = os.access(outfile, os.R_OK)      
	if file_query == True: os.remove(outfile)

	# Run phot
	iraf.phot.unlearn()         # reset daophot parameters to default values
	iraf.phot(image=image+'[0]', interactive='no', verify='no', coords=coordfile, output=outfile, fwhmpsf=fwhmpsf, \
      		sigma=backsigma, readnoise=rdnoise_corr, itime=exptime, calgorithm=calgorithm, salgorithm=salgorithm, \
      		annulus=annulus, dannulus=dannulus, apertures=apertures,zmag=dp_zmag)


	return outfile 		# return name of output catalog
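A usage sketch for the wrapper above; the drizzled image is a hypothetical name, and the remaining keywords keep their defaults so the coordinate file, read noise correction, and background estimate are derived as in the function body:

magfile = run_daophot('j8xi01drz_sci.fits', apertures='5.0,16.66')
print('photometry written to %s' % magfile)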
Example No. 17
iraf.ccdred.combine.unlearn()
iraf.ccdred.zerocombine.unlearn()
iraf.ccdred.flatcombine.unlearn()
iraf.specred.response.unlearn()

# regular expression of files (e.g. bias_00*.fits, flat-2000jan01_?.*)
theflat = str(raw_input('Enter flat image: '))

iraf.specred.extinction = ''
iraf.specred.caldir = ''
iraf.specred.observatory = 'lna'

print 'Create a response for flat...'
iraf.specred.response.interactive = True
iraf.specred.response.high_reject = 3.0
iraf.specred.response.low_reject = 3.0
iraf.specred.response(calibration=theflat,
                      normalization=theflat,
                      response='nFlat')

# check output flat image
print 'opening a ds9 window if not already opened...'
ds9.ds9()

print 'Check output file:'
iraf.imstatistics('nFlat')
print ' Running "imexamine" task..'
iraf.imexamine('nFlat', 1)

print '--- DONE ---'
Example No. 18
                       combine="median",
                       scale="median",
                       stats="[400:600,400:600]",
                       reject="ccdclip",
                       lsigma=5.5,
                       hsig=4.5,
                       rdnoise=17.5,
                       gain=4.35,
                       maskt="goodval",
                       lthres=500,
                       hthres=32000,
                       nkeep=2,
                       blank=-999,
                       grow=3)

    stats = iraf.imstatistics(flat, fields="mean", lower=1, format=0, Stdout=1)
    ave = float(stats[0])
    iraf.imarith(flat, "/", ave, flat)  #normalize flat

    for file in files:
        dfile = 'd' + file
        mfile = 'md' + file
        ffile = 'fmd' + file
        m999file = 'mfmd' + file
        gfile = 'gmfmd' + file
        if npass == 1:
            iraf.imarith(mfile, "/", flat, ffile)
            diminput = fname
        if npass == 2:
            iraf.imdelete(ffile)
            iraf.imarith(mfile, "/", flat, ffile)
Example No. 19
 def __init__(self,imagen,outfile):
     temp=sys.stdout                             # Store original stdout object for later
     sys.stdout = open(outfile, 'w')             # redirect all output to the file
     self.test=iraf.imstatistics(images=imagen,fields="stddev,min,max,mode",lower=low,upper=up,lsigma=1,usigma=1)                     
     sys.stdout.close()                          # closing the file
     sys.stdout=temp                             # back to the normal print command
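A sketch of the same capture done without redirecting sys.stdout, under the same assumptions (imagen, low, up and outfile supplied by the caller): PyRAF's Stdout=1 returns the task output as a list of strings that can then be written to the file explicitly.

lines = iraf.imstatistics(images=imagen, fields="stddev,min,max,mode",
                          lower=low, upper=up, lsigma=1, usigma=1, Stdout=1)
with open(outfile, 'w') as fh:
    fh.write('\n'.join(lines) + '\n')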
Example No. 20
import pprint
from pyraf import iraf

stats = iraf.imstatistics('../VIS.fits[1]', Stdout=1)
pprint.pprint(stats)
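Continuing the snippet above: with format left at its default, the returned list holds a header line followed by a line of values, so the two can be paired up into a dictionary (a sketch; the exact field names come from the task defaults):

if len(stats) >= 2:
    names = stats[0].replace('#', '').split()   # e.g. IMAGE NPIX MEAN STDDEV MIN MAX
    values = stats[1].split()
    pprint.pprint(dict(zip(names, values)))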
Example No. 21
def makemask(dark, flat, output):
    """Make bad pixel mask for NIRC2 data. Makes a calib/ directory
    and stores all output there. All output and temporary files
    will be created in a masks/ subdirectory. 
    
    @param dark: The full relative path to a dark file. This is used to
        construct a hot pixel mask. Use a long (t>20sec) exposure dark.
    @type dark: str
    @param flat: The full relative path to a flat file. This is used to 
        construct a dead pixel mask. The flat should be normalized.
    @type flat: str
    @param output: output file name. This will be created in the masks/
        subdirectory.
    @type output: str
    """
    redDir = os.getcwd() + '/'
    calDir = redDir + 'calib/'
    maskDir = util.trimdir(calDir + 'masks/')
    flatDir = util.trimdir(calDir + 'flats/')
    darkDir = util.trimdir(calDir + 'darks/')
    rawDir = util.trimdir(os.path.abspath(redDir + '../raw') + '/')
    dataDir = util.trimdir(os.path.abspath(redDir + '../..') + '/')

    util.mkdir(calDir)
    util.mkdir(maskDir)

    _out = maskDir + output
    _dark = darkDir + dark
    _flat = flatDir + flat
    _nirc2mask = module_dir + '/masks/nirc2mask.fits'

    util.rmall([_out])

    # Make hot pixel mask
    whatDir = redDir + dark
    print(whatDir)

    text_output = ir.imstatistics(_dark,
                                  fields="mean,stddev",
                                  nclip=10,
                                  format=0,
                                  Stdout=1)
    print text_output
    values = text_output[0].split()
    hi = float(values[0]) + (10.0 * float(values[1]))

    img_dk = pyfits.getdata(_dark)
    hot = img_dk > hi

    # Make dead pixel mask
    text_output = ir.imstatistics(_flat,
                                  fields="mean,stddev",
                                  nclip=10,
                                  format=0,
                                  Stdout=1)
    values = text_output[0].split()
    #lo = float(values[0]) - (15.0 * float(values[1]))
    # If flat is normalized, then lo should be set to 0.5
    lo = 0.5
    hi = float(values[0]) + (15.0 * float(values[1]))

    img_fl = pyfits.getdata(_flat)
    dead = np.logical_or(img_fl > hi, img_fl < lo)

    # We also need the original NIRC2 mask (with cracks and such)
    nirc2mask = pyfits.getdata(_nirc2mask)

    # Combine into a final supermask. Use the flat file just as a template
    # to get the header from.
    ofile = pyfits.open(_flat)

    if ((hot.shape)[0] == (nirc2mask.shape)[0]):
        mask = hot + dead + nirc2mask
    else:
        mask = hot + dead
    mask = (mask != 0)
    unmask = (mask == 0)
    ofile[0].data[unmask] = 0
    ofile[0].data[mask] = 1
    ofile[0].writeto(_out, output_verify='silentfix')
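A usage sketch: the dark and flat names are hypothetical and are resolved against calib/darks/ and calib/flats/ by the path handling above, and the resulting supermask lands in calib/masks/:

makemask('dark_60s_1co.fits', 'flat_kp.fits', 'supermask.fits')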
Example No. 22
def makeNirc2mask(dark, flat, outDir):
    """Make the static bad pixel mask for NIRC2. This only needs to be
    run once. This creates a file called nirc2mask.fits which is
    subsequently used throughout the pipeline. The dark should be a long
    integration dark.
    
    @param dark: The full absolute path to a medianed dark file. This is 
        used to construct a hot pixel mask (4 sigma detection thresh).
    @type dark: str
    @param flat: The full absolute path to a medianed flat file. This is
         used to construct a dead pixel mask.
    @type flat: str
    @param outDir: full path to output directory with '/' at the end.
    @type outDir: str
    """
    _out = outDir + 'nirc2mask.fits'
    _dark = dark
    _flat = flat

    util.rmall([_out])

    # Make hot pixel mask
    text_output = ir.imstatistics(_dark,
                                  fields="mean,stddev",
                                  nclip=10,
                                  format=0,
                                  Stdout=1)
    values = text_output[0].split()
    hi = float(values[0]) + (15.0 * float(values[1]))

    img_dk = pyfits.getdata(_dark)
    hot = img_dk > hi
    print 'Found %d hot pixels' % (hot.sum())

    # Make dead pixel mask
    text_output = ir.imstatistics(_flat,
                                  fields="mean,stddev",
                                  nclip=10,
                                  format=0,
                                  Stdout=1)
    values = text_output[0].split()

    # Assuming flat is normalized, we don't want pixels with less
    # than 0.5 sensitivity
    #lo = float(values[0]) - (15.0 * float(values[1]))
    lo = 0.5  #mask = hot

    hi = float(values[0]) + (15.0 * float(values[1]))

    img_fl = pyfits.getdata(_flat)
    dead = logical_or(img_fl > hi, img_fl < lo)
    print 'Found %d dead pixels' % (dead.sum())

    # Combine into a final supermask
    file = pyfits.open(_flat)

    mask = hot + dead
    mask = (mask != 0)
    unmask = (mask == 0)
    file[0].data[unmask] = 0
    file[0].data[mask] = 1
    file[0].writeto(_out, output_verify='silentfix')
Example No. 23
if not os.path.isdir('flat_files'):
    os.mkdir('flat_files')

allfiles = flatlist
for file in allfiles:
    shutil.copy(file, 'flat_files/')

# combine flat images
print 'Combining flat images ...'
iraf.ccdred.flatcombine.ccdtype = ''
iraf.ccdred.flatcombine.process = 'no'
iraf.ccdred.flatcombine.reject = 'ccdclip'
iraf.ccdred.flatcombine.rdnoise = 'rdnoise'
iraf.ccdred.flatcombine.gain = 'gain'
iraf.ccdred.flatcombine.output = 'Flat'
iraf.ccdred.flatcombine(input=flatin)

for file in flatlist:
    os.remove(file)

print 'opening a ds9 window if not already opened...'
ds9.ds9()

# check output flat image
print 'Check output file:'
iraf.imstatistics('Flat')
print ' Running "imexamine" task..'
iraf.imexamine('Flat', 1)

print '--- DONE ---'
Example No. 24
print '\ncreating a zero_files/ dir to store original data'
if not os.path.isdir('zero_files'):
    os.mkdir('zero_files')

allfiles = zerolist
for file in allfiles:
    shutil.copy(file, 'zero_files/')

# combine bias images
print 'Combining zero level images...'
iraf.ccdred.zerocombine.ccdtype = ''
iraf.ccdred.zerocombine.reject = 'ccdclip'
iraf.ccdred.zerocombine.rdnoise = 'rdnoise'
iraf.ccdred.zerocombine.gain = 'gain'
iraf.ccdred.zerocombine(input=zeroin)

for zero in zerolist:
    os.remove(zero)

print 'opening a ds9 window if not already opened...'
ds9.ds9()

# check output image
print 'Check output file:'
iraf.imstatistics('Zero')

print ' Running "imexamine" task..'
iraf.imexamine('Zero', 1)

print '--- DONE ---'
Example No. 25
import pprint
from pyraf import iraf

stats = iraf.imstatistics("../VIS.fits[1]", Stdout=1)
pprint.pprint(stats)
Example No. 26
def makemask(dark, flat, output):
    """Make bad pixel mask for NIRC2 data. Makes a calib/ directory
    and stores all output there. All output and temporary files
    will be created in a masks/ subdirectory. 
    
    @param dark: The full relative path to a dark file. This is used to
        construct a hot pixel mask. Use a long (t>20sec) exposure dark.
    @type dark: str
    @param flat: The full relative path to a flat file. This is used to 
        construct a dead pixel mask. The flat should be normalized.
    @type flat: str
    @param output: output file name. This will be created in the masks/
        subdirectory.
    @type output: str
    """
    redDir = os.getcwd() + '/'
    calDir = redDir + 'calib/'
    maskDir = util.trimdir(calDir + 'masks/')
    flatDir = util.trimdir(calDir + 'flats/')
    darkDir = util.trimdir(calDir + 'darks/')
    rawDir = util.trimdir(os.path.abspath(redDir + '../raw') + '/')
    dataDir = util.trimdir(os.path.abspath(redDir + '../..') + '/')

    util.mkdir(calDir)
    util.mkdir(maskDir)

    _out = maskDir + output
    _dark = darkDir + dark
    _flat = flatDir + flat
    _nirc2mask = module_dir + '/masks/nirc2mask.fits'

    util.rmall([_out])

    # Make hot pixel mask
    whatDir = redDir + dark
    print(whatDir)

    text_output = ir.imstatistics(_dark, fields="mean,stddev", 
				  nclip=10, format=0, Stdout=1)
    print text_output
    values = text_output[0].split()
    hi = float(values[0]) + (10.0 * float(values[1]))

    img_dk = pyfits.getdata(_dark)
    hot = img_dk > hi

    # Make dead pixel mask
    text_output = ir.imstatistics(_flat, fields="mean,stddev", 
				  nclip=10, format=0, Stdout=1)
    values = text_output[0].split()
    #lo = float(values[0]) - (15.0 * float(values[1]))
    # If flat is normalized, then lo should be set to 0.5
    lo = 0.5
    hi = float(values[0]) + (15.0 * float(values[1]))

    img_fl = pyfits.getdata(_flat)
    dead = np.logical_or(img_fl > hi, img_fl < lo)
    
    # We also need the original NIRC2 mask (with cracks and such)
    nirc2mask = pyfits.getdata(_nirc2mask)

    # Combine into a final supermask. Use the flat file just as a template
    # to get the header from.
    ofile = pyfits.open(_flat)
    
    if ((hot.shape)[0] == (nirc2mask.shape)[0]):
        mask = hot + dead + nirc2mask
    else:
        mask = hot + dead
    mask = (mask != 0)
    unmask = (mask == 0)
    ofile[0].data[unmask] = 0
    ofile[0].data[mask] = 1
    ofile[0].writeto(_out, output_verify='silentfix')
Example No. 27
def run_daophot(image, outfile='default', coordfile='NA', backmethod='mean',apertures='1,2,3,4,5,6,7,8,9,10,12,14,16,18,20,24,28,32,36,40,45,50,55,60,65,70', cbox=10.0, \
		backmean=-9999.0,annulus=17.0, dannulus=3.0, calgorithm='centroid', salgorithm='median', fwhmpsf=2.5, backsigma=-1.0,rdnoise=-1.0, epadu=1.0):

	'''THIS PROCEDURE RUNS DAOPHOT ON INPUT IMAGE'''

	# Parse input parameters
	if outfile == 'default': outfile = image + '0.mag.1'
	if coordfile == 'NA': coordfile = image + '0.coo.1'

	# Read in fits header
	f = pyfits.open(image)
	fheader = f[0].header

	# Extract relevant info from the header
        naxis1 = fheader['NAXIS1']
        naxis2 = fheader['NAXIS2']
	exptime = fheader['exptime']
        instr = fheader['INSTRUME']
        if instr == 'WFC3':
                filter = fheader['FILTER']
        else: #assuming ACS
                filter = fheader['FILTER1']
                if filter[0] == 'C': filter = fheader['FILTER2']
	ipxscl = 0.03962
	opxscl = 0.03962
	f.close()
	dp_zmag = get_uvis_zeropoint(filter)


        # Number of flt images (tricky: IR/calibration images may have only 1 chip--NDRIZIM keyword adds both chips)
        if (fheader['detector'] == 'IR'): nchips = 1.0                                          # IR
        elif (fheader['subarray'] == True) and (len(fheader['CCDAMP']) == 1): nchips = 1.0        # UVIS sub-array
        elif (fheader['detector'] == 'UVIS') and (fheader['subarray'] == False): nchips = 2.0   # UVIS full-frame
        else: raise Exception('Image type is not defined.')
        #num_flts = fheader['NDRIZIM']/nchips
	num_flts = 1.0

        # Perform read noise correction
        if rdnoise < 0.0:
                amps = fheader['CCDAMP']
                rdnoise = np.zeros(len(amps))
                for namp in xrange(len(amps)): rdnoise[namp] = fheader['READNSE'+amps[namp]]
        rdnoise_corr = np.sqrt(num_flts * (np.average(rdnoise) * opxscl/ipxscl)**2)


        # Measure the background and noise
        if (backmean < -1000.0) or (backsigma < 0.0):
                # read in the x/y center of the source
                xc, yc = np.loadtxt(coordfile, unpack=True, usecols = (0,1))

                #create temporary image for bckgrd measurement that masks sources out to 60 pixels (assign a very low number)
                tmp_image = image+'.back.fits'
                shutil.copy(image, tmp_image)
                ff=pyfits.open(tmp_image, mode='update')
                maskim = ff[0].data
                maskim[circular_mask(maskim.shape,60, x_offset=(xc-naxis1/2.0),  y_offset=(yc-naxis2/2.0))] = -99999.0


		# Also mask out sources with zero effective exposure [WE ELIMINATE PIXELS WITHIN 20 OF IMAGE BORDER]
		maskim[:,0:20] = -99999.0
		maskim[:,-20:] = -99999.0
		maskim[0:20,:] = -99999.0
		maskim[-20:,:] = -99999.0
                ff.close()

		# generate initial guess for lower/upper limits (use 10 sigma)
		if (backmean < -1000.0) | (backsigma < 0.0):
			initback = iraf.imstatistics(tmp_image+'[0]', fields='mode,stddev', lower = -100, upper = 10000, nclip=7, \
                                                     lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)

			if len(initback[0].split('  ')) != 2:
				raise Exception('Could not parse output from imstatistics.')
			else:
		    		llim = float(initback[0].split('  ')[0]) - 10.0*float(initback[0].split('  ')[1])
		    		ulim = float(initback[0].split('  ')[0]) + 10.0*float(initback[0].split('  ')[1])

		# measure mode and std dev of background using initial guess to constrain dynamic range
                if backmean < -1000.0:
                        backstats=iraf.imstatistics(tmp_image+'[0]', fields=backmethod, lower = llim, upper = ulim, nclip=7, \
                                                    lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)
                        backmean=float(backstats[0])

                if backsigma < 0.0:
                        backstats=iraf.imstatistics(tmp_image+'[0]', fields='stddev', lower = llim, upper = ulim, nclip=7, \
                                                    lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)
                        backsigma=float(backstats[0])


                #remove temporary image
                #os.remove(tmp_image)


	print ' BACKGROUND =  '+str(backmean)
	print ' BACKGROUND RMS =  '+str(backsigma)

	# Case of no aperture size given (we select aperture sizes of: WFC3= 0.27 and 0.4"  && ACS=0.25 and 0.5")
	if apertures == '0.0':
		if instr == 'WFC3' and filter[1] == '1': apertures=str(0.27/opxscl)+','+str(0.4/opxscl)	# case of IR filters
		elif instr == 'WFC3' and filter[1] != '1': apertures='5,'+str(0.4/opxscl)	# case of UVIS filters
		elif instr == 'WFC': apertures = '5,16.66'
		else: raise Exception('UNKNOWN INSTRUMENT/FILTER')


	# Remove old phot output files
	file_query = os.access(outfile, os.R_OK)      
	if file_query == True: os.remove(outfile)

	# Run phot
	iraf.phot.unlearn()         # reset daophot parameters to default values
	iraf.phot(image=image+'[0]', interactive='no', verify='no', coords=coordfile, output=outfile, fwhmpsf=fwhmpsf, \
      		sigma=backsigma, readnoise=rdnoise_corr, itime=exptime, calgorithm=calgorithm, cbox=cbox, skyvalue=backmean, \
		apertures=apertures,zmag=dp_zmag, salgorithm='constant') 	#annulus=annulus, dannulus=dannulus


        # Display results of daophot
        #iraf.display(tmp[0]+'.fits',1, zscale='no', zrange='no', z1=0, z2=100,ztrans='log')
        #iraf.tvmark(1,outfile,mark = 'circle', radii = 10, color = 206)

	#return outfile 		# return name of output catalog
	return backmean,backsigma	# return computed background stats for image
Example No. 28
                x1=np.round(xc)+sz/2.
                y0=np.round(yc)-sz/2.
                y1=np.round(yc)+sz/2.
                #fig = pylab.figure()
                #fig.subplots_adjust(wspace=0.4)
                ax1 = pylab.subplot(1,2,1)
                ax1.imshow(np.log10(im[y0:y1,x0:x1]),interpolation='nearest')
                ax1.autoscale(axis='both',enable=False)
                ax1.scatter([xc-x0-1.0], [yc-y0-1.0], marker='x', s=200., color='w')
                pylab.title('X = '+str(xc)+'  Y = '+str(yc))

                # plot #2 - background histogram
                tmp_image=glob.glob('*back.fits')[0]
                backim = pyfits.getdata(tmp_image)
                #--measure back statistics (mean and mode via IRAF)
                initback = iraf.imstatistics(tmp_image+'[0]', fields='mode,stddev', lower = -100, upper = 10000, nclip=7,lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)
                llim = float(initback[0].split('  ')[0]) - 10.0*float(initback[0].split('  ')[1])
                ulim = float(initback[0].split('  ')[0]) + 10.0*float(initback[0].split('  ')[1])       
                backstats=iraf.imstatistics(tmp_image+'[0]', fields='mean,mode', lower = llim, upper = ulim, nclip=7,lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)
                backmean=float(backstats[0].split('  ')[0])
                backmode=float(backstats[0].split('  ')[1])
                fbackim= np.ndarray.flatten(backim)
                gd=np.where((fbackim > llim) & (fbackim < ulim))[0]
                backmedian=meanclip(fbackim[gd],maxiter=7,return_median=1)[0]

                ax2 = pylab.subplot(1,2,2)
                pylab.hist(fbackim[gd],log=True)
                pylab.ylim(0.5,600000)
                pylab.xlim(-20,20)
                pylab.plot([backmode,backmode],[0.5,600000],ls='-',color='red',label='mode')
                pylab.plot([backmedian,backmedian],[0.5,600000],ls='--',color='aqua',label='median')
Example No. 29
def run_daofind(image, outfile='default', dthreshold=3.0, fwhmpsf=2.5, backsigma=None,rdnoise=None):
    '''RUN DAOFIND ON INPUT IMAGE'''

    # -- parse output filename
    if outfile == 'default': outfile = image+'.coo'


    # -- extract header info 
    prihdr = pyfits.getheader(image)
    exptime = prihdr['exptime']
    instrum = prihdr['INSTRUME']
    detector = prihdr['DETECTOR']
    SUBARRAY = prihdr['SUBARRAY']
    ccdamp = prihdr['CCDAMP']


    # -- record filter name, native pixel scale, and no. of chips
    if instrum == 'WFC3':
        if detector == 'UVIS':
            pscale_nat = 0.03962
            if ((SUBARRAY == True) & (len(ccdamp) == 1)): nchips = 1.0
            elif SUBARRAY == False: nchips = 2.0
            else: raise Exception('Image type is not defined.')
        elif detector == 'IR':
            pscale_nat = 0.12825
            nchips = 1.0
        else: raise Exception('Detector '+detector+' not covered in our case list.')
    elif instrum == 'ACS':
        if detector == 'WFC':
            pscale_nat = 0.049
            if ((SUBARRAY == True) & (len(ccdamp) == 1)): nchips = 1.0
            elif SUBARRAY == False: nchips = 2.0
            else: raise Exception('Image type is not defined.')
        else: raise Exception('Detector '+detector+' not covered in our case list.')
    else: raise Exception('Instrument '+instrum+' not covered in our case list.')


    # -- record pixel scale of current image, image type, and number of flts
    sciext = []
    pscale_img = prihdr.get('D001SCAL',default='NA')
    if pscale_img == 'NA':
        imtype = 'flt'            # we don't distinguish between flt/crclean, i.e., assume pscales are equal
        pscale_img = pscale_nat
        num_flts = 1.0
        # -- record location of science extension
        hdulist = pyfits.open(image)
        for ext in xrange(len(hdulist)):
            if hdulist[ext].name == 'SCI': sciext.append(ext)
        hdulist.close()
        if len(sciext) != 1: raise Exception('We do not handle images with '+str(len(sciext))+' SCI extensions.')
    else:
        imtype ='drz'
        num_flts = prihdr['NDRIZIM']/nchips
        sciext.append(0)


    # -- estimate read noise
    if rdnoise == None:
        rdnoise = np.zeros(len(ccdamp))
        for namp in xrange(len(ccdamp)): rdnoise[namp] = prihdr['READNSE'+ccdamp[namp]]
    rdnoise_corr = np.sqrt(num_flts * (np.average(rdnoise) * pscale_img/pscale_nat)**2)


    # -- perform rough background noise calculation
    if backsigma == None:
        backstats=iraf.imstatistics(image+'['+str(sciext[0])+']', fields='stddev', lower = -100, upper = 100, nclip=5, \
                                    lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)
        backsigma=float(backstats[0])


    # -- remove old daofind files/run daofind
    file_query = os.access(outfile, os.R_OK)	
    if file_query == True: os.remove(outfile)
    iraf.daofind.unlearn()
    iraf.daofind(image=image+'['+str(sciext[0])+']', interactive='no', verify='no',output=outfile, fwhmpsf=fwhmpsf, \
                 sigma=backsigma, readnoise=rdnoise_corr, itime=exptime, threshold=dthreshold, datamin=-10, datamax=100000)


    return outfile
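A usage sketch for this variant: passing explicit (hypothetical) backsigma and rdnoise values skips both the READNSE header lookup and the imstatistics background estimate:

coofile = run_daofind('icab01xyq_flt.fits', dthreshold=3.5,
                      backsigma=0.8, rdnoise=3.1)
print('sources written to %s' % coofile)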
Example No. 30
        ax2.scatter([xc - x0 - 1.0], [yc - y0 - 1.0],
                    marker='x',
                    s=200.,
                    color='w',
                    alpha=0.1)
        pylab.title('DQ ARRAY', fontsize='small')

        # plot #3 - background histogram
        tmp_image = glob.glob('*back.fits')[0]
        backim = pyfits.getdata(tmp_image)
        #--measure back statistics (mean and mode via IRAF)
        initback = iraf.imstatistics(tmp_image + '[0]',
                                     fields='mode,stddev',
                                     lower=-100,
                                     upper=10000,
                                     nclip=7,
                                     lsigma=3.0,
                                     usigma=3.0,
                                     cache='yes',
                                     format='no',
                                     Stdout=1)
        llim = float(initback[0].split('  ')[0]) - 10.0 * float(
            initback[0].split('  ')[1])
        ulim = float(initback[0].split('  ')[0]) + 10.0 * float(
            initback[0].split('  ')[1])
        backstats = iraf.imstatistics(tmp_image + '[0]',
                                      fields='mean,mode,stddev',
                                      lower=llim,
                                      upper=ulim,
                                      nclip=7,
                                      lsigma=3.0,
                                      usigma=3.0,
Example No. 31
print '\ncreating a zero_files/ dir to store original data'
if not os.path.isdir('zero_files'):
    os.mkdir('zero_files')

allfiles = zerolist
for file in allfiles:
    shutil.copy(file, 'zero_files/')

# combine bias images
print 'Combining zero level images...'
iraf.ccdred.zerocombine.ccdtype = ''
iraf.ccdred.zerocombine.reject = 'ccdclip'
iraf.ccdred.zerocombine.rdnoise = 'rdnoise'
iraf.ccdred.zerocombine.gain = 'gain'
iraf.ccdred.zerocombine(input=zeroin)

for zero in zerolist:
    os.remove(zero)

print 'opening a ds9 window if not already opened...'
ds9.ds9()

# check output image
print 'Check output file:'
iraf.imstatistics('Zero')

print ' Running "imexamine" task..'
iraf.imexamine('Zero', 1)

print '--- DONE ---'
Example No. 32
def run_daofind(image,
                outfile='default',
                dthreshold=3.0,
                fwhmpsf=2.5,
                backsigma=None,
                rdnoise=None):
    '''RUN DAOFIND ON INPUT IMAGE'''

    # -- parse output filename
    if outfile == 'default': outfile = image + '.coo'

    # -- extract header info
    prihdr = pyfits.getheader(image)
    exptime = prihdr['exptime']
    instrum = prihdr['INSTRUME']
    detector = prihdr['DETECTOR']
    SUBARRAY = prihdr['SUBARRAY']
    ccdamp = prihdr['CCDAMP']

    # -- record filter name, native pixel scale, and no. of chips
    if instrum == 'WFC3':
        if detector == 'UVIS':
            pscale_nat = 0.03962
            if ((SUBARRAY == True) & (len(ccdamp) == 1)): nchips = 1.0
            elif SUBARRAY == False: nchips = 2.0
            else: raise Exception('Image type is not defined.')
        elif detector == 'IR':
            pscale_nat = 0.12825
            nchips = 1.0
        else:
            raise Exception('Detector ' + detector +
                            ' not covered in our case list.')
    elif instrum == 'ACS':
        if detector == 'WFC':
            pscale_nat = 0.049
            if ((SUBARRAY == True) & (len(ccdamp) == 1)): nchips = 1.0
            elif SUBARRAY == False: nchips = 2.0
            else: raise Exception('Image type is not defined.')
        else:
            raise Exception('Detector ' + detector +
                            ' not covered in our case list.')
    else:
        raise Exception('Instrument ' + instrum +
                        ' not covered in our case list.')

    # -- record pixel scale of current image, image type, and number of flts
    sciext = []
    pscale_img = prihdr.get('D001SCAL', default='NA')
    if pscale_img == 'NA':
        imtype = 'flt'  # we don't distinguish between flt/crclean, i.e., assume pscales are equal
        pscale_img = pscale_nat
        num_flts = 1.0
        # -- record location of science extension
        hdulist = pyfits.open(image)
        for ext in xrange(len(hdulist)):
            if hdulist[ext].name == 'SCI': sciext.append(ext)
        hdulist.close()
        if len(sciext) != 1:
            raise Exception('We do not handle images with ' +
                            str(len(sciext)) + ' SCI extensions.')
    else:
        imtype = 'drz'
        num_flts = prihdr['NDRIZIM'] / nchips
        sciext.append(0)

    # -- estimate read noise
    if rdnoise == None:
        rdnoise = np.zeros(len(ccdamp))
        for namp in xrange(len(ccdamp)):
            rdnoise[namp] = prihdr['READNSE' + ccdamp[namp]]
    rdnoise_corr = np.sqrt(
        num_flts * (np.average(rdnoise) * pscale_img / pscale_nat)**2)

    # -- perform rough background noise calculation
    if backsigma == None:
        backstats=iraf.imstatistics(image+'['+str(sciext[0])+']', fields='stddev', lower = -100, upper = 100, nclip=5, \
                                    lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)
        backsigma = float(backstats[0])

    # -- remove old daofind files/run daofind
    file_query = os.access(outfile, os.R_OK)
    if file_query == True: os.remove(outfile)
    iraf.daofind.unlearn()
    iraf.daofind(image=image+'['+str(sciext[0])+']', interactive='no', verify='no',output=outfile, fwhmpsf=fwhmpsf, \
                 sigma=backsigma, readnoise=rdnoise_corr, itime=exptime, threshold=dthreshold, datamin=-10, datamax=100000)

    return outfile
Example No. 33
def run_daophot(image, outfile='default', coordfile='NA', backmethod='mean',apertures='1,2,3,4,5,6,7,8,9,10,12,14,16,18,20,24,28,32,36,40,45,50,55,60,65,70', cbox=10.0, \
  backmean=-9999.0,annulus=17.0, dannulus=3.0, calgorithm='centroid', salgorithm='median', fwhmpsf=2.5, backsigma=-1.0,rdnoise=-1.0, epadu=1.0):
    '''THIS PROCEDURE RUNS DAOPHOT ON INPUT IMAGE'''

    # Parse input parameters
    if outfile == 'default': outfile = image + '0.mag.1'
    if coordfile == 'NA': coordfile = image + '0.coo.1'

    # Read in fits header
    f = pyfits.open(image)
    fheader = f[0].header

    # Extract relevant info from the header
    naxis1 = fheader['NAXIS1']
    naxis2 = fheader['NAXIS2']
    exptime = fheader['exptime']
    instr = fheader['INSTRUME']
    if instr == 'WFC3':
        filter = fheader['FILTER']
    else:  #assuming ACS
        filter = fheader['FILTER1']
        if filter[0] == 'C': filter = fheader['FILTER2']
    ipxscl = 0.03962
    opxscl = 0.03962
    f.close()
    dp_zmag = get_uvis_zeropoint(filter)

    # Number of flt images (tricky: IR/calibration images may have only 1 chip--NDRIZIM keyword adds both chips)
    if (fheader['detector'] == 'IR'): nchips = 1.0  # IR
    elif (fheader['subarray'] == True) and (len(fheader['CCDAMP']) == 1):
        nchips = 1.0  # UVIS sub-array
    elif (fheader['detector'] == 'UVIS') and (fheader['subarray'] == False):
        nchips = 2.0  # UVIS full-frame
    else:
        raise Exception('Image type is not defined.')
    #num_flts = fheader['NDRIZIM']/nchips
    num_flts = 1.0

    # Perform read noise correction
    if rdnoise < 0.0:
        amps = fheader['CCDAMP']
        rdnoise = np.zeros(len(amps))
        for namp in xrange(len(amps)):
            rdnoise[namp] = fheader['READNSE' + amps[namp]]
    rdnoise_corr = np.sqrt(num_flts *
                           (np.average(rdnoise) * opxscl / ipxscl)**2)

    # Measure the background and noise
    if (backmean < -1000.0) or (backsigma < 0.0):
        # read in the x/y center of the source
        xc, yc = np.loadtxt(coordfile, unpack=True, usecols=(0, 1))

        #create temporary image for bckgrd measurement that masks sources out to 60 pixels (assign a very low number)
        tmp_image = image + '.back.fits'
        shutil.copy(image, tmp_image)
        ff = pyfits.open(tmp_image, mode='update')
        maskim = ff[0].data
        maskim[circular_mask(maskim.shape,
                             60,
                             x_offset=(xc - naxis1 / 2.0),
                             y_offset=(yc - naxis2 / 2.0))] = -99999.0

        # Also mask out sources with zero effective exposure [WE ELIMINATE PIXELS WITHIN 20 OF IMAGE BORDER]
        maskim[:, 0:20] = -99999.0
        maskim[:, -20:] = -99999.0
        maskim[0:20, :] = -99999.0
        maskim[-20:, :] = -99999.0
        ff.close()

        # generate initial guess for lower/upper limits (use 10 sigma)
        if (backmean < -1000.0) | (backsigma < 0.0):
            initback = iraf.imstatistics(tmp_image+'[0]', fields='mode,stddev', lower = -100, upper = 10000, nclip=7, \
                                                              lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)

            if len(initback[0].split('  ')) != 2:
                raise Exception('Could not parse output from imstatistics.')
            else:
                llim = float(initback[0].split('  ')[0]) - 10.0 * float(
                    initback[0].split('  ')[1])
                ulim = float(initback[0].split('  ')[0]) + 10.0 * float(
                    initback[0].split('  ')[1])

    # measure mode and std dev of background using initial guess to constrain dynamic range
        if backmean < -1000.0:
            backstats=iraf.imstatistics(tmp_image+'[0]', fields=backmethod, lower = llim, upper = ulim, nclip=7, \
                                        lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)
            backmean = float(backstats[0])

        if backsigma < 0.0:
            backstats=iraf.imstatistics(tmp_image+'[0]', fields='stddev', lower = llim, upper = ulim, nclip=7, \
                                        lsigma=3.0, usigma=3.0, cache='yes', format='no',Stdout=1)
            backsigma = float(backstats[0])

        #remove temporary image
        #os.remove(tmp_image)

    print ' BACKGROUND =  ' + str(backmean)
    print ' BACKGROUND RMS =  ' + str(backsigma)

    # Case of no aperture size given (we select aperture sizes of: WFC3= 0.27 and 0.4"  && ACS=0.25 and 0.5")
    if apertures == '0.0':
        if instr == 'WFC3' and filter[1] == '1':
            apertures = str(0.27 / opxscl) + ',' + str(
                0.4 / opxscl)  # case of IR filters
        elif instr == 'WFC3' and filter[1] != '1':
            apertures = '5,' + str(0.4 / opxscl)  # case of UVIS filters
        elif instr == 'WFC':
            apertures = '5,16.66'
        else:
            raise Exception('UNKNOWN INSTRUMENT/FILTER')

    # Remove old phot output files
    file_query = os.access(outfile, os.R_OK)
    if file_query == True: os.remove(outfile)

    # Run phot
    iraf.phot.unlearn()  # reset daophot parameters to default values
    iraf.phot(image=image+'[0]', interactive='no', verify='no', coords=coordfile, output=outfile, fwhmpsf=fwhmpsf, \
           sigma=backsigma, readnoise=rdnoise_corr, itime=exptime, calgorithm=calgorithm, cbox=cbox, skyvalue=backmean, \
     apertures=apertures,zmag=dp_zmag, salgorithm='constant')  #annulus=annulus, dannulus=dannulus

    # Display results of daophot
    #iraf.display(tmp[0]+'.fits',1, zscale='no', zrange='no', z1=0, z2=100,ztrans='log')
    #iraf.tvmark(1,outfile,mark = 'circle', radii = 10, color = 206)

    #return outfile 		# return name of output catalog
    return backmean, backsigma  # return computed background stats for image