Example #1
File: red.py Project: beevageeva/tobs
def flatCorrection():
    print "FlatCorrection start"
    #put all other params to 'no'; they may be set by previous actions
    #WITH ccdproc
    #	iraf.ccdproc.setParam('flatcor', 'yes')
    #	iraf.ccdproc.setParam('fixpix', 'no')
    #	iraf.ccdproc.setParam('darkcor', 'no')
    #	iraf.ccdproc.setParam('illumcor', 'no')
    #	iraf.ccdproc.setParam('trim', 'no')
    #	iraf.ccdproc.setParam('overscan', 'no')
    #	iraf.ccdproc.setParam('zerocor', 'no')
    #	iraf.ccdproc.setParam('trimsec', '')
    #	iraf.ccdproc.setParam('biassec', '')
    #	#online
    #	iraf.ccdproc.setParam('output', '')
    for f in FILTERS:
        flatfilename = os.path.join(OUTPUTDIR, "flat", f, "FlatNorm.fits")
        if os.path.isfile(flatfilename):
            #			#WITH ccdproc
            #			iraf.ccdproc.setParam('flat', flatfilename)
            #			iraf.ccdproc.setParam("images", os.path.join(OUTPUTDIR, "object", f) + "/*.fits")
            #			iraf.ccdproc()
            with open(os.path.join(OUTPUTDIR, "object", f, "list")) as file1:
                for line in file1:
                    objfilename = line.strip()
                    if objfilename != "":
                        objFullfilename = os.path.join(OUTPUTDIR, "object", f,
                                                       line.strip())
                        iraf.imarith(objFullfilename, '/', flatfilename,
                                     objFullfilename)
        else:
            print "Flat file %s not present" % flatfilename
    print "FlatCorrection end"
Example #2
def quartz_divide(science_list,object_match):
    '''Divides science frames by user-selected quartz frames'''
    for obj in science_list:
        if len(object_match[obj]) > 1:
            qtzinpt = ''
            for qtz in object_match[obj]:
                qtzinpt += qtz +','
            iraf.imcombine(input=qtzinpt,output='tempquartz')
            iraf.imarith(operand1=obj,operand2='tempquartz',op='/',result='f'+obj)
            heditstr = 'Flat field images are '+qtzinpt[:-1]
            iraf.imdelete(images='tempquartz',go_ahead='yes',verify='no')
            if len(heditstr) > 65:
                nfields = int(len(heditstr)/65) #Declare int for py3 compatibility
                for ii in range(nfields+1):
                    writestr = heditstr[(ii*65):(ii+1)*65]
                    iraf.hedit(images='f'+obj,fields='flatcor'+str(ii),value=writestr,add='yes',verify='No')
            else:
                iraf.hedit(images='f'+obj,fields='flatcor',value=heditstr,add='yes',verify='No')
        else:
            iraf.imarith(operand1=obj,operand2=object_match[obj][0],op='/',result='f'+obj)
            heditstr = 'Flat field image is '+object_match[obj][0]
            if len(heditstr) > 65:
                nfields = int(len(heditstr)/65) #Declare int for py3 compatibility
                for ii in range(nfields+1):
                    writestr = heditstr[(ii*65):(ii+1)*65]
                    iraf.hedit(images='f'+obj,fields='flatcor'+str(ii),value=writestr,add='yes',verify='No')
            else:
                iraf.hedit(images='f'+obj,fields='flatcor',value=heditstr,add='yes',verify='No')
    return
Example #3
def imarith(operand1, op, operand2, result, doAirmass=False):
    from pyraf import iraf
    iraf.images()

    pars = iraf.imarith.getParList()
    iraf.imcombine.unlearn()

    try:
        os.remove(result)
    except:
        pass

    print "%s %s %s -> %s" % (operand1, op, operand2, result)
    iraf.imarith(operand1=operand1, op=op, operand2=operand2, result=result)
    iraf.imarith.setParList(pars)
    if doAirmass:
        # Adjust FITS header
        with pf.open(operand1) as f:
            am1 = f[0].header['airmass']
        with pf.open(operand2) as f:
            am2 = f[0].header['airmass']

        of = pf.open(result)
        of[0].header['airmass1'] = am1
        of[0].header['airmass2'] = am2
        of.writeto(result, clobber=True)
Example #4
def MEFarithOLD(MEF, image, out, op, result):

    if os.path.exists(out + '.fits'):
        os.remove(out + '.fits')
    for i in range(1, 88):
        header = pyfits.open(MEF + '.fits')
        extname = header[i].header['EXTNAME']
        if extname == 'DQ' or extname == 'VAR':
            iraf.imarith(operand1=MEF + '[' + str(i) + ']',
                         op='*',
                         operand2='1',
                         result=out)
        if extname == 'SCI':
            iraf.imarith(operand1=MEF + '[' + str(i) + ']',
                         op=op,
                         operand2=image,
                         result=out,
                         divzero=0.0)

    iraf.fxcopy(input=MEF + '[0],' + out, output=result)
    iraf.hedit(result + '[1]',
               field='EXTNAME',
               value='SCI',
               add='yes',
               verify='no')
    iraf.hedit(result + '[1]',
               field='EXTVER',
               value='1',
               add='yes',
               verify='no')
Example #5
def ExpNormalize(images, outbase="_n"):
			
	# Build the list of output image names
	out = [os.path.splitext(i) for i in images]	# Split off the extension
	out = [i[0] + outbase + '.fits' for i in out]	# Paste the outbase at the end of the filename 
												# and put the extension back on
	# Get a list of exposure times.
	exp_times = [GetHeaderKeyword(i, 'exptime') for i in images]
	exp_times = [str(e) for e in exp_times]	
	
	# run imarith to do the normalization
	iraf.imarith.unlearn()
	iraf.imarith.op = '/'
	iraf.imarith.mode = 'h'

	for i in range(len(images)):
		iraf.imarith.operand1 = images[i]
		iraf.imarith.operand2 = exp_times[i]
		iraf.imarith.result = out[i]
	
		iraf.imarith()

		# update the exptime keyword		
		iraf.hedit.unlearn()
		iraf.hedit.verify='no'
		iraf.hedit.show='yes'
		iraf.hedit.update='yes'
		iraf.hedit.images=out[i]
		iraf.hedit.fields='exptime'
		iraf.hedit.value=1
		iraf.hedit.mode='h'

		iraf.hedit(Stdout=1)

	return out
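The same exposure-time normalization can be expressed without IRAF at all; the following is a minimal sketch using astropy.io.fits (the EXPTIME keyword and the output-name convention are taken from the example above; exp_normalize_np is a hypothetical helper, not part of the original script):

import os
from astropy.io import fits

def exp_normalize_np(images, outbase="_n"):
    """Divide each image by its EXPTIME and reset the keyword to 1 (sketch)."""
    out = []
    for name in images:
        data, header = fits.getdata(name, header=True)
        exptime = float(header['EXPTIME'])        # same keyword the example reads
        header['EXPTIME'] = 1.0                   # mirrors the iraf.hedit step above
        outname = os.path.splitext(name)[0] + outbase + '.fits'
        fits.writeto(outname, data / exptime, header, overwrite=True)
        out.append(outname)
    return out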
Example #6
File: irproc.py Project: ih64/4U1543
def skySub(dfView):
    """
	given the dfview, and using the skyflat calculated in makeSkyFlat,
	subtract the skyflat from each image
	the output will be one sky-subtracted image for each image in the dfView
	the names of the sky-subtracted fits images have 's-' in front of them
	"""
    # grab the paths to images to be sky-subtracted
    images = dfView.file.values.tolist()
    # adding an 's-' in front of each file name, save to a separate list
    skySubImages = ["s-" + i[5:] for i in images]
    # grab the YYMMDD observation date from this view
    date = str(dfView.Date.values[0])

    # organize the input, skyflat, and output in iraf-friendly ways
    inputFiles = joinStrList(images)
    skyFlat = "scratch/" + date + "sky.fits"
    outputSkySub = joinStrList(skySubImages, scratch=True)
    iraf.imarith(
        inputFiles,
        "-",
        skyFlat,
        outputSkySub,
        divzero=0.0,
        hparams="",
        pixtype="",
        calctype="",
        verbose="yes",
        noact="no",
    )
    return
Example #7
File: red.py Project: beevageeva/tobs
def flatCorrection():
	print "FlatCorrection start"
	#put all other params to 'no'; they may be set by previous actions
	#WITH ccdproc
#	iraf.ccdproc.setParam('flatcor', 'yes')
#	iraf.ccdproc.setParam('fixpix', 'no')
#	iraf.ccdproc.setParam('darkcor', 'no')
#	iraf.ccdproc.setParam('illumcor', 'no')
#	iraf.ccdproc.setParam('trim', 'no')
#	iraf.ccdproc.setParam('overscan', 'no')
#	iraf.ccdproc.setParam('zerocor', 'no')
#	iraf.ccdproc.setParam('trimsec', '')
#	iraf.ccdproc.setParam('biassec', '')
#	#online
#	iraf.ccdproc.setParam('output', '')
	for f in FILTERS:	
		flatfilename = os.path.join(OUTPUTDIR, "flat", f, "FlatNorm.fits")
		if os.path.isfile(flatfilename):
#			#WITH ccdproc
#			iraf.ccdproc.setParam('flat', flatfilename)
#			iraf.ccdproc.setParam("images", os.path.join(OUTPUTDIR, "object", f) + "/*.fits")
#			iraf.ccdproc()
			with open(os.path.join(OUTPUTDIR, "object", f, "list")) as file1:
				for line in file1:
					objfilename = line.strip()
					if objfilename!="":
						objFullfilename = os.path.join(OUTPUTDIR, "object", f,	line.strip())
						iraf.imarith(objFullfilename, '/', flatfilename, objFullfilename)
		else:
			print "Flat file %s not present" % flatfilename
	print "FlatCorrection end"
Example #8
File: red.py Project: beevageeva/tobs
def createFlatFiles():
	import re
	print "CreateFlatFiles start"
	for f in FILTERS:
		if(os.listdir(os.path.join(OUTPUTDIR, "flat", f))):
			iraf.imcombine.setParam("input", os.path.join(OUTPUTDIR, "flat", f) + "/*.fits")
			flatFile = os.path.join(OUTPUTDIR, "flat", f , "Flat.fits")
			if os.path.exists(flatFile):
				print("flatFile %s alreday exists deleting"  % flatFile)
				os.remove(flatFile)
			iraf.imcombine.setParam("output", flatFile)
			#from doc:	
			#http://www.iac.es/sieinvens/siepedia/pmwiki.php?n=HOWTOs.PythonianIRAF
			#--> iraf.listpix(mode='ql')     # confirms parameter
			#--> iraf.listpix(mode='h')     # doesn't ask for parameter...@@
			iraf.imcombine(mode="h")
			#NORMALIZE
			#imstat
			res = iraf.imstat(flatFile, Stdout=1)
			print(res[0].strip()) 
			print(res[1].strip()) 
			resArray = re.split("\s+", res[1].strip())
			#max value
			#toDivValue = float(resArray[5])
			#meanValue
			toDivValue = float(resArray[2])
			flatNormFile = os.path.join(OUTPUTDIR, "flat", f , "FlatNorm.fits")
			if os.path.exists(flatNormFile):
				print("flatNormFile %s alreday exists deleting"  % flatNormFile)
				os.remove(flatNormFile)
			#divide by max value
			iraf.imarith(flatFile, '/', toDivValue, flatNormFile)
		else:
			print("NO FLAT FILES for filter %s PRESENT" %f)
	print "CreateFlatFiles end"
Example #9
 def cut_xy(self, x, y, band, width, name, norm=False):
     # width is in arcsec
     imgname = '%s_%s.fits' % (name, band)
     if os.path.exists(imgname):
         os.remove(imgname)
     width_pix = width / self.pixscale[band]
     xmin = int((x - width_pix / 2.))
     xmax = int((x + width_pix / 2.))
     ymin = int((y - width_pix / 2.))
     ymax = int((y + width_pix / 2.))
     # Enforce that the image have equal number of pixels in both dimensions
     npix = np.min([xmax - xmin, ymax - ymin])
     if xmax - xmin > npix:
         xmin += 1
     elif ymax - ymin > npix:
         ymin += 1
     self.xmin = xmin
     self.ymin = ymin
     iraf.imcopy('%s[%d:%d,%d:%d]' %
                 (self.images[band], xmin, xmax, ymin, ymax),
                 imgname,
                 verbose=False)
     if norm:
         imgsum = pyfits.getdata(imgname).ravel().sum()
         imgsum = np.abs(imgsum)
         iraf.imarith(imgname, '/', imgsum, 'temp.fits')
         os.remove(imgname)
         os.system('mv temp.fits %s' % imgname)
     return imgname
Example #10
 def sub_darks(self,data):
     for exp in data.keys():
         inputlist = data[exp]['raw']
         outputlist = [s[s.rfind('/')+1:s.find('.FIT')]+\
                           '_ds.fits'\
                           for s in inputlist]
         
         if self.darks[exp]['combined'] == None: self.gen_dark(exp)
         if len(inputlist) > 10:
             while len(inputlist) > 0:
                 inputstring = ','.join(inputlist[:10])
                 outputstring = ','.join(outputlist[:10])
                 iraf.imarith(inputstring,'-',\
                                  self.darks[exp]['combined'],\
                                  outputstring)
                 data[exp]['ds'] += outputlist[:10]
                 inputlist = inputlist[10:]
                 outputlist = outputlist[10:]
             
         else:
             iraf.imarith(','.join(inputlist),\
                              '-',\
                              self.darks[exp]['combined'],\
                              ','.join([s[s.rfind('/')+1:s.find('.FIT')]+\
                                            '_ds.fits' for s in inputlist]))
             data[exp]['ds'] += outputlist
Example #11
def make_flat(images,
              outflat,
              gain=1.0,
              rdnoise=0.0,
              xwindow=50,
              ywindow=50,
              hmin=0,
              hmax=65535,
              lowclip=0.7,
              highclip=1.3):
    '''Construct flat field from individual frames'''

    flatimages = ','.join(images)
    iraf.flatcombine(flatimages,
                     output='flat1',
                     combine='median',
                     reject='avsigclip',
                     ccdtype='',
                     process=no,
                     subsets=no,
                     delete=no,
                     clobber=no,
                     scale='median',
                     lsigma=3.0,
                     hsigma=3.0,
                     gain=gain,
                     rdnoise=rdnoise)
    iraf.fmedian('flat1', 'flat2', xwindow, ywindow, hmin=hmin, hmax=hmax)
    iraf.imarith('flat1', '/', 'flat2', outflat)
    iraf.imreplace(outflat, 1.0, lower=INDEF, upper=lowclip)
    iraf.imreplace(outflat, 1.0, lower=highclip, upper=INDEF)

    return
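For readers without IRAF, a rough numpy/scipy sketch of the fmedian / imarith / imreplace sequence above; scipy.ndimage.median_filter stands in for iraf.fmedian (an assumption about acceptable edge handling), and the clipping to 1.0 mirrors the two imreplace calls:

import numpy as np
from astropy.io import fits
from scipy.ndimage import median_filter

def make_flat_np(combined_flat, outflat, xwindow=50, ywindow=50,
                 lowclip=0.7, highclip=1.3):
    """Divide a combined flat by its median-smoothed copy and clip outliers to 1.0 (sketch)."""
    data, header = fits.getdata(combined_flat, header=True)
    smooth = median_filter(data, size=(ywindow, xwindow))   # analogue of iraf.fmedian
    flat = data / smooth
    flat[(flat < lowclip) | (flat > highclip)] = 1.0        # analogue of iraf.imreplace
    fits.writeto(outflat, flat, header, overwrite=True)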
Example #12
    def mk24noiseimage(self):
        os.chdir(self.imagepath24)

        # remove temp images if they exist
        os.system('rm temp*.fits')

        # multiply image by exptime x gain x coverage map
        scale=FLUXCONV*GAIN*EXPT
        iraf.imarith(operand1=self.sex_image,op='*',operand2=scale,result='temp1.fits')
        iraf.imarith(operand1='temp1.fits',op='*',operand2=self.cov_image,result='temp2.fits')
        
        # smooth image using iraf.images.imfilter.gauss
        iraf.gauss(input='temp2.fits',output='temp3.fits',sigma=2,nsigma=6)

        # take sqrt
        iraf.imfunction(input='temp3.fits',output='temp4.fits',function='sqrt')
        # divide by exptime x gain x coverage map

        iraf.imarith(operand1='temp4.fits',op='/',operand2=scale,result='temp5.fits')
    
        # multiply image and sigma image by 100
        s=self.prefix+'-scalednoise.fits'
        iraf.imarith(operand1='temp5.fits',op='*',operand2=100,result=s)
        s=self.prefix+'-scaled24.fits'
        iraf.imarith(operand1=self.sex_image,op='*',operand2=100,result=s)
Example #13
def makeflats():
    filters = ['J', '1113', '1184']
    flatfiles = ['@FlatfilesJ', '@Flatfiles1113', '@Flatfiles1184']
    for i in range(len(filters)):
        filt = filters[i]
        files = flatfiles[i]
        #files='*'+filt+'*.fits'
        flat = 'flat' + filt
        iraf.imcombine(files,
                       output=flat,
                       combine="median",
                       reject="minmax",
                       scale='median',
                       weight='exposure',
                       statsec="[400:600,400:600]",
                       nlow=2,
                       nhigh=4)
        input = flat + '.fits[300:800,300:800]'
        stats = iraf.imstatistics(input,
                                  fields="mean",
                                  lower=1,
                                  format=0,
                                  Stdout=1)
        print 'stats = ', stats, stats[0]
        ave = float(stats[0])
        iraf.imarith(flat, "/", ave, flat)  #normalize flat
Example #14
def ExpNormalize(images, outbase="_en"):
			
	# Build the list of output image names
	out = [os.path.splitext(i) for i in images]	# Split off the extension
	out = [i[0] + outbase + '.fits' for i in out]	# Paste the outbase at the end of the filename 
												# and put the extension back on
	# Get a list of exposure times.
	exp_times = [GetHeaderKeyword(i, 'exptime') for i in images]
	exp_times = [str(e) for e in exp_times]	
	
	# run imarith to do the normalization
	iraf.imarith.unlearn()
	iraf.imarith.op = '/'
	iraf.imarith.mode = 'h'

	for i in range(len(images)):
		iraf.imarith.operand1 = images[i]
		iraf.imarith.operand2 = exp_times[i]
		iraf.imarith.result = out[i]
	
		iraf.imarith()

		# update the exptime keyword		
		iraf.hedit.unlearn()
		iraf.hedit.verify='no'
		iraf.hedit.show='yes'
		iraf.hedit.update='yes'
		iraf.hedit.images=out[i]
		iraf.hedit.fields='exptime'
		iraf.hedit.value=1
		iraf.hedit.mode='h'

		iraf.hedit(Stdout=1)

	return out
Example #15
def check_flats():

    """
    Divide each flat-field by the average.

    Individually inspect each 2D spectrum and if see features need to discard

    """

    cwd = os.getcwd()
    print 'Current working directory is ' + cwd

    with open('input.list','w') as f:
        for name in os.listdir(cwd):
            if (name.endswith('.fit')) & (name.startswith('cr')):
                f.write( name + '[1]' + '\n')

    with open('output.list','w') as f:
        for name in os.listdir(cwd):
            if (name.endswith('.fit')) & (name.startswith('cr')):
                if os.path.exists('check_' + name):
                    os.remove('check_' + name)
                    print 'Deleting file ' + 'check_' + name
                f.write( 'check_' + name + '\n')

    iraf.imarith.setParam('operand1', '@input.list')
    iraf.imarith.setParam('operand2', 'Flat.fits')
    iraf.imarith.setParam('op','/')
    iraf.imarith.setParam('result','@output.list')

    iraf.imarith()

    return None
Example #16
def vega(spectrum, band, path, hlineinter, telluric_shift_scale_record, log, over, airmass=1.0):
    """
    Use iraf.telluric to remove H lines from standard star, then remove
    normalization added by telluric with iraf.imarith.

    The extension for vega_ext.fits is specified from band (from header of
    telluricfile.fits).

    Args:
        spectrum (string): filename from 'telluricfile'.
        band: from telluricfile .fits header. Eg 'K', 'H', 'J'.
        path: usually top directory with Nifty scripts.
        hlineinter (boolean): Interactive H line fitting. Specified with -i at
                              command line. Default False.
        airmass: from telluricfile .fits header.
        telluric_shift_scale_record: "pointer" to telluric_hlines.txt.
        log: path to logfile.
        over (boolean): overwrite old files. Specified at command line.

    """
    if band=='K':
        ext = '1'
        sample = "21537:21778"
        scale = 0.8
    if band=='H':
        ext = '2'
        sample = "16537:17259"
        scale = 0.7
    if band=='J':
        ext = '3'
        sample = "11508:13492"
        scale = 0.885
    if band=='Z':
        ext = '4'
        sample = "*"
        scale = 0.8
    if os.path.exists("tell_nolines.fits"):
            if over:
                os.remove("tell_nolines.fits")
                tell_info = iraf.telluric(input=spectrum+"[1]", output='tell_nolines', cal= RUNTIME_DATA_PATH+'vega_ext.fits['+ext+']', xcorr='yes', tweakrms='yes', airmass=airmass, inter=hlineinter, sample=sample, threshold=0.1, lag=3, shift=0., dshift=0.05, scale=scale, dscale=0.05, offset=0., smooth=1, cursor='', mode='al', Stdout=1)
            else:
                logging.info("Output file exists and -over not set - skipping H line correction")
    else:
        tell_info = iraf.telluric(input=spectrum+"[1]", output='tell_nolines', cal= RUNTIME_DATA_PATH+'vega_ext.fits['+ext+']', xcorr='yes', tweakrms='yes', inter=hlineinter, airmass=airmass, sample=sample, threshold=0.1, lag=3, shift=0., dshift=0.05, scale=scale, dscale=0.05, offset=0., smooth=1, cursor='', mode='al', Stdout=1)

    # need this loop to identify telluric output containing warning about pix outside calibration limits (different formatting)
    if "limits" in tell_info[-1].split()[-1]:
        norm=tell_info[-2].split()[-1]
    else:
        norm=tell_info[-1].split()[-1]

    if os.path.exists("final_tel_no_hlines_no_norm.fits"):
        if over:
            os.remove("final_tel_no_hlines_no_norm.fits")
            iraf.imarith(operand1='tell_nolines', op='/', operand2=norm, result='final_tel_no_hlines_no_norm', title='', divzero=0.0, hparams='', pixtype='', calctype='', verbose='yes', noact='no', mode='al')
        else:
            logging.info("Output file exists and -over not set - skipping H line normalization")
    else:
        iraf.imarith(operand1='tell_nolines', op='/', operand2=norm, result='final_tel_no_hlines_no_norm', title='', divzero=0.0, hparams='', pixtype='', calctype='', verbose='yes', noact='no', mode='al')
Example #17
 def unit_converter(self):
     '''
     Converts DRZ images from units of counts/sec to counts
     '''
     
     for image, exptime in zip(self.imlist, self.explist):
         iraf.imarith(operand1=image + '[1]', op='*', operand2=exptime, \
                      result=image[:9] + '_counts.fits')
Example #18
def custom1(filename): # for NACO timing mode cubes - removes horizontal banding
    #iraf.imarith(filename,'-','dark','temp')
    iraf.imarith(filename,'/','flatK','temp')
    im = pyfits.getdata('temp.fits')
    med = median(im.transpose())
    out = ((im).transpose()-med).transpose()
    (pyfits.ImageHDU(out)).writeto("temp2.fits",clobber=True)
    iraf.imdel('temp')
    iraf.imcopy('temp2[1]','temp')
Example #19
 def test_imarith(self):
     iraf.imarith('dev$pix', '/', '1', 'image.real', pixtype='r')
     with fits.open('image.real.fits') as f:
         assert f[0].header['BITPIX'] == -32
         assert f[0].data.shape == (512, 512)
     iraf.imarith('dev$pix', '/', '1', 'image.dbl', pixtype='d')
     with fits.open('image.dbl.fits') as f:
         assert f[0].header['BITPIX'] == -64
         assert f[0].data.shape == (512, 512)
Example #20
    def test_hedit(self):
        if os.path.exists('image.real.fits'):
            os.remove('image.real.fits')

        iraf.imarith('dev$pix', '/', '1', 'image.real', pixtype='r')
        iraf.hedit('image.real', 'title', 'm51 real', verify=False,
                   Stdout="/dev/null")
        with fits.open('image.real.fits') as f:
            assert f[0].header['OBJECT'] == 'm51 real'
Example #21
 def normalise(self, out=None):
     """
     Normalise the image.
     """
     if out is None:
         name, ext = os.path.splitext(self.filename)
         out = name + "_norm" + ext
     iraf.imarith(self.filename, "/", self.MEAN, out)
     return out
Example #22
def run_scldark(input):
    iraf.image(_doprint=0)
    iraf.image.imutil(_doprint=0)
    scl=float(input[1])/float(input[2])
#    print input[1],scl
    iraf.imarith(input[0],"*",scl,input[3],divzero="0.0",hparams="",pixtype="",calctype="",verbose="no",noact="no")
    iraf.hedit(input[3],"EXPTIME",input[1], add="yes", addonly="yes", delete="no", verify="no", show="no", update="yes")
    iraf.hedit(input[3],"DARKTIME",input[1], add="yes", addonly="yes", delete="no", verify="no", show="no", update="yes")
    iraf.hedit(input[3],"EXPOSURE","", add="no", addonly="no", delete="yes", verify="no", show="no", update="yes")
Example #23
 def test_imarith(self):
     iraf.imarith('dev$pix', '/', '1', 'image.real', pixtype='r')
     with fits.open('image.real.fits') as f:
         assert f[0].header['BITPIX'] == -32
         assert f[0].data.shape == (512, 512)
     iraf.imarith('dev$pix', '/', '1', 'image.dbl', pixtype='d')
     with fits.open('image.dbl.fits') as f:
         assert f[0].header['BITPIX'] == -64
         assert f[0].data.shape == (512, 512)
Example #24
def DarkSubtract(images, cal_path, outbase="_d"):
	
	# Get a list of dark images
	darks = [FindDarkFrame(i, cal_path) for i in images]
	out = [os.path.splitext(i)[0]+outbase+'.fits' for i in images]

	iraf.imarith.unlearn() # initial imarith setup
	iraf.imarith.mode='h'

	print "\n******************"
	print "Dark Subtracting: "
	print "******************"
	for i in range(len(images)):

		iexp_time = GetHeaderKeyword(images[i], 'exptime')
		dexp_time = GetHeaderKeyword(darks[i], 'exptime')

		# If exposure times don't match, scale the dark and fill in the operand2 field
		if iexp_time != dexp_time:


			iraf.imarith.operand1 = darks[i]
			iraf.imarith.operand2 = iexp_time/dexp_time	
			iraf.imarith.op = '*'

			# Create temporary dark filename
			tdark = os.path.join(os.path.dirname(darks[i]),'tmpdark')

			# If old scaled dark is hanging around, remove it.
			if os.path.exists(tdark+'.fits'):
				os.remove(tdark+'.fits')

			iraf.imarith.result = tdark

			try: 
				iraf.imarith()
			except iraf.IrafError, e:
				print "Iraf exception creating scaled dark"
				print e
		
			iraf.imarith.operand2 = tdark

		else: 
			iraf.imarith.operand2 = darks[i]

		# Perform the dark subtraction
		iraf.imarith.operand1 = images[i]
		iraf.imarith.op = '-'
		iraf.imarith.result = out[i]
		print iraf.imarith.operand1 + ' - ' + iraf.imarith.operand2

		try :
			iraf.imarith()
		except iraf.IrafError, e:
			print "Iraf exception subtracting dark frame"
			print e
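The exposure-time scaling logic above boils down to a single ratio; a minimal astropy sketch of one dark subtraction (GetHeaderKeyword is replaced by a direct header read, and dark_subtract_one is a hypothetical name, not part of the original module):

from astropy.io import fits

def dark_subtract_one(image_file, dark_file, out_file):
    """Scale the dark to the image exposure time, then subtract it (sketch)."""
    img, ihdr = fits.getdata(image_file, header=True)
    dark, dhdr = fits.getdata(dark_file, header=True)
    scale = float(ihdr['EXPTIME']) / float(dhdr['EXPTIME'])   # iexp_time / dexp_time above
    fits.writeto(out_file, img - dark * scale, ihdr, overwrite=True)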
Example #25
def makeTelluricCorrection(
    telluricDirectory, path, continuuminter, hlineinter, tempInter, hline_method="vega", spectemp="",
    mag="", log="test.log", over=False):
    """FLUX CALIBRATION
    Consists of this start function and six required functions at the end of
    this file.
    """
    """iraf.gemini(_doprint=0, motd="no")
    iraf.gnirs(_doprint=0)
    iraf.imutil(_doprint=0)
    iraf.onedspec(_doprint=0)
    iraf.nsheaders('nifs',Stdout='/dev/null')"""
    # Overview of Telluric Correction procedure:
    # We make a telluric correction by:
    # Remove H-lines from combined 1D standard star spectrum.
    # Divide the H-line corrected standard spectrum by the continuum fit.
    # We apply a telluric correction by:
    # Dividing the cube by the correction spectrum (with iraf.telluric) to figure out the shift and scaling.
    # Dividing again by the continuum to add a continuum shape back in.
    # Telluric correction done.


    # Overview of flux calibration procedure:
    # Make a blackbody spectrum.
    # Scale to the observed magnitude of the standard.
    # Multiply telluric corrected target spectrum by this scaled blackbody.
    # Done!
    iraffunctions.chdir(telluricDirectory)

    logging.info('I am starting to create telluric correction spectrum and blackbody spectrum')
    logging.info('I am starting to create telluric correction spectrum and blackbody spectrum ')

    # Open the combined extracted 1D spectrum.
    try:
        combined_extracted_1d_spectra = str(open('telluricfile', 'r').readlines()[0]).strip()
    except:
        logging.info("No telluricfile found in " + str(telluricDirectory) + "Skipping telluric correction and flux calibration.")
        return
    if not os.path.exists('scienceMatchedTellsList'):
        logging.info("No scienceMatchedTellsList found in " + str(telluricDirectory))
        return
    telheader = astropy.io.fits.open(combined_extracted_1d_spectra+'.fits')
    grating = telheader[0].header['GRATING'][0]

    # Get standard star spectral type, teff, and magnitude from the interwebs. Go forth, brave parser!
    getStandardInfo(path, mag, grating, spectemp)

    hLineCorrection(combined_extracted_1d_spectra, grating, path, hlineinter, tempInter, hline_method, log, over)

    # Fit a continuum from the standard star spectrum, saving both continuum and continuum divided standard spectrum.
    fitContinuum(continuuminter, tempInter, grating)
    # Divide the standard star spectrum by the continuum to normalize it.
    if os.path.exists("telluricCorrection.fits"):
        os.remove("telluricCorrection.fits")
    iraf.imarith('final_tel_no_hlines_no_norm', "/", 'fit', result='telluricCorrection',title='',divzero=0.0,hparams='',pixtype='',calctype='',verbose='no',noact='no',mode='al')
Example #26
def MatchNSubtract(TargetImg,Template,OutputImage,fitgeometry="general"):
    """ Creates OutputImage =  TargetImg - Template after scaling and matching Template to TargetImg.
    fitgeometry can be set to 'rotate' when the Template is also TIRSPEC data
    Otherwise if Template is 2MASS or other instrument set it as 'general'  """
    
    AlignedImg = os.path.splitext(TargetImg)[0]+"_"+os.path.basename(Template)
    AlignedImg = os.path.splitext(AlignedImg)[0][:115]+'.fits' # Reduce filename length for iraf geopmap
    TransformDBfile = AlignImage(TargetImg,Template,AlignedImg,fitgeometry=fitgeometry)
    
    # Now get the Good sky region coordinates
    SkyCoordsFile = os.path.splitext(TargetImg)[0]+'_BlankSky.coo'
    if not os.path.isfile(SkyCoordsFile) :
        iraf.display(TargetImg,1)
        print ('For taking coordinates of good sky. Press _x_ over blank sky areas.')
        imx=iraf.imexam(Stdout=1)
        with open(SkyCoordsFile,'w') as foo :    #Creating blank sky coords files
            for line in imx :               
                foo.write(line.split()[0] +'  '+line.split()[1]+'\n')

    # Now get the regions in the image whose brightness has to be cancelled by scaling
    FluxCoordsFile = os.path.splitext(TargetImg)[0]+'_FluxRegions.coo'
    if not os.path.isfile(FluxCoordsFile) :
        iraf.display(TargetImg,1)
        print ('Press _x_ over areas you want to minimise the residual flux after subtraction')
        imx=iraf.imexam(Stdout=1)
        with open(FluxCoordsFile,'w') as foo :    #Creating Flux ares which we have to remove in subtraction
            for line in imx :               
                foo.write(line.split()[0] +'  '+line.split()[1]+'\n')

    #Now we first have to remove the background from both images.
    TargetSkySubtractedFile = os.path.splitext(TargetImg)[0]+'_SkyS.fits'
    if not os.path.isfile(TargetSkySubtractedFile):
        skyvalue = SkySubtractImage(TargetImg,TargetSkySubtractedFile,SkyCoordsFile)
    else:
        print('Warning: Using old {0} file'.format(TargetSkySubtractedFile))

    AlignedSkySubtractedFile = os.path.splitext(AlignedImg)[0]+'_SkyS.fits'
    if not os.path.isfile(AlignedSkySubtractedFile):
        skyvalue = SkySubtractImage(AlignedImg,AlignedSkySubtractedFile,SkyCoordsFile)
    else:
        print('Warning: Using old {0} file'.format(AlignedSkySubtractedFile))

    #We shall now extract the total flux in each tile from both images
    TargetFluxinTiles = ExtractTiles(TargetSkySubtractedFile,FluxCoordsFile,Summeryfunction=np.sum,hsize=7*1.5)
    TemplateFluxinTiles = ExtractTiles(AlignedSkySubtractedFile,FluxCoordsFile,Summeryfunction=np.sum,hsize=7*1.5)
    
    def DiffSquareSum(x):
        return np.sum([(targetF - x*templateF)**2 for targetF,templateF in zip(TargetFluxinTiles,TemplateFluxinTiles)])
    
    res = scipy.optimize.minimize_scalar(DiffSquareSum)
    Scale = res.x
    print('Scaling to match the fluxes is {0}'.format(Scale))
    iraf.imarith(operand1=AlignedSkySubtractedFile,op="*",operand2=Scale,result=os.path.splitext(AlignedSkySubtractedFile)[0]+'M.fits')

    iraf.imarith(operand1=TargetSkySubtractedFile,op="-",operand2=os.path.splitext(AlignedSkySubtractedFile)[0]+'M.fits',result=OutputImage)
Example #27
	def get_median(self):
		'''
		Calculates the median subtracted image used to compute the pixel mask
		'''
		iraf.median(input = self.image, output = 'tmp_med.fits', 
					xwindow = 40, ywindow = 40, verbose ='No')

		iraf.imarith(operand1 = self.image, operand2 = 'tmp_med.fits[0]', 
					 op = '-', result = self.output + '_sub.fits')
		
		os.remove('tmp_med.fits')
Example #28
def imarith(operand1, op, operand2, result):
    from pyraf import iraf
    iraf.images()

    pars = iraf.imarith.getParList()
    iraf.imcombine.unlearn()

    print "%s %s %s -> %s" % (operand1, op, operand2, result)
    iraf.imarith(operand1=operand1, op=op, operand2=operand2, result=result)

    iraf.imarith.setParList(pars)
Example #29
def FlattenData():
    filters = ['J', '1113', '1184']
    for filt in filters:
        s = 'm*' + filt + '*.fits'
        files = glob.glob(s)
        flat = 'flat' + filt
        for file in files:
            out = 'f' + file
            print file, " -> ", out
            iraf.imarith(file, '/', flat, out)
            iraf.display(out, 1)
Example #30
def mask_norm(mask):
    # Normalizes the mask by dividing it by its maximum pixel value.
    # Test if the trimmed mask exists:
    if os.path.isfile(mask):
        iraf.cd(os.path.dirname(mask))
        hdulist = pyfits.open(mask)
        imdata = hdulist[0].data
        hdulist.close()
        outname = mask.replace('_trimmed.fits', '_normed.fits')
        print imdata.max()
        iraf.imarith(os.path.basename(mask), "/", imdata.max(), os.path.basename(outname))
Example #31
    def test_hedit(self):
        if os.path.exists('image.real.fits'):
            os.remove('image.real.fits')

        iraf.imarith('dev$pix', '/', '1', 'image.real', pixtype='r')
        iraf.hedit('image.real',
                   'title',
                   'm51 real',
                   verify=False,
                   Stdout="/dev/null")
        with fits.open('image.real.fits') as f:
            assert f[0].header['OBJECT'] == 'm51 real'
Example #32
File: rcred.py Project: rswalters/sedmpy
def get_masked_image(img):

    # Running IRAF
    iraf.noao(_doprint=0)
    mask = make_mask_cross(img)
    masked = img.replace(".fits", "_masked.fits")

    if (os.path.isfile(masked)):
        os.remove(masked)
    iraf.imarith(img, "*", mask, masked)

    return masked
Example #33
def MEFarith(MEF, image, op, result):

    if os.path.exists(result):
        os.remove(result)
    iraf.fxcopy(input=MEF+'[0]', output=result)
    for i in range(1,88):
        iraf.fxinsert(input=MEF+'['+str(i)+']', output=result+'['+str(i)+']', groups='', verbose = 'no')
    for i in range(1,88):
        header = astropy.io.fits.open(result)
        extname = header[i].header['EXTNAME']
        if extname == 'SCI':
            iraf.imarith(operand1=result+'['+str(i)+']', op=op, operand2 = image, result = result+'['+str(i)+', overwrite]', divzero = 0.0)
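A hedged astropy equivalent of the same loop over a multi-extension FITS file, operating only on the SCI extensions as the example does (mef_arith_np is a hypothetical helper; only the four basic operators are handled):

from astropy.io import fits

def mef_arith_np(mef_file, image_file, op, result_file):
    """Apply op to every SCI extension of a MEF file; other extensions are copied unchanged (sketch)."""
    operand = fits.getdata(image_file)
    ops = {'+': lambda a, b: a + b, '-': lambda a, b: a - b,
           '*': lambda a, b: a * b, '/': lambda a, b: a / b}
    with fits.open(mef_file) as hdulist:
        for hdu in hdulist[1:]:
            if hdu.header.get('EXTNAME') == 'SCI':
                hdu.data = ops[op](hdu.data, operand)
        hdulist.writeto(result_file, overwrite=True)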
Example #34
def select_PSF(ID,psfdir,focus,xc,yc,hsize=50):
    """Function helper to get psf image from focus value"""
    if focus<=-6.5:
        focus=-6.
    if focus<=-8.0:
        focus=-10.
    psf_cat="%s/TinyTim_f%i.cat"%(psfdir,round(focus,0))
    psf_img="%s/TinyTim_f%i.fits"%(psfdir,round(focus,0))
    xs,ys,num=gfh.get_sex_pars(xc,yc,rmax=330,catfile=psf_cat,psf=True)
    X,Y=xs[num],ys[num]
    iraf.imarith("%s[%i:%i,%i:%i]"%(psf_img,X-hsize,X+hsize,Y-hsize,Y+hsize),'*',1.0,'psf.fits')
    return
Example #35
File: rcred.py Project: scizen9/kpy
def get_masked_image(img):

    # Running IRAF
    iraf.noao(_doprint=0)  
    mask = make_mask_cross(img)    
    masked = img.replace(".fits", "_masked.fits")
    
    if (os.path.isfile(masked)):
        os.remove(masked)
    iraf.imarith(img, "*", mask, masked)
    
    return masked
Example #36
def debias_flats(flats, master_bias='master_bias.fits'):
    """
    Subract master bias from flats
    :param flats (str[][]): Pairs of flat frames taken at different exposure levels.
    Output for use in calc_gain (analysis.py)
    """
    for pair in flats:
        [flat_1, flat_2] = pair
        # Subtract bias from both images
        iraf.imarith(operand1=flat_1, operand2=master_bias,
                     op='-', result=flat_1[:-5]+'_debiased.fits')
        iraf.imarith(operand1=flat_2, operand2=master_bias,
                     op='-', result=flat_2[:-5]+'_debiased.fits')
Example #37
def psfconv(df_image,psf):
    print "\n************ Running the psf convolution steps ************\n"

    iraf.imdel('_model.fits')
    iraf.imdel('_model_4.fits')
    iraf.imdel('_res*.fits')
    iraf.imdel('_psf*.fits')
    iraf.imdel('_df_sub')
    
    'subtract the sky value from the dragonfly image header'
    try:
        df_backval = fits.getheader(df_image)['BACKVAL']
        iraf.imarith('%s'%df_image,'-','%s'%df_backval,'_df_sub')
    except:
        print "WARNING: No BACKVAL to subtract!  Skipping the background subtraction..."
        iraf.imcopy('%s'%df_image,'_df_sub.fits')
    
    ##### subtract the background from the cfht image?
    
    'convolve the model with the Dragonfly PSF'
    if usemodelpsf:
        makeallisonspsf()
        psf = './psf/psf_static_fullframe.fits'
        
    if verbose:
        print 'VERBOSE:  Using %s for the psf convolution.'%psf
        
    'resample the PSF by a factor of 4'
    iraf.magnify('%s'%psf,'_psf_4',4,4,interp="spline3")
    
    'this is just to retain the same total flux in the psf'
    iraf.imarith('_psf_4','*',16.,'_psf_4')
    
    'Convolve with the psf'
    # from scipy import signal
    # fluxmoddata,fluxmodheader = fits.getdata('_fluxmod_dragonfly.fits',header=True)
    # psfdata = fits.getdata('_psf_4.fits')
    # fluxmodheader['COMMENT']='convolved with '+'_psf_4.fits'
    # modeldata = signal.fftconvolve(fluxmoddata, psfdata)
    # print ""
    # print fluxmoddata.shape
    # print modeldata.shape
    # print ""
    # writeFITS(modeldata,fluxmodheader,'_model_4.fits')
    iraf.stsdas.analysis.fourier.fconvolve('_fluxmod_dragonfly','_psf_4','_model_4')
    
    'now after the convolution we can go back to the Dragonfly resolution'
    iraf.blkavg('_model_4','_model',4,4,option="average")
    
    
    return None
Example #38
def sub_bias(image, combined_bias, image_b):
# Import IRAF modules:
  iraf.images(_doprint=0)
  iraf.imutil(_doprint=0)
  parList = "bias_subtraction_imarith.par"
# Check input file and combined_bias frame exists before proceeding:
  if os.path.isfile(image) == True:
    if os.path.isfile(combined_bias) == True:
      if os.path.isfile(parList) == True:
# Subtract combined bias frame from input frame (object or flat)
# using IRAF task imarithmetic:
        iraf.imarith.setParList(ParList="bias_subtraction_imarith.par")
        iraf.imarith(operand1=image, operand2=combined_bias, result=image_b)
        print ' '
        print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
        print 'Bias frame ' + str(combined_bias) 
        print 'subtracted from input ' + str(image)
        print 'to create ' + str(image_b)
        print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
        print ' '
      else:
        print ' '
        print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
        print 'Bias frame subtraction IRAF .par file              ' 
        print str(parList)
        print 'does not exist. Exiting script.                    '
        print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
        print ' '
        print ' '
        sys.exit()
    else:
      print ' '
      print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
      print 'Combined bias frame                                ' 
      print str(combined_bias)
      print 'does not exist. Exiting script.                    '
      print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
      print ' '
      print ' '
      sys.exit()
  else:
    print ' '
    print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
    print 'Input frame                                        ' 
    print str(image)
    print 'does not exist. Exiting script.                    '
    print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
    print ' '
    print ' '
    sys.exit()
Example #39
def reduceimg(reduction_dir):
    # reduction_dir='/mnt/hgfs/Mikesdata/wise-dec2016/U1999'
    os.chdir(reduction_dir)
    masterdir = reduction_dir + 'Masters/'

    _createMasters(reduction_dir)
    santinizeOutputbdf()

    iraf.imarith(operand1='@llist',
                 op='-',
                 operand2=masterdir + 'Dark.fit',
                 result='@llist//-bd')
    os.system('rm ' + masterdir + 'combinedFlat.fit')
    os.system('rm ' + masterdir + 'combinedFlatNormed.fit')
    iraf.imarith(operand1=masterdir + 'Flat.fit',
                 op='-',
                 operand2=masterdir + 'Zero.fit',
                 result=masterdir + 'combinedFlat.fit')
    med = np.mean(fits.getdata(masterdir + 'combinedFlat.fit'))
    iraf.imarith(operand1=masterdir + 'combinedFlat.fit',
                 op='/',
                 operand2=med,
                 result=masterdir + 'combinedFlatNormed.fit')
    iraf.imarith(operand1='@llist//-bd',
                 op='/',
                 operand2=masterdir + 'combinedFlatNormed.fit',
                 result='@llist//-bdf')
Example #40
File: SCRIPT.py Project: dafh/stuff
def apply_imarith (op1,op,op2):
    if op1.find('fits')!= -1:
       operand = op1
       result = ''
    else:
        operand = '@' + op1
        result = operand
    iraf.imarith.operand1 = operand
    iraf.imarith.op = op
    iraf.imarith.operand2 = op2
    iraf.imarith.result = result
    #print operand,op,op2
    iraf.imarith(mode='h')
    print 'Applying %s to %s' % (op2,op1)
Example #41
def CombineImages(imglist,output,method='median',zero='none' ,scale='none',norm=False,
                  reject="avsigclip", statsec='[150:900,150:900]'):
    """ Combined the input list of images and write to output fits file. """
    iraf.imcombine.unlearn()
    imglistfname = os.path.splitext(output)[0]+'.comblist'
    with open(imglistfname,'w') as imgs2combinefile:
        imgs2combinefile.write('\n'.join(imglist)+'\n')
    # Now call iraf imcombine with zero scaling
    combineoutputfile = os.path.splitext(output)[0]+'_un.fits' if norm else output

    iraf.imcombine(input='@'+imglistfname, output=combineoutputfile, combine=method, 
                   reject=reject, statsec=statsec, scale=scale, zero=zero)
    if norm:
        mediancounts = np.median(fits.getdata(combineoutputfile))
        iraf.imarith(operand1=combineoutputfile,op='/',operand2=mediancounts,result=output)
Example #42
def DarkSubtract(images, cal_path, outbase="_d"):
	
	# Get a list of dark images
	darks = [FindDarkFrame(i, cal_path) for i in images]
	out = [os.path.splitext(i)[0]+outbase+'.fits' for i in images]

	iraf.imarith.unlearn() # initial imarith setup
	iraf.imarith.mode='h'

	print "\n******************"
	print "Dark Subtracting: "
	print "******************"
	for i in range(len(images)):

		iexp_time = GetHeaderKeyword(images[i], 'exptime')
		dexp_time = GetHeaderKeyword(darks[i], 'exptime')

		# If exposure times don't match, scale the dark and fill in the operand2 field
		if iexp_time != dexp_time:
			iraf.imarith.operand1 = darks[i]
			iraf.imarith.operand2 = iexp_time/dexp_time	
			iraf.imarith.op = '*'

			# Create temporary dark filename
			tdark = os.path.join(os.path.dirname(darks[i]),'tmpdark')
			iraf.imarith.result = tdark
			iraf.imarith()
		
			iraf.imarith.operand2 = tdark

		else: 
			iraf.imarith.operand2 = darks[i]

		# Perform the dark subtraction
		iraf.imarith.operand1 = images[i]
		iraf.imarith.op = '-'
		iraf.imarith.result = out[i]
		print iraf.imarith.operand1 + ' - ' + iraf.imarith.operand2
		iraf.imarith()

		# Remove the scaled dark
		if iexp_time != dexp_time:
			os.remove(tdark+'.fits')

	# Make sure out files actually exist 
	out = filter(os.path.exists, out)
	
	return out
Example #43
	def squareMyself(self):

		# Square the image
		newObjectName = self._Name.replace(".fits", "_sqrd.fits")

		# Check for output
		self.cleanOutputFiles(newObjectName)

		# Run iraf
		iraf.imarith(operand1=self._Name,
				operand2=self._Name,
				op="*",
				result=newObjectName
			)

		return newObjectName 
Example #44
    def get_median(self):
        '''
		Calculates the median subtracted image used to compute the pixel mask
		'''
        iraf.median(input=self.image,
                    output='tmp_med.fits',
                    xwindow=40,
                    ywindow=40,
                    verbose='No')

        iraf.imarith(operand1=self.image,
                     operand2='tmp_med.fits[0]',
                     op='-',
                     result=self.output + '_sub.fits')

        os.remove('tmp_med.fits')
Example #45
def zeropadfits(smfits, bigfits, padfits):
    """Pads smfits with zeros to match size of bigfits.
       Result is padfits, centered as was smfits.
       Assumes smfits & bigfits are squares w/ odd # of pixels across.
    """
    NY, NX = fits.getheader(bigfits)["NAXIS2"], fits.getheader(bigfits)["NAXIS1"]
    ny, nx = fits.getheader(smfits)["NAXIS2"], fits.getheader(smfits)["NAXIS1"]
    print "\nPadding 'smfits' at %ix%i to match 'bigfits' at %ix%i\n" % (nx, ny, NX, NY)
    center = (NY + 1) / 2
    border = ny / 2
    lo = center - border
    hi = center + border
    croprange = "[%d:%d,%d:%d]" % (lo, hi, lo, hi)

    imarith(bigfits, "*", 0, padfits)
    imcopy(smfits, padfits + croprange)
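The same zero-padding can be done in memory with numpy; a sketch under the docstring's assumption of square 2D images (zeropad_np is a hypothetical name):

import numpy as np
from astropy.io import fits

def zeropad_np(smfits, bigfits, padfits):
    """Embed the small image at the center of a zero array shaped like the big image (sketch)."""
    small = fits.getdata(smfits)
    big_shape = fits.getdata(bigfits).shape
    padded = np.zeros(big_shape, dtype=small.dtype)
    y0 = (big_shape[0] - small.shape[0]) // 2
    x0 = (big_shape[1] - small.shape[1]) // 2
    padded[y0:y0 + small.shape[0], x0:x0 + small.shape[1]] = small
    fits.writeto(padfits, padded, fits.getheader(smfits), overwrite=True)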
Example #46
def create_masterflat(WORK_DIR):
  print '\n + Creating masterflat\n'

  try: os.remove('masterflat.fits')
  except: pass

  # subtract masterbias from flats
  for flat in observations[WORK_DIR]['flats']:
    try: os.remove(flat+'.fits')
    except: pass
    iraf.imarith(operand1=observations[WORK_DIR]['ORIG_DIR']+flat, op='-', operand2='masterbias.fits', result=flat, Stdout="/dev/null")
  
  iraf.zerocombine(input=','.join([flat for flat in observations[WORK_DIR]['flats']]), output='masterflat.fits', combine='median', reject='none', ccdtype='', process='no')

  for flat in observations[WORK_DIR]['flats']:
    os.remove(flat+'.fits')
Example #47
def make_flat(images, outflat, gain=1.0, rdnoise=0.0, xwindow=50,
              ywindow=50, hmin=0, hmax=65535, lowclip=0.7, highclip=1.3):

    '''Construct flat field from individual frames'''
    
    flatimages=','.join(images)
    iraf.flatcombine(flatimages, output='flat1', combine='median', 
                     reject='avsigclip', ccdtype='', process=no, subsets=no,
                     delete=no, clobber=no, scale='median', lsigma=3.0,
                     hsigma=3.0, gain=gain, rdnoise=rdnoise)
    iraf.fmedian('flat1', 'flat2', xwindow, ywindow, hmin=hmin, hmax=hmax)
    iraf.imarith('flat1', '/',  'flat2', outflat)
    iraf.imreplace(outflat, 1.0, lower=INDEF, upper=lowclip)
    iraf.imreplace(outflat, 1.0, lower=highclip, upper=INDEF)

    return
Example #48
def trim_remove_bias(WhatToTrim):
    imstat = WhatToTrim + "[2098:2147,*]"
    mean_overscan = iraf.imstat(imstat, Stdout=1, fields="mean", format="no")

    single_file_new = WhatToTrim.replace(".fits",
                                         ".-overscan_will_be_removed.fits")
    iraf.imarith(WhatToTrim, "-", mean_overscan[0], single_file_new)

    old = single_file_new + "[51:2097,3:2063]"
    new = WhatToTrim.replace(".fits", ".trim_will_be_removed.fits")

    iraf.imcopy(old, new)

    os.remove(single_file_new)

    return new
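A numpy sketch of the same overscan-subtract-and-trim step; the section limits are the ones hard-coded above, converted from IRAF's 1-based inclusive [x1:x2,y1:y2] convention to 0-based numpy slices (trim_remove_bias_np is a hypothetical name):

from astropy.io import fits

def trim_remove_bias_np(filename, out_filename):
    """Subtract the mean of the overscan strip and keep only the trimmed science region (sketch)."""
    data, header = fits.getdata(filename, header=True)
    overscan = data[:, 2097:2147].mean()           # IRAF section [2098:2147,*]
    trimmed = (data - overscan)[2:2063, 50:2097]   # IRAF section [51:2097,3:2063]
    fits.writeto(out_filename, trimmed, header, overwrite=True)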
Example #49
File: reduction.py Project: chaorun/ycas
def normalize_flats(files):
    """ Normalize a set of flat files. 
    
    This function receives a list of flat files and creates a normalized
    version of each of them.
    The normalization is performed dividing the flat image by the mean
    value of the flat image. This mean is the result of applying imstat
    to each image.
    
    Args: 
        files: The names of the files corresponding to the flat images.
    
    """

    for fl in files:
        # Get the 'work' and 'normalized' names for the flat files to process.
        work_file = fl.replace("." + FIT_FILE_EXT, WORK_FILE_SUFFIX)

        norm_file = fl.replace("." + FIT_FILE_EXT, NORM_FILE_SUFFIX)

        # Getting statistics for flat file.
        try:
            flat_stats = iraf.imstat(work_file, fields=IMSTAT_MEAN, Stdout=1)
            flat_stats = flat_stats[IMSTAT_FIRST_VALUE]

            try:
                mean_value = float(flat_stats)

                # Normalize flat dividing flat by its mean value.
                iraf.imarith(work_file, '/', mean_value, norm_file)

            except iraf.IrafError as exc:
                logging.error(
                    "Error executing imarith: normalizing flat image: %s" %
                    (fl))
                logging.error("Iraf error is: %s" % (exc))

            except ValueError as ve:
                logging.error("Error calculating mean value for: %s" %
                              (flat_stats))
                logging.error("Error is: %s" % (ve))

        except iraf.IrafError as exc:
            logging.error(
                "Error executing imstat: getting stats for flat image: %s" %
                (fl))
            logging.error("Iraf error is: %s" % (exc))
Example #50
def diffDarOnOff(cleanDir1, cleanDir2):
    files1tmp = glob.glob(cleanDir1 + '/c????.fits')
    files2tmp = glob.glob(cleanDir2 + '/c????.fits')

    for f1 in files1tmp:
        cname1 = f1.split('/')[-1]

        for f2 in files2tmp:
            cname2 = f2.split('/')[-1]

            if (cname1 == cname2):
                outname = cname1.replace('c', 'diff')

                print 'IMARITH: %s - %s = %s' % (cname1, cname2, outname)
                if (os.path.exists(outname)):
                    iraf.imdelete(outname)
                iraf.imarith(f1, '-', f2, outname)
Example #51
    def _op(self, image, name, symbol, out):
        """
        Apply a particular operation on this image and another image.
        """
        if type(image) == Image:
            filename = image.filename
        else:
            filename = image
        if out is None:
            name1, ext1 = os.path.splitext(self.filename)
            name2, _ = os.path.splitext(filename)
            path1, name1 = os.path.split(name1)
            _, name2 = os.path.split(name2)
            out = os.path.join(path1, name1 + "_%s_"  % name + name2 + ext1)
        iraf.imarith(self.filename, symbol, filename, out)

        return out
Example #52
File: reduction.py Project: felgari/ycas
def normalize_flats(files):
    """ Normalize a set of flat files. 
    
    This function receives a list of flat files and creates a normalized
    version of each of them.
    The normalization is performed dividing the flat image by the mean
    value of the flat image. This mean is the result of applying imstat
    to each image.
    
    Args: 
        files: The names of the files corresponding to the flat images.
    
    """
    	
    for fl in files:
        # Get the 'work' and 'normalized' names for the flat files to process.
        work_file = fl.replace("." + FIT_FILE_EXT, WORK_FILE_SUFFIX)
        
        norm_file = fl.replace("." + FIT_FILE_EXT, NORM_FILE_SUFFIX)
        
        # Getting statistics for flat file.
        try:
            flat_stats = iraf.imstat(work_file, fields=IMSTAT_MEAN, Stdout=1)
            flat_stats = flat_stats[IMSTAT_FIRST_VALUE]    
            
            try:
                mean_value = float(flat_stats)
                                
                # Normalize flat dividing flat by its mean value.
                iraf.imarith(work_file, '/', mean_value, norm_file)
    			
            except iraf.IrafError as exc:
                logging.error("Error executing imarith: normalizing flat image: %s" %
                              (fl))
                logging.error("Iraf error is: %s" % (exc))
                
            except ValueError as ve:     
                logging.error("Error calculating mean value for: %s" % 
                              (flat_stats))
                logging.error("Error is: %s" % (ve))                      
    	
        except iraf.IrafError as exc:
            logging.error("Error executing imstat: getting stats for flat image: %s" %
                          (fl))
            logging.error("Iraf error is: %s" % (exc))       
Example #53
File: reduction.py Project: felgari/ycas
def reduce_flats(flat_files, masterbias_name):
    """Reduce the flat files received subtracting the masterbias indicated.
    
    Args:
        flat_files: The flat files to reduce
        
    """
    
    for ff in flat_files: 
        
        work_file = ff.replace(".fit", WORK_FILE_SUFFIX)
    
        try:
            # Create the work file subtracting the masterbias from the flat.
            iraf.imarith(ff, IMARITH_SUBTRACT, masterbias_name, work_file)
                
        except iraf.IrafError as exc:
            logging.error("Error in imarith. Subtracting masterbias to %s" % ff)
Example #54
def continuumReduce(imageName,imageHaName):

  avgScaleFactor = contsubfactor

  print("The scale factor is "+str(avgScaleFactor))
  
  #configure hedit
  iraf.hedit.add='yes'
  iraf.hedit.verify='no'
  
  iraf.imarith(imageName+'.fits',"*",avgScaleFactor,imageName+"_scaled.fits")
  iraf.imarith(imageHaName+'.fits',"-",imageName+"_scaled.fits",imageHaName+"cs.fits")
  iraf.hedit(imageHaName+'cs.fits','Rscale',avgScaleFactor) 
  
  #clear up the superfluous scaled image
  silentDelete(imageName+'_scaled.fits')

  print('The continuum subtracted image is '+ imageHaName+'cs.fits')
Example #55
def BiasSubtract(images, cal_path, outbase="_b"):
#"""
# 	Locate and subtract a bias frame from a list of input images#
#
# PARAMETERS:
# 	images - list of images to process
#	cal_path - path to calibration data
#
# OPTIONAL PARAMETERS
#	outbase - string to append to the end of the filename
#
# RETURNS:
#	List of bias subtracted images
#"""
	
	# Get a list of bias frames for these images.
	bias_frames = [FindBiasFrame(i, cal_path) for i in images]	
		
	# Build the list of output image names
	out = [os.path.splitext(i) for i in images]	# Split off the extension
	out = [i[0] + outbase + '.fits' for i in out]	# Paste the outbase at the end of the filename 
												# and put the extension back on
	# run imarith to do the bias subraction	
	iraf.imarith.unlearn()
	iraf.imarith.op = '-'
	iraf.imarith.mode = 'h'

	print "\n******************"
	print "Bias Subtracting: "
	print "******************"
	for i in range(len(images)):
		iraf.imarith.operand1 = images[i]
		iraf.imarith.operand2 = bias_frames[i]
		iraf.imarith.result = out[i]

		print images[i] + " - " + bias_frames[i]
		iraf.imarith()

    # Make sure that imarith actually created the output files.
    # Filter out any output files that don't exist.
	out = filter(os.path.exists, out)
        
	return out
Example #56
def flatten_spectra(input_file,output_file, error=None):

    iraf.fit1d(input_file,'tmp.fits',type='fit',interact=False,
               functio='legendre',order=2)
    iraf.imarith(input_file,'/','tmp.fits','tmp2.fits')

    hdu = pyfits.open('tmp2.fits')[0]
    header = hdu.header
    data = hdu.data
    t = np.r_[data.T,data.T,data.T]
    pyfits.PrimaryHDU(t.T,header).writeto('tmp3.fits')
    
    iraf.fit1d('tmp3.fits','tmp4.fits',type='fit',interact=False,
               functio='spline3',order=100,low_rej=2.,high_rej=2.)
    hdu2 = pyfits.open('tmp4.fits')[0]
    data2 = hdu2.data
    pyfits.PrimaryHDU(data2[:,data2.shape[1]/3.:data2.shape[1]*2./3.],
                      header).writeto('tmp5.fits')
    iraf.imarith('tmp5.fits','*','tmp.fits','tmp6.fits')
    d6 = pyfits.open('tmp6.fits')[0].data
    # d7 = d6/np.mean(d6,axis=1)[:,np.newaxis]
    # pyfits.PrimaryHDU(d7,header).writeto('tmp7.fits')
    iraf.imarith(input_file,'/','tmp6.fits',output_file)
#    os.system('rm tmp*.fits')
    
    return np.mean(d6,axis=1)[:,np.newaxis]