def specselfid(images, outimages, outpref, refimage=None, ystart='middlerow',
               rstep=3, clobber=False, logfile='salt.log', verbose=True):
    """Self-identify and rectify a list of SALT spectral images.

    Each 'SCI' extension is rectified against either its own data or, if
    *refimage* is given, the corresponding extension of the reference image.
    Any variance ('VAREXT') and bad-pixel ('BPMEXT') extensions linked from
    the SCI header are transformed with the same solution.

    images    -- input image list specifier (unpacked by saltio.argunpack)
    outimages -- explicit output list, or '' to build names from outpref
    outpref   -- prefix used to construct output names
    refimage  -- optional reference image for the identification
    ystart    -- starting row for the identification (e.g. 'middlerow')
    rstep     -- row step between identifications
    clobber   -- overwrite existing output files
    logfile   -- name of the log file
    verbose   -- verbosity flag (passed to logging machinery)

    NOTE(review): `debug`, `logging`, `saltio`, `saltkey`, and `selfid` are
    assumed to be defined at module level — confirm imports elsewhere in file.
    """
    with logging(logfile, debug) as log:

        # set up the variables
        infiles = []
        outfiles = []

        # Check the input images
        infiles = saltio.argunpack('Input', images)

        # create list of output files
        outfiles = saltio.listparse(
            'Outimages', outimages, outpref, infiles, '')

        # set up defaults: only keep the reference HDU if a usable
        # reference image was actually supplied
        if saltio.checkfornone(refimage) is not None:
            rhdu = saltio.openfits(refimage)
        else:
            refimage = None

        # read in rectify each image
        for img, oimg, in zip(infiles, outfiles):
            hdu = saltio.openfits(img)
            log.message(
                'Performing self-identification and rectification on %s' % img)
            for i in range(1, len(hdu)):
                if hdu[i].name == 'SCI':
                    # reference data: the image itself, or the same
                    # extension of the reference image
                    if refimage is None:
                        sdata = hdu[i].data
                    else:
                        sdata = rhdu[i].data
                    hdu[i].data = selfid(
                        hdu[i].data, sdata, ystart=ystart, rstep=rstep)
                    # apply the identical transform to the variance frame
                    if saltkey.found('VAREXT', hdu[i]):
                        varext = saltkey.get('VAREXT', hdu[i])
                        hdu[varext].data = selfid(
                            hdu[varext].data, sdata, ystart=ystart,
                            rstep=rstep)
                    # ...and to the bad pixel mask
                    if saltkey.found('BPMEXT', hdu[i]):
                        bpmext = saltkey.get('BPMEXT', hdu[i])
                        hdu[bpmext].data = selfid(
                            hdu[bpmext].data, sdata, ystart=ystart,
                            rstep=rstep)

            # write out the oimg
            saltio.writefits(hdu, oimg, clobber=clobber)
def flat(struct, fstruct):
    """Apply a flatfield to SALT CCD data.

    struct  -- HDU list holding the science frames (modified in place)
    fstruct -- HDU list holding the flatfield frames; extensions must
               align one-to-one with *struct*

    Every 'SCI' extension (or the sole extension of a single-HDU file) is
    divided by the matching flatfield extension.  When the science
    extension carries a 'VAREXT' pointer, the variance is propagated for
    the quotient S/F using

        Var(S/F) = (S/F)**2 * (Var_S/S**2 + Var_F/F**2)

    with Var_F taken from the flatfield's own variance extension when one
    exists, else 0.

    Returns the modified struct.
    """
    # Determine the number of extensions
    nextend = len(struct)

    # flat field the data
    for i in range(nextend):
        if struct[i].name == 'SCI' or len(struct) == 1:
            # Account for variance frames.  This must happen before the
            # science data are divided, since the formula uses the
            # original S.
            if saltkey.found('VAREXT', struct[i]):
                varext = saltkey.get('VAREXT', struct[i])
                if saltkey.found('VAREXT', fstruct[i]):
                    fvarext = saltkey.get('VAREXT', fstruct[i])
                    fvar = fstruct[fvarext].data
                else:
                    fvar = 0
                # BUG FIX: the leading ratio must be squared for correct
                # error propagation of a quotient; the original code
                # multiplied by (S/F) only once.
                ratio = struct[i].data / fstruct[i].data
                struct[varext].data = ratio ** 2 * (
                    struct[varext].data / struct[i].data ** 2 +
                    fvar / fstruct[i].data ** 2)

            # flatten the data
            struct[i].data = struct[i].data / fstruct[i].data

    return struct
def specselfid(images, outimages, outpref, refimage=None, ystart='middlerow',
               rstep=3, clobber=False, logfile='salt.log', verbose=True):
    """Run self-identification and rectification over a set of images.

    For every input image, each 'SCI' extension is rectified with
    ``selfid`` using either its own data or the matching extension of
    *refimage*; linked 'VAREXT' and 'BPMEXT' extensions get the same
    transformation.  Results are written to the corresponding output file.

    images/outimages/outpref -- input/output list specifiers
    refimage -- optional reference image; None means self-reference
    ystart   -- starting row for identification ('middlerow' by default)
    rstep    -- step in rows between identifications
    clobber  -- overwrite existing outputs
    logfile/verbose -- logging controls

    NOTE(review): relies on module-level `logging`, `debug`, `saltio`,
    `saltkey`, and `selfid` — confirm against the file's import section.
    """
    with logging(logfile, debug) as log:

        # set up the variables
        infiles = []
        outfiles = []

        # Check the input images
        infiles = saltio.argunpack('Input', images)

        # create list of output files
        outfiles = saltio.listparse('Outimages', outimages, outpref,
                                    infiles, '')

        # set up defaults
        if saltio.checkfornone(refimage) is not None:
            rhdu = saltio.openfits(refimage)
        else:
            refimage = None

        # read in rectify each image
        for img, oimg, in zip(infiles, outfiles):
            hdu = saltio.openfits(img)
            log.message(
                'Performing self-identification and rectification on %s' % img)
            for i in range(1, len(hdu)):
                if hdu[i].name == 'SCI':
                    # pick the reference data for this extension
                    if refimage is None:
                        sdata = hdu[i].data
                    else:
                        sdata = rhdu[i].data
                    hdu[i].data = selfid(hdu[i].data, sdata, ystart=ystart,
                                         rstep=rstep)
                    # transform the variance frame identically
                    if saltkey.found('VAREXT', hdu[i]):
                        varext = saltkey.get('VAREXT', hdu[i])
                        hdu[varext].data = selfid(hdu[varext].data, sdata,
                                                  ystart=ystart, rstep=rstep)
                    # transform the bad pixel mask identically
                    if saltkey.found('BPMEXT', hdu[i]):
                        bpmext = saltkey.get('BPMEXT', hdu[i])
                        hdu[bpmext].data = selfid(hdu[bpmext].data, sdata,
                                                  ystart=ystart, rstep=rstep)

            # write out the oimg
            saltio.writefits(hdu, oimg, clobber=clobber)
def saltgain(images, outimages, outpref, gaindb=None, usedb=False, mult=True,
             clobber=True, logfile='salt.log', verbose=True):
    """Gain-correct a list of SALT images.

    images/outimages/outpref -- input/output list specifiers
    gaindb  -- path to the gain database file (used when usedb is True)
    usedb   -- read gain values from *gaindb* instead of the headers
    mult    -- multiply (rather than divide) by the gain
    clobber -- overwrite existing output files
    logfile/verbose -- logging controls

    Raises SaltError if an image already carries the gain-correction
    housekeeping keyword.
    """
    # start logging
    with logging(logfile, debug) as log:

        # Check the input images
        infiles = saltio.argunpack('Input', images)

        # create list of output files
        outfiles = saltio.listparse('Outfile', outimages, outpref, infiles, '')

        # verify that the input and output lists are the same length
        saltio.comparelists(infiles, outfiles, 'Input', 'output')

        # read in the database file if usedb is true
        if usedb:
            gaindb = gaindb.strip()
            dblist = saltio.readgaindb(gaindb)
        else:
            dblist = []

        for img, oimg in zip(infiles, outfiles):
            # open the fits file
            struct = saltio.openfits(img)

            # identify instrument
            instrume, keyprep, keygain, keybias, keyxtalk, keyslot = \
                saltkey.instrumid(struct)

            # has file been prepared already?
            if saltkey.found(keygain, struct[0]):
                message = 'SALTGAIN: %s has already been gain-corrected' % img
                raise SaltError(message)

            # gain correct the data
            struct = gain(struct, mult=mult, usedb=usedb, dblist=dblist,
                          log=log, verbose=verbose)

            # housekeeping keywords recording what was done
            fname, hist = history(level=1, wrap=False,
                                  exclude=['images', 'outimages', 'outpref'])
            saltkey.housekeeping(struct[0], keygain,
                                 'Images have been gain corrected', hist)

            # write FITS file
            saltio.writefits(struct, oimg, clobber=clobber)
            saltio.closefits(struct)
def illum_cor(struct, mbox):
    """Remove large-scale illumination structure from an image.

    Each 'SCI' extension (or the only extension of a single-HDU file) is
    divided by a median-filtered copy of itself; any linked variance
    frame is divided by the same smooth model.

    struct -- HDU list to correct (modified in place)
    mbox   -- side length of the square median-filter box

    Returns the corrected struct.
    """
    single = (len(struct) == 1)
    for ext in range(len(struct)):
        if not (struct[ext].name == 'SCI' or single):
            continue
        # smooth image used as the illumination model
        illum = median_filter(struct[ext].data, size=(mbox, mbox))
        struct[ext].data = struct[ext].data / illum
        # divide any associated variance frame by the same model
        if saltkey.found('VAREXT', struct[ext]):
            vext = saltkey.get('VAREXT', struct[ext])
            struct[vext].data = struct[vext].data / illum
    return struct
def saltxtalk(images, outimages, outpref, xtalkfile=None, usedb=False,
              clobber=True, logfile='salt.log', verbose=True):
    """Cross-talk correct a list of SALT images.

    images/outimages/outpref -- input/output list specifiers
    xtalkfile -- path to the cross-talk coefficient file (when usedb)
    usedb     -- read coefficients from *xtalkfile*; otherwise header values
    clobber   -- overwrite existing output files
    logfile/verbose -- logging controls

    Raises SaltError if an image has already been cross-talk corrected.
    """
    # start logging
    with logging(logfile, debug) as log:

        # Check the input images
        infiles = saltio.argunpack('Input', images)

        # create list of output files
        outfiles = saltio.listparse('Outfile', outimages, outpref, infiles, '')

        # are input and output lists the same length?
        saltio.comparelists(infiles, outfiles, 'Input', 'output')

        # does crosstalk coefficient data exist
        if usedb:
            xtalkfile = xtalkfile.strip()
            xdict = saltio.readxtalkcoeff(xtalkfile)
        else:
            xdict = None

        for img, oimg in zip(infiles, outfiles):

            # open the fits file
            struct = saltio.openfits(img)

            # find the best xcoeff for the image if using the db:
            # choose the db entry whose date is closest to DATE-OBS
            if usedb:
                obsdate = saltkey.get('DATE-OBS', struct[0])
                obsdate = int('%s%s%s' % (obsdate[0:4], obsdate[5:7],
                                          obsdate[8:]))
                # NOTE(review): np.array(xdict.keys()) relies on Python 2
                # dict.keys() returning a list; wrap in list() under Py3
                xkey = np.array(xdict.keys())
                date = xkey[abs(xkey - obsdate).argmin()]
                xcoeff = xdict[date]
            else:
                xcoeff = []

            # identify instrument
            instrume, keyprep, keygain, keybias, keyxtalk, keyslot = \
                saltkey.instrumid(struct)

            # has file been prepared already?
            if saltkey.found(keyxtalk, struct[0]):
                message = '%s has already been xtalk corrected' % img
                raise SaltError(message)

            # apply the cross-talk correction
            struct = xtalk(struct, xcoeff, log=log, verbose=verbose)

            # housekeeping keywords
            fname, hist = history(level=1, wrap=False,
                                  exclude=['images', 'outimages', 'outpref'])
            saltkey.housekeeping(struct[0], 'SXTALK',
                                 'Images have been xtalk corrected', hist)

            # write FITS file
            saltio.writefits(struct, oimg, clobber=clobber)
            saltio.closefits(struct)
def saltslot(images, outimages, outpref, gaindb='', xtalkfile='', usedb=False,
             clobber=False, logfile='salt.log', verbose=True):
    """Reduce SALT slotmode data and tag the housekeeping keywords.

    images/outimages/outpref -- input/output list specifiers
    gaindb    -- gain database path (read only when usedb is True)
    xtalkfile -- cross-talk coefficient file (read only when usedb is True)
    usedb     -- load calibration data from the db files
    clobber   -- overwrite existing output files
    logfile/verbose -- logging controls

    NOTE(review): dblist/xdict are loaded but not applied in this visible
    body — only the housekeeping keyword is written before output.
    """
    # start logging
    with logging(logfile, debug) as log:

        # Check the input images
        infiles = saltio.argunpack('Input', images)

        # create list of output files
        outfiles = saltio.listparse('Outfile', outimages, outpref, infiles, '')

        # are input and output lists the same length?
        saltio.comparelists(infiles, outfiles, 'Input', 'output')

        # does crosstalk coefficient data exist
        if usedb:
            dblist = saltio.readgaindb(gaindb)
            xtalkfile = xtalkfile.strip()
            xdict = saltio.readxtalkcoeff(xtalkfile)
        else:
            dblist = []
            xdict = None

        for img, oimg in zip(infiles, outfiles):

            # open the fits file
            struct = saltio.openfits(img)

            # identify instrument
            instrume, keyprep, keygain, keybias, keyxtalk, keyslot = \
                saltkey.instrumid(struct)

            # has file been prepared already?
            if saltkey.found(keygain, struct[0]):
                message = '%s has already been reduced' % img
                raise SaltError(message)

            # housekeeping keywords
            fname, hist = history(level=1, wrap=False,
                                  exclude=['images', 'outimages', 'outpref'])
            saltkey.housekeeping(struct[0], keyslot,
                                 'Images have been slotmode reduced', hist)

            # write FITS file
            saltio.writefits(struct, oimg, clobber=clobber)
            saltio.closefits(struct)
def skysubtract(hdu, method='normal', section=[], funct='polynomial', order=2):
    """Measure the sky in each 'SCI' extension of *hdu* and subtract it.

    hdu     -- HDU list; modified in place and returned
    method  -- 'normal' (normalsky) or 'fit' (fitsky) sky estimation
    section -- region of the image treated as sky (read-only here, so the
               mutable default is safe)
    funct, order -- fitting function/order (currently unused in this body)

    Returns the sky-subtracted HDU list.
    """
    for i in range(len(hdu)):
        if hdu[i].name == 'SCI':
            # set up the data, variance, and bad pixel frames
            # first step is to find the region to extract
            data_arr = hdu[i].data
            if saltkey.found('VAREXT', hdu[i]):
                var_ext = saltkey.get('VAREXT', hdu[i])
                var_arr = hdu[var_ext].data
            else:
                var_arr = None
                var_ext = None
            try:
                bpm_ext = saltkey.get('BPMEXT', hdu[i])
                bpm_arr = hdu[hdu[i].header['BPMEXT']].data
            except Exception:
                # BUG FIX: was a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit; a missing BPM is not fatal
                bpm_ext = None
                bpm_arr = None

            # create the xarr for the purposes of extracting the data
            xarr = np.arange(len(data_arr[0]))

            # TODO: The variance array does not fully work at the moment
            var_arr = None

            if method == 'normal':
                sdata = normalsky(xarr, data_arr, var_arr, section)
            elif method == 'fit':
                sdata = fitsky(xarr, data_arr, var_arr, section)

            # subtract the sky estimate from the science data
            hdu[i].data = data_arr - sdata

            # correct the variance frame
            # BUG FIX: `if var_ext:` was falsy for extension index 0;
            # test explicitly against None
            if var_ext is not None:
                hdu[var_ext].data = hdu[var_ext].data  # +ap.lvar/nrows

    return hdu
def saltgain(images, outimages, outpref, gaindb=None, usedb=False, mult=True,
             clobber=True, logfile='salt.log', verbose=True):
    """Apply the gain correction to each image in a list.

    Opens every input image, refuses images already marked as
    gain-corrected, runs ``gain`` on the data, records housekeeping
    history, and writes the corrected file.

    gaindb/usedb -- optional gain database and the switch to use it
    mult         -- multiply by the gain instead of dividing
    """
    # start logging
    with logging(logfile, debug) as log:

        # Check the input images
        infiles = saltio.argunpack('Input', images)

        # create list of output files
        outfiles = saltio.listparse('Outfile', outimages, outpref, infiles, '')

        # verify that the input and output lists are the same length
        saltio.comparelists(infiles, outfiles, 'Input', 'output')

        # read in the database file if usedb is true
        if usedb:
            gaindb = gaindb.strip()
            dblist = saltio.readgaindb(gaindb)
        else:
            dblist = []

        for img, oimg in zip(infiles, outfiles):
            # open the fits file
            struct = saltio.openfits(img)

            # identify instrument
            instrume, keyprep, keygain, keybias, keyxtalk, keyslot = \
                saltkey.instrumid(struct)

            # has file been prepared already?
            if saltkey.found(keygain, struct[0]):
                message = 'SALTGAIN: %s has already been gain-corrected' % img
                raise SaltError(message)

            # gain correct the data
            struct = gain(struct, mult=mult, usedb=usedb, dblist=dblist,
                          log=log, verbose=verbose)

            # housekeeping keywords
            fname, hist = history(level=1, wrap=False,
                                  exclude=['images', 'outimages', 'outpref'])
            saltkey.housekeeping(struct[0], keygain,
                                 'Images have been gain corrected', hist)

            # write FITS file
            saltio.writefits(struct, oimg, clobber=clobber)
            saltio.closefits(struct)
def saltslot(images, outimages, outpref, gaindb='', xtalkfile='', usedb=False,
             clobber=False, logfile='salt.log', verbose=True):
    """Slotmode reduction driver: validate inputs, tag keywords, write out.

    gaindb/xtalkfile/usedb -- calibration database inputs; loaded when
    usedb is True.  NOTE(review): in this visible body the loaded
    dblist/xdict are never applied to the data.
    """
    # start logging
    with logging(logfile, debug) as log:

        # Check the input images
        infiles = saltio.argunpack('Input', images)

        # create list of output files
        outfiles = saltio.listparse('Outfile', outimages, outpref, infiles, '')

        # are input and output lists the same length?
        saltio.comparelists(infiles, outfiles, 'Input', 'output')

        # does crosstalk coefficient data exist
        if usedb:
            dblist = saltio.readgaindb(gaindb)
            xtalkfile = xtalkfile.strip()
            xdict = saltio.readxtalkcoeff(xtalkfile)
        else:
            dblist = []
            xdict = None

        for img, oimg in zip(infiles, outfiles):
            # open the fits file
            struct = saltio.openfits(img)

            # identify instrument
            instrume, keyprep, keygain, keybias, keyxtalk, keyslot = \
                saltkey.instrumid(struct)

            # has file been prepared already?
            if saltkey.found(keygain, struct[0]):
                message = '%s has already been reduced' % img
                raise SaltError(message)

            # housekeeping keywords
            fname, hist = history(level=1, wrap=False,
                                  exclude=['images', 'outimages', 'outpref'])
            saltkey.housekeeping(struct[0], keyslot,
                                 'Images have been slotmode reduced', hist)

            # write FITS file
            saltio.writefits(struct, oimg, clobber=clobber)
            saltio.closefits(struct)
def updateheaders(struct, ext, tdiff, real_expt, utc, infile):
    """Correct the UTC timing keywords of one frame.

    struct    -- HDU whose header is updated
    ext       -- extension number (used only in error messages)
    tdiff     -- time correction in seconds; must differ from real_expt
    real_expt -- the real exposure (dwell) time
    utc       -- the UTC value currently in the header
    infile    -- file name (for keyword routines and error messages)

    Returns the updated struct.  Raises SaltError/SaltIOError on any
    failure to compute or write the new time.
    """
    # exit if tdiff wasn't updated
    if tdiff == real_expt:
        msg = 'No adequate correction found for frame %i in file %s' % (
            ext, infile)
        raise SaltError(msg)
        # BUG FIX: removed unreachable `return struct` after the raise

    # calculate the new utc value by shifting the sexagesimal time
    # back by tdiff seconds
    try:
        ntime = salttime.sex2dec(utc)
        ntime = ntime - tdiff / 3600.0
        newutc = salttime.dec2sex(ntime)
    except Exception as e:
        msg = 'Could not update UTC in %i header of image %s because %s' % (
            ext, infile, e)
        raise SaltError(msg)

    # update the headers
    if utc == saltsafekey.get('UTC-OBS', struct):
        expt_string = '%5.4f' % real_expt
        td_string = '%5.4f' % tdiff
        if not saltsafekey.found('DUTC', struct):
            # first correction: create the DWETIME/DUTC keywords
            try:
                saltsafekey.put('UTC-OBS', newutc, struct, infile)
                saltsafekey.put('TIME-OBS', newutc, struct, infile)
                saltsafekey.new('DWETIME', expt_string, 'Dwell Time',
                                struct, infile)
                saltsafekey.new('DUTC', td_string, 'Change in UTC time',
                                struct, infile)
            except Exception as e:
                msg = 'Could not update %i header of image %s because %s' % (
                    ext, infile, e)
                raise SaltIOError(msg)
        else:
            # subsequent correction: overwrite the existing keywords
            try:
                saltsafekey.put('UTC-OBS', newutc, struct, infile)
                saltsafekey.put('TIME-OBS', newutc, struct, infile)
                saltsafekey.put('DWETIME', real_expt, struct, infile)
                saltsafekey.put('DUTC', tdiff, struct, infile)
            except Exception as e:
                msg = 'Could not update %i header of image %s because %s' % (
                    ext, infile, e)
                raise SaltError(msg)
    else:
        raise SaltIOError('Frame missing from list of times')

    return struct
def readtimefix(hdu, dsteps=7, transtime=4e-3):
    """Update the hdu with the correct time for when the exposure started
    and add the READTIME keyword

    dsteps--the number of readouts to correct for
    transtime--the transfer time between each frame
    """
    # refuse frames that already carry a READTIME keyword
    if saltkey.found('READTIME', hdu):
        raise SaltIOError(' has already been processed')

    # pull the timing keywords; TIME-OBS is read for validation only
    utctime = saltkey.get('UTC-OBS', hdu)
    timeobs = saltkey.get('TIME-OBS', hdu)
    dateobs = saltkey.get('DATE-OBS', hdu)
    exptime = float(saltkey.get('EXPTIME', hdu))

    # record the readout time before shifting UTC-OBS
    saltkey.new("READTIME", utctime, 'Time of the readout of the frame', hdu)

    # build a datetime so the shift correctly handles hour/day rollovers
    year, month, day = [int(v) for v in dateobs.split('-')]
    hour, minute, sec_str = utctime.split(':')
    whole_s = int(float(sec_str))
    micro_s = int(1e6 * (float(sec_str) - whole_s))
    start = datetime.datetime(year, month, day, int(hour), int(minute),
                              whole_s, micro_s)

    # shift back by the accumulated readout + transfer time
    offset = dsteps * (exptime + transtime)
    off_s = int(offset)
    off_us = int(1e6 * (offset - off_s))
    start = start - datetime.timedelta(0, off_s, off_us)

    # write the corrected keywords back to the header
    hdu.header["UTC-OBS"] = str(start.time())
    saltkey.put("UTC-OBS", str(start.time()), hdu)
    saltkey.put("TIME-OBS", str(start.time()), hdu)
    saltkey.put("DATE-OBS", str(start.date()), hdu)

    return hdu
def readtimefix(hdu, dsteps=7, transtime=4e-3):
    """Update the hdu with the correct time for when the exposure started
    and add the READTIME keyword.

    hdu       -- HDU whose timing keywords are corrected (returned)
    dsteps    -- the number of readouts to correct for
    transtime -- the transfer time between each frame (seconds)

    Raises SaltIOError if the frame already has a READTIME keyword.
    """
    # check for if the data has already been processed
    if saltkey.found('READTIME', hdu):
        raise SaltIOError(' has already been processed')

    # determine the UTC time (TIME-OBS read for validation)
    utctime = saltkey.get('UTC-OBS', hdu)
    timeobs = saltkey.get('TIME-OBS', hdu)
    dateobs = saltkey.get('DATE-OBS', hdu)
    exptime = float(saltkey.get('EXPTIME', hdu))

    # add the readtime header
    saltkey.new("READTIME", utctime, 'Time of the readout of the frame', hdu)

    # correct the utctime--first switch to datetime to properly handle
    # dates around changes in hours
    y, m, d = dateobs.split('-')
    H, M, S = utctime.split(':')
    s = int(float(S))
    ms = int(1e6 * (float(S) - s))
    newtime = datetime.datetime(int(y), int(m), int(d), int(H), int(M), s, ms)

    # correct the datetime by the total readout + transfer offset
    dtime = dsteps * (exptime + transtime)
    s = int(dtime)
    ms = int(1e6 * (dtime - s))
    newtime = newtime - datetime.timedelta(0, s, ms)

    # update the head keywords.
    # BUG FIX: hdu.header.update("UTC-OBS", value) uses the deprecated
    # (and in modern astropy removed) positional Header.update API; use
    # item assignment as the sibling implementation does.
    hdu.header["UTC-OBS"] = str(newtime.time())
    saltkey.put("UTC-OBS", str(newtime.time()), hdu)
    saltkey.put("TIME-OBS", str(newtime.time()), hdu)
    saltkey.put("DATE-OBS", str(newtime.date()), hdu)

    return hdu
def illum_cor(struct, mbox):
    """Apply an illumination correction to a set of images.

    Every 'SCI' extension (or the sole extension of a one-HDU file) is
    divided by its own median-smoothed image; a linked variance frame is
    divided by the same smoothed image.

    struct -- HDU list (modified in place and returned)
    mbox   -- square median box size in pixels
    """
    nhdu = len(struct)
    for idx in range(nhdu):
        hdu = struct[idx]
        if hdu.name == 'SCI' or nhdu == 1:
            # build the illumination model from the image itself
            mdata = median_filter(hdu.data, size=(mbox, mbox))
            hdu.data = hdu.data / mdata
            # keep the variance frame consistent with the scaled data
            if saltkey.found('VAREXT', hdu):
                var_idx = saltkey.get('VAREXT', hdu)
                struct[var_idx].data = struct[var_idx].data / mdata
    return struct
def quickclean(filename, interp='linear', cleanup=True, clobber=False,
               logfile='saltclean.log', verbose=True):
    """Start the process to reduce the data and produce a single mosaicked
    image.

    filename -- raw frame; the leading character selects the instrument
                ('P' = RSS, 'S' = SALTICAM) calibration files
    interp   -- interpolation used by the mosaic step
    cleanup  -- remove the intermediate 'bxp' file after mosaicking
    clobber  -- if False and the output mosaic exists, do nothing

    Output is written as ./mbxp<infile>.
    NOTE(review): if the filename starts with neither 'P' nor 'S',
    gaindb/xtalkfile/geomfile stay undefined and the later calls fail.
    """
    print filename
    # create the input file name
    status = 0
    infile = os.path.basename(filename)
    rawpath = os.path.dirname(filename)
    outpath = './'
    outfile = outpath + 'mbxp' + infile
    print infile, rawpath, outpath

    # check to see if it exists and return if clobber is no
    if os.path.isfile(outfile) and not clobber:
        return

    # set up the calibration files needed for the instrument
    if infile[0] == 'P':
        gaindb = iraf.osfn('pysalt$data/rss/RSSamps.dat')
        xtalkfile = iraf.osfn('pysalt$data/rss/RSSxtalk.dat')
        geomfile = iraf.osfn('pysalt$data/rss/RSSgeom.dat')
    elif infile[0] == 'S':
        gaindb = iraf.osfn('pysalt$data/scam/SALTICAMamps.dat')
        xtalkfile = iraf.osfn('pysalt$data/scam/SALTICAMxtalk.dat')
        geomfile = iraf.osfn('pysalt$data/scam/SALTICAMgeom.dat')

    # verify the file
    struct = saltio.openfits(rawpath + '/' + infile)
    struct.verify('exception')

    # check to see if detmode is there
    if not saltkey.found('DETMODE', struct[0]):
        return

    # reduce the file
    struct = prepare(struct, createvar=False, badpixelstruct=None)
    # reset the names in the structures
    for i in range(1, len(struct)):
        struct[i].name = struct[i].header['EXTNAME']

    # gain correct the files
    usedb = True
    dblist = saltio.readgaindb(gaindb)
    # NOTE(review): this log handle is opened but never written or closed
    log = open(logfile, 'a')
    ampccd = struct[0].header['NAMPS'] / struct[0].header['NCCDS']
    struct = gain(struct, mult=True, usedb=usedb, dblist=dblist,
                  ampccd=ampccd, log=None, verbose=verbose)

    # overscan-subtract and trim
    struct = bias(struct, subover=True, trim=True, subbias=False,
                  median=False, function='polynomial', order=5, rej_lo=3,
                  rej_hi=3, niter=10, plotover=False, log=None,
                  verbose=verbose)

    # cosmic-ray clean long object exposures only
    if struct[0].header['CCDTYPE'] == 'OBJECT' and \
            struct[0].header['EXPTIME'] > 90:
        struct = multicrclean(struct, crtype='median', thresh=5, mbox=5,
                              bbox=25, bthresh=5, flux_ratio=0.2,
                              gain=1, rdnoise=5, bfactor=2, fthresh=5,
                              gbox=0, maxiter=5, log=None, verbose=verbose)

    # write the intermediate product and mosaic it
    pinfile = outpath + 'bxp' + infile
    saltio.writefits(struct, pinfile, clobber)

    saltred.saltmosaic(images=pinfile, outimages='', outpref=outpath + 'm',
                       geomfile=geomfile, fill=True, interp=interp,
                       cleanup=cleanup, clobber=clobber, logfile=logfile,
                       verbose=verbose)
    profile = outpath + 'mbxp' + infile

    # remove intermediate steps
    if cleanup:
        if os.path.isfile(pinfile):
            os.remove(pinfile)

    return
def quickclean(filename, interp='linear', cleanup=True, clobber=False,
               logfile='saltclean.log', verbose=True):
    """Start the process to reduce the data and produce a single mosaicked
    image.

    Runs prepare -> gain -> bias -> (optional multicrclean) -> mosaic on a
    single raw frame; calibration files are chosen by the first character
    of the file name ('P' = RSS, 'S' = SALTICAM).

    NOTE(review): for any other leading character, gaindb/xtalkfile/
    geomfile remain unbound and the reduction will raise NameError.
    """
    print filename
    # create the input file name
    status = 0
    infile = os.path.basename(filename)
    rawpath = os.path.dirname(filename)
    outpath = './'
    outfile = outpath + 'mbxp' + infile
    print infile, rawpath, outpath

    # check to see if it exists and return if clobber is no
    if os.path.isfile(outfile) and not clobber:
        return

    # set up the files needed
    if infile[0] == 'P':
        gaindb = iraf.osfn('pysalt$data/rss/RSSamps.dat')
        xtalkfile = iraf.osfn('pysalt$data/rss/RSSxtalk.dat')
        geomfile = iraf.osfn('pysalt$data/rss/RSSgeom.dat')
    elif infile[0] == 'S':
        gaindb = iraf.osfn('pysalt$data/scam/SALTICAMamps.dat')
        xtalkfile = iraf.osfn('pysalt$data/scam/SALTICAMxtalk.dat')
        geomfile = iraf.osfn('pysalt$data/scam/SALTICAMgeom.dat')

    # verify the file
    struct = saltio.openfits(rawpath + '/' + infile)
    struct.verify('exception')

    # check to see if detmode is there
    if not saltkey.found('DETMODE', struct[0]):
        return

    # reduce the file
    struct = prepare(struct, createvar=False, badpixelstruct=None)
    # reset the names in the structures
    for i in range(1, len(struct)):
        struct[i].name = struct[i].header['EXTNAME']

    # gain correct the files
    usedb = True
    dblist = saltio.readgaindb(gaindb)
    # NOTE(review): this log handle is never used or closed
    log = open(logfile, 'a')
    ampccd = struct[0].header['NAMPS'] / struct[0].header['NCCDS']
    struct = gain(struct, mult=True, usedb=usedb, dblist=dblist,
                  ampccd=ampccd, log=None, verbose=verbose)

    # overscan subtraction and trimming
    struct = bias(struct, subover=True, trim=True, subbias=False,
                  median=False, function='polynomial', order=5, rej_lo=3,
                  rej_hi=3, niter=10, plotover=False, log=None,
                  verbose=verbose)

    # cosmic-ray clean only long object exposures
    if struct[0].header[
            'CCDTYPE'] == 'OBJECT' and struct[0].header['EXPTIME'] > 90:
        struct = multicrclean(struct, crtype='median', thresh=5, mbox=5,
                              bbox=25, bthresh=5, flux_ratio=0.2,
                              gain=1, rdnoise=5, bfactor=2, fthresh=5,
                              gbox=0, maxiter=5, log=None, verbose=verbose)

    # write the intermediate product, then mosaic it
    pinfile = outpath + 'bxp' + infile
    saltio.writefits(struct, pinfile, clobber)

    saltred.saltmosaic(images=pinfile, outimages='', outpref=outpath + 'm',
                       geomfile=geomfile, fill=True, interp=interp,
                       cleanup=cleanup, clobber=clobber, logfile=logfile,
                       verbose=verbose)
    profile = outpath + 'mbxp' + infile

    # remove intermediate steps
    if cleanup:
        if os.path.isfile(pinfile):
            os.remove(pinfile)

    return
def multicrclean(struct, crtype='fast', thresh=5, mbox=5, bbox=11, bthresh=3,
                 flux_ratio=0.2, gain=1, rdnoise=5, bfactor=2, fthresh=5,
                 gbox=0, maxiter=1, update=True, log=None, verbose=True):
    """MULTICRCLEAN cleans SALT-like data of cosmic rays. The user has
    three different choices for the type of cosmic ray cleaning being
    fast, median, and edge. The process is set up to use multithreading
    for quick processing of the data.

    crtype--type of cosmic ray cleaning. Either fast, median, or edge
    thresh--threshold for detecting cosmic rays
    mbox--box for median cleaning
    bbox--background box for median measurement
    bthresh--threshold for iterating on background calculation
    flux_ratio--ratio of fluxes for 'fast' method
    gain--gain of images--set to None to read in from header
    rdnoise--read noise of images--set to None to read in from header
    bfactor--block replication factor for 'edge' method
    fthresh--threshold for excluding compact sources (edge only)
    gbox--Window size to grow sources. gbox=0 for no growth of cosmic rays
    maxiter--maximum number of iterations
    update--write the cleaned pixels back into the SCI data

    return struct
    """
    # setup the image name
    infile = saltkey.getimagename(struct[0])

    # count the CR
    totcr = 0

    # print out the header for the log
    if log:
        message = '%28s %11s' % ('HDU', 'COSMICRAYS')
        log.message('\n ---------------------------------------------------',
                    with_header=False, with_stdout=verbose)
        log.message(message, with_header=False, with_stdout=verbose)
        log.message(' ---------------------------------------------------',
                    with_header=False, with_stdout=verbose)

    # one cleaning task per SCI extension, in HDU order
    task_list = []
    for hdu in struct:
        if hdu.name == 'SCI':
            task_list.append((hdu.data, crtype, thresh, mbox, bbox, bthresh,
                              flux_ratio, gain, rdnoise, bfactor, fthresh,
                              gbox, maxiter))

    # set up the multi-process pool
    p = mp.Pool()
    results = [p.apply_async(cleancosmicrays, i) for i in task_list]
    p.close()
    # BUG FIX: wait for the workers to finish before collecting results
    p.join()

    nsci = -1
    for i, hdu in enumerate(struct):
        if hdu.name == 'SCI':
            # BUG FIX: index results by the running count of SCI
            # extensions; `results[i - 1]` assumed SCI HDUs are
            # contiguous starting at extension 1
            nsci += 1
            crarr = results[nsci].get()

            # update the frame for the various values
            mask = (crarr > 0)
            if update:
                struct[i].data[mask] = crarr[mask]

            # track the number of cosmic rays
            ncr = mask.sum()
            totcr += ncr

            # if verbose print out information
            if log:
                message = '%25s[%1d] %i' % (infile, i, ncr)
                log.message(message, with_header=False, with_stdout=verbose)

            # correct the BPM frame
            if saltkey.found('BPMEXT', struct[i]):
                b_i = saltkey.get('BPMEXT', struct[i])
                try:
                    struct[b_i].data[mask] = 1
                except Exception as e:
                    msg = 'Cannot update the BPM frame in %s[%i] because %s' % (
                        infile, b_i, e)
                    raise SaltError(msg)

    # BUG FIX: the docstring promised `return struct` and callers assign
    # the result, but the function fell off the end returning None
    return struct
def wavemap(hdu, soldict, caltype="line", function="poly", order=3, blank=0,
            nearest=False, array_only=False, clobber=True, log=None,
            verbose=True):
    """Build a per-pixel wavelength map for each SCI extension of *hdu*.

    Reads a set of wavelength solutions (*soldict*), picks the best
    solution for the observation's setup, evaluates it row by row, and
    appends the result as a 'WAV' image extension (recording its index in
    the SCI header's WAVEXT keyword).  If a WAVEXT extension already
    exists, its x-shift values are transformed to wavelengths instead.

    caltype    -- 'line' (use GRTILT/CAMANG) or other (GR-ANGLE/AR-ANGLE)
    array_only -- return the first computed wavelength array instead of
                  the updated HDU list
    blank      -- unused in this body (kept for interface compatibility)

    Returns the updated HDU list (or a numpy array when array_only).
    """
    # set up the time of the observation
    dateobs = saltkey.get("DATE-OBS", hdu[0])
    utctime = saltkey.get("TIME-OBS", hdu[0])
    exptime = saltkey.get("EXPTIME", hdu[0])
    instrume = saltkey.get("INSTRUME", hdu[0]).strip()
    grating = saltkey.get("GRATING", hdu[0]).strip()
    if caltype == "line":
        grang = saltkey.get("GRTILT", hdu[0])
        arang = saltkey.get("CAMANG", hdu[0])
    else:
        grang = saltkey.get("GR-ANGLE", hdu[0])
        arang = saltkey.get("AR-ANGLE", hdu[0])
    filtername = saltkey.get("FILTER", hdu[0]).strip()
    slitname = saltkey.get("MASKID", hdu[0])
    slit = st.getslitsize(slitname)
    xbin, ybin = saltkey.ccdbin(hdu[0])

    timeobs = sr.enterdatetime("%s %s" % (dateobs, utctime))

    # check to see if there is more than one solution
    if caltype == "line":
        if len(soldict) == 1:
            # NOTE(review): dict.keys()[0] is Python 2 only; wrap in
            # list() for Python 3
            sol = soldict.keys()[0]
            slitid = None
            if not sr.matchobservations(soldict[sol], instrume, grating,
                                        grang, arang, filtername, slitid):
                msg = "Observations do not match setup for transformation but using the solution anyway"
                if log:
                    log.warning(msg)

    for i in range(1, len(hdu)):
        if hdu[i].name == "SCI":
            if log:
                log.message("Correcting extension %i" % i)
            # reference row: the middle of the image
            istart = int(0.5 * len(hdu[i].data))

            # open up the data
            # set up the xarr and initial wavlength solution
            xarr = np.arange(len(hdu[i].data[istart]), dtype="int64")

            # get the slitid
            try:
                slitid = saltkey.get("SLITNAME", hdu[i])
            except:  # NOTE(review): bare except kept as-is (doc-only edit)
                slitid = None

            # check to see if wavext is already there and if so, then check update
            # that for the transformation from xshift to wavelength
            if saltkey.found("WAVEXT", hdu[i]):
                w_ext = saltkey.get("WAVEXT", hdu[i]) - 1
                wavemap = hdu[w_ext].data
                function, order, coef = sr.findlinesol(
                    soldict, istart, nearest, timeobs, exptime, instrume,
                    grating, grang, arang, filtername, slitid, xarr,)
                ws = WavelengthSolution.WavelengthSolution(
                    xarr, xarr, function=function, order=order)
                ws.set_coef(coef)
                # convert the stored x-shifts into wavelengths row by row
                for j in range(len(hdu[i].data)):
                    wavemap[j, :] = ws.value(wavemap[j, :])
                if array_only:
                    return wavemap
                hdu[w_ext].data = wavemap
                continue

            # set up a wavelength solution -- still in here for testing MOS data
            try:
                w_arr = sr.findsol(xarr, soldict, istart, caltype, nearest,
                                   timeobs, exptime, instrume, grating,
                                   grang, arang, filtername, slit, xbin,
                                   ybin, slitid, function, order,)
            except SALTSpecError as e:
                # for MOS data a failed slit is skipped, not fatal
                if slitid:
                    msg = "SLITID %s: %s" % (slitid, e)
                    if log:
                        log.warning(msg)
                    continue
                else:
                    raise SALTSpecError(e)

            if w_arr is None:
                # fall back to the generic 'rss' solution
                w_arr = sr.findsol(xarr, soldict, istart, "rss", nearest,
                                   timeobs, exptime, instrume, grating,
                                   grang, arang, filtername, slit, xbin,
                                   ybin, slitid, function, order,)

            # for each line in the data, determine the wavelength solution
            # for a given line in the image
            wavemap = np.zeros_like(hdu[i].data)
            for j in range(len(hdu[i].data)):
                # find the wavelength solution for the data
                w_arr = sr.findsol(xarr, soldict, j, caltype, nearest,
                                   timeobs, exptime, instrume, grating,
                                   grang, arang, filtername, slit, xbin,
                                   ybin, slitid, function, order,)
                if w_arr is not None:
                    wavemap[j, :] = w_arr
            if array_only:
                return wavemap

            # write out the oimg
            hduwav = fits.ImageHDU(data=wavemap, header=hdu[i].header,
                                   name="WAV")
            hdu.append(hduwav)
            saltkey.new("WAVEXT", len(hdu) - 1,
                        "Extension for Wavelength Map", hdu[i])

    return hdu
try: ntime = salttime.sex2dec(utc) ntime = ntime - tdiff / 3600.0 newutc = salttime.dec2sex(ntime) except Exception, e: msg = 'Could not update UTC in %i header of image %s because %s' % ( ext, infile, e) raise SaltError(msg) return struct # update the headers if utc == saltsafekey.get('UTC-OBS', struct): expt_string = '%5.4f' % real_expt td_string = '%5.4f' % tdiff if not saltsafekey.found('DUTC', struct): try: saltsafekey.put('UTC-OBS', newutc, struct, infile) saltsafekey.put('TIME-OBS', newutc, struct, infile) saltsafekey.new('DWETIME', expt_string, 'Dwell Time', struct, infile) saltsafekey.new('DUTC', td_string, 'Change in UTC time', struct, infile) except Exception, e: msg = 'Could not update %i header of image %s because %s' % ( ext, infile, e) raise SaltIOError(msg) else: try: saltsafekey.put('UTC-OBS', newutc, struct, infile) saltsafekey.put('TIME-OBS', newutc, struct, infile)
def multicrclean(struct, crtype='fast', thresh=5, mbox=5, bbox=11, bthresh=3,
                 flux_ratio=0.2, gain=1, rdnoise=5, bfactor=2, fthresh=5,
                 gbox=0, maxiter=1, update=True, log=None, verbose=True):
    """MULTICRCLEAN cleans SALT-like data of cosmic rays. The user has
    three different choices for the type of cosmic ray cleaning being
    fast, median, and edge. The process is set up to use multithreading
    for quick processing of the data.

    crtype--type of cosmic ray cleaning. Either fast, median, or edge
    thresh--threshold for detecting cosmic rays
    mbox--box for median cleaning
    bbox--background box for median measurement
    bthresh--threshold for iterating on background calculation
    flux_ratio--ratio of fluxes for 'fast' method
    gain--gain of images--set to None to read in from header
    rdnoise--read noise of images--set to None to read in from header
    bfactor--block replication factor for 'edge' method
    fthresh--threshold for excluding compact sources (edge only)
    gbox--Window size to grow sources. gbox=0 for no growth of cosmic rays
    maxiter--maximum number of iterations
    update--write cleaned pixels back into the SCI extensions

    return struct
    """
    # setup the image name
    infile = saltkey.getimagename(struct[0])

    # count the CR
    totcr = 0

    # print out the header for the log
    if log:
        message = '%28s %11s' % ('HDU', 'COSMICRAYS')
        log.message('\n ---------------------------------------------------',
                    with_header=False, with_stdout=verbose)
        log.message(message, with_header=False, with_stdout=verbose)
        log.message(' ---------------------------------------------------',
                    with_header=False, with_stdout=verbose)

    # queue one cleaning task per SCI extension, preserving HDU order
    task_list = []
    for hdu in struct:
        if hdu.name == 'SCI':
            task_list.append((hdu.data, crtype, thresh, mbox, bbox, bthresh,
                              flux_ratio, gain, rdnoise, bfactor, fthresh,
                              gbox, maxiter))

    # set up the worker pool
    p = mp.Pool()
    results = [p.apply_async(cleancosmicrays, i) for i in task_list]
    p.close()
    # BUG FIX: join the pool so all workers complete
    p.join()

    nsci = -1
    for i, hdu in enumerate(struct):
        if hdu.name == 'SCI':
            # BUG FIX: map each SCI extension to its own result by a
            # running counter; `results[i-1]` silently mis-paired results
            # whenever SCI HDUs were not contiguous from extension 1
            nsci += 1
            crarr = results[nsci].get()

            # update the frame for the various values
            mask = (crarr > 0)
            if update:
                struct[i].data[mask] = crarr[mask]

            # track the number of cosmic rays
            ncr = mask.sum()
            totcr += ncr

            # if verbose print out information
            if log:
                message = '%25s[%1d] %i' % (infile, i, ncr)
                log.message(message, with_header=False, with_stdout=verbose)

            # correct the BPM frame
            if saltkey.found('BPMEXT', struct[i]):
                b_i = saltkey.get('BPMEXT', struct[i])
                try:
                    struct[b_i].data[mask] = 1
                except Exception as e:
                    msg = 'Cannot update the BPM frame in %s[%i] because %s' % (
                        infile, b_i, e)
                    raise SaltError(msg)

    # BUG FIX: return the struct as documented; callers assign the result
    return struct
def xtalk(struct, xcoeff, namps=2, log=None, verbose=False):
    """xtalk cross-talk corrects the amplifiers.

    struct -- HDU list (modified in place and returned)
    xcoeff -- list of cross-talk coefficients indexed by amplifier; if
              empty/falsy the per-extension XTALK header value is used
    namps  -- amplifiers per CCD (correction is hardwired for pairs)
    log/verbose -- logging controls

    For each adjacent pair of SCI extensions, the mirrored image of one
    amplifier scaled by its coefficient is subtracted from the other.
    Variance frames are propagated accordingly.

    Returns struct.
    """
    infile = saltkey.getimagename(struct[0])

    # how many extensions?
    nsciext = saltkey.get('NSCIEXT', struct[0])
    nextend = saltkey.get('NEXTEND', struct[0])
    nccd = saltkey.get('NCCDS', struct[0])

    # how many amplifiers?--this is hard wired
    amplifiers = namps * nccd

    # setup the log
    if log:
        message = '%28s %23s' % ('HDU', 'Correction')
        message += '\n ----------------------------------------------'
        log.message(message, with_header=False, with_stdout=verbose)

    # Loop over the image extensions and subtract one
    # set from the other--still hardwired at 2
    for i in range(1, nsciext, 2):
        if struct[i].name == 'SCI' and struct[i + 1].name == 'SCI':
            # set up the first amplifier
            dat1 = struct[i].data.copy()
            ext1 = saltkey.get('EXTVER', struct[i])
            if xcoeff:
                j = (ext1 - 1) % amplifiers
                xc1 = float(xcoeff[j])
            else:
                xc1 = 1.0 / saltkey.get('XTALK', struct[i])

            # set up the second amplifier
            dat2 = struct[i + 1].data.copy()
            ext2 = saltkey.get('EXTVER', struct[i + 1])
            if xcoeff:
                j = (ext2 - 1) % amplifiers
                xc2 = float(xcoeff[j])
            else:
                xc2 = 1.0 / saltkey.get('XTALK', struct[i + 1])

            # subtract the mirrored, scaled partner amplifier
            struct[i].data = struct[i].data - xc2 * dat2[:, ::-1]
            struct[i + 1].data = struct[i + 1].data - xc1 * dat1[:, ::-1]

            # correct the variance frame
            if saltkey.found('VAREXT', struct[i]):
                vhdu1 = saltkey.get('VAREXT', struct[i])
                vhdu2 = saltkey.get('VAREXT', struct[i + 1])
                try:
                    # BUG FIX: the original updated vhdu1 first and then
                    # used the *updated* vhdu1 data to correct vhdu2.
                    # Use pre-update copies, square the coefficients
                    # (Var(a - c*b) = Var_a + c**2 * Var_b), and mirror
                    # the variance the same way as the data.
                    var1 = struct[vhdu1].data.copy()
                    var2 = struct[vhdu2].data.copy()
                    struct[vhdu1].data += xc2 ** 2 * var2[:, ::-1]
                    struct[vhdu2].data += xc1 ** 2 * var1[:, ::-1]
                except Exception as e:
                    msg = 'Cannot update the variance frame in %s[%i] because %s' % (
                        infile, vhdu1, e)
                    raise SaltError(msg)

            # print the message
            if log:
                message = '%25s[%1d] Amp%1d - Amp%1d * %8.6f' % \
                    (infile, i, ext1, ext2, xc2)
                log.message(message, with_header=False, with_stdout=verbose)
                message = '%25s[%1d] Amp%1d - Amp%1d * %8.6f' % \
                    (infile, i + 1, ext2, ext1, xc1)
                log.message(message, with_header=False, with_stdout=verbose)

    # BUG FIX: the caller (saltxtalk) assigns the result of this call;
    # falling off the end returned None and broke the pipeline
    return struct
def wavemap(hdu, soldict, caltype='line', function='poly', order=3, blank=0,
            nearest=False, array_only=False, clobber=True, log=None,
            verbose=True):
    """Read in an image and a set of wavlength solutions.  Calculate the
       best wavelength solution for a given dataset and then apply that
       data set to the image

       hdu--fits HDUList; SCI extensions are processed
       soldict--dictionary of wavelength solutions keyed by solution id
       caltype--'line' to use GRTILT/CAMANG keywords, otherwise GR-ANGLE/AR-ANGLE
       function, order--form and order of the wavelength fit
       array_only--if True, return the wavelength-map array for the FIRST SCI
                   extension processed instead of the full HDUList
       log--saltio log for recording information

     return -- hdu with a 'WAV' image extension appended per SCI extension
               (or a bare numpy array when array_only is True)
    """
    # set up the time of the observation
    dateobs = saltkey.get('DATE-OBS', hdu[0])
    utctime = saltkey.get('TIME-OBS', hdu[0])
    exptime = saltkey.get('EXPTIME', hdu[0])
    instrume = saltkey.get('INSTRUME', hdu[0]).strip()
    grating = saltkey.get('GRATING', hdu[0]).strip()
    # grating/articulation angles come from different keywords depending on
    # whether this is a line calibration or not
    if caltype == 'line':
        grang = saltkey.get('GRTILT', hdu[0])
        arang = saltkey.get('CAMANG', hdu[0])
    else:
        grang = saltkey.get('GR-ANGLE', hdu[0])
        arang = saltkey.get('AR-ANGLE', hdu[0])
    filtername = saltkey.get('FILTER', hdu[0]).strip()
    slitname = saltkey.get('MASKID', hdu[0])
    slit = st.getslitsize(slitname)
    xbin, ybin = saltkey.ccdbin(hdu[0])

    timeobs = sr.enterdatetime('%s %s' % (dateobs, utctime))

    # check to see if there is more than one solution
    # (single-solution case: warn but proceed even if the setup differs)
    if caltype == 'line':
        if len(soldict) == 1:
            # NOTE: dict.keys() indexing is Python-2 only
            sol = soldict.keys()[0]
            slitid = None
            if not sr.matchobservations(
                    soldict[sol], instrume, grating, grang, arang,
                    filtername, slitid):
                msg = 'Observations do not match setup for transformation but using the solution anyway'
                if log:
                    log.warning(msg)

    for i in range(1, len(hdu)):
        if hdu[i].name == 'SCI':
            if log:
                log.message('Correcting extension %i' % i)
            # start from the middle row of the image
            istart = int(0.5 * len(hdu[i].data))

            # open up the data
            # set up the xarr and initial wavlength solution
            xarr = np.arange(len(hdu[i].data[istart]), dtype='int64')

            # get the slitid (optional keyword; None when absent)
            try:
                slitid = saltkey.get('SLITNAME', hdu[i])
            except:
                slitid = None

            # check to see if wavext is already there and if so, then check update
            # that for the transformation from xshift to wavelength
            if saltkey.found('WAVEXT', hdu[i]):
                w_ext = saltkey.get('WAVEXT', hdu[i]) - 1
                # NOTE: this local deliberately reuses (shadows) the function
                # name 'wavemap' for the remainder of this branch
                wavemap = hdu[w_ext].data
                function, order, coef = sr.findlinesol(
                    soldict, istart, nearest, timeobs, exptime, instrume,
                    grating, grang, arang, filtername, slitid, xarr)
                ws = WavelengthSolution.WavelengthSolution(
                    xarr, xarr, function=function, order=order)
                ws.set_coef(coef)
                # map each row's xshift values through the solution in place
                for j in range(len(hdu[i].data)):
                    wavemap[j, :] = ws.value(wavemap[j, :])
                if array_only:
                    # early exit: only the first SCI extension is returned
                    return wavemap
                hdu[w_ext].data = wavemap
                continue

            # set up a wavelength solution -- still in here for testing MOS data
            try:
                w_arr = sr.findsol(xarr, soldict, istart, caltype, nearest,
                                   timeobs, exptime, instrume, grating,
                                   grang, arang, filtername, slit, xbin,
                                   ybin, slitid, function, order)
            except SALTSpecError as e:
                # for MOS data a failing slit is skipped with a warning;
                # otherwise the error propagates
                if slitid:
                    msg = 'SLITID %s: %s' % (slitid, e)
                    if log:
                        log.warning(msg)
                    continue
                else:
                    raise SALTSpecError(e)

            if w_arr is None:
                # fall back to the generic 'rss' solution
                w_arr = sr.findsol(xarr, soldict, istart, 'rss', nearest,
                                   timeobs, exptime, instrume, grating,
                                   grang, arang, filtername, slit, xbin,
                                   ybin, slitid, function, order)

            # for each line in the data, determine the wavelength solution
            # for a given line in the image
            wavemap = np.zeros_like(hdu[i].data)
            for j in range(len(hdu[i].data)):
                # find the wavelength solution for the data
                w_arr = sr.findsol(xarr, soldict, j, caltype, nearest,
                                   timeobs, exptime, instrume, grating,
                                   grang, arang, filtername, slit, xbin,
                                   ybin, slitid, function, order)
                if w_arr is not None:
                    wavemap[j, :] = w_arr
            if array_only:
                # early exit: only the first SCI extension is returned
                return wavemap

            # write out the oimg: append the wavelength map as a 'WAV'
            # extension and record its location on the SCI header
            hduwav = fits.ImageHDU(data=wavemap, header=hdu[i].header,
                                   name='WAV')
            hdu.append(hduwav)
            saltkey.new('WAVEXT', len(hdu) - 1,
                        'Extension for Wavelength Map', hdu[i])

    return hdu
def xtalk(struct, xcoeff, namps=2, log=None, verbose=False):
    """xtalk cross-talk corrects paired amplifier extensions in place.

    struct--fits HDUList with adjacent SCI extensions holding amplifier pairs
    xcoeff--list of cross-talk coefficients indexed by (EXTVER-1) mod namps;
            if empty/None the inverse of each extension's XTALK keyword is used
    namps--number of amplifiers per CCD (the pairing below is hardwired to 2)
    log--saltio log for recording information
    verbose--whether to print to stdout

    Returns nothing; struct is modified in place.  Raises SaltError if the
    variance frames cannot be updated.
    """
    infile = saltkey.getimagename(struct[0])

    # how many extensions?
    nsciext = saltkey.get('NSCIEXT', struct[0])
    nextend = saltkey.get('NEXTEND', struct[0])
    nccd = saltkey.get('NCCDS', struct[0])

    # how many amplifiers?--this is hard wired
    amplifiers = namps * nccd

    # setup the log
    if log:
        message = '%28s %23s' % ('HDU', 'Correction')
        message += '\n ----------------------------------------------'
        log.message(message, with_header=False, with_stdout=verbose)

    # Loop over the image extensions and subtract one
    # set from the other--still hardwired at 2
    for i in range(1, nsciext, 2):
        if struct[i].name == 'SCI' and struct[i + 1].name == 'SCI':
            # set up the first amplifier
            dat1 = struct[i].data.copy()
            ext1 = saltkey.get('EXTVER', struct[i])
            if xcoeff:
                j = (ext1 - 1) % amplifiers
                xc1 = float(xcoeff[j])
            else:
                xc1 = 1.0 / saltkey.get('XTALK', struct[i])

            # set up the second amplifier
            dat2 = struct[i + 1].data.copy()
            ext2 = saltkey.get('EXTVER', struct[i + 1])
            if xcoeff:
                j = (ext2 - 1) % amplifiers
                xc2 = float(xcoeff[j])
            else:
                xc2 = 1.0 / saltkey.get('XTALK', struct[i + 1])

            # subtract one from the other (mirror the partner amp in x)
            struct[i].data = struct[i].data - xc2 * dat2[:, ::-1]
            struct[i + 1].data = struct[i + 1].data - xc1 * dat1[:, ::-1]

            # correct the variance frame
            if saltkey.found('VAREXT', struct[i]):
                vhdu1 = saltkey.get('VAREXT', struct[i])
                vhdu2 = saltkey.get('VAREXT', struct[i + 1])
                try:
                    # FIX: snapshot both variance arrays first.  The original
                    # code updated vhdu1 and then read the already-modified
                    # vhdu1 when updating vhdu2, contaminating the second amp.
                    # NOTE(review): strict propagation of var(a - c*b) would
                    # use c**2 and the x-mirrored partner array -- confirm
                    # intended approximation before changing further.
                    var1 = struct[vhdu1].data.copy()
                    var2 = struct[vhdu2].data.copy()
                    struct[vhdu1].data += xc2 * var2
                    struct[vhdu2].data += xc1 * var1
                except Exception as e:
                    msg = 'Cannot update the variance frame in %s[%i] because %s' % (
                        infile, vhdu1, e)
                    raise SaltError(msg)

            # print the message
            if log:
                message = '%25s[%1d] Amp%1d - Amp%1d * %8.6f' % \
                    (infile, i, ext1, ext2, xc2)
                log.message(message, with_header=False, with_stdout=verbose)
                message = '%25s[%1d] Amp%1d - Amp%1d * %8.6f' % \
                    (infile, i+1, ext2, ext1, xc1)
                log.message(message, with_header=False,
                            with_stdout=verbose)
def slotutcfix(images, update, outfile, ampperccd, ignorexp, droplimit, inter, plotdata, logfile, verbose, debug):
    """Fix the UTC timestamps of SALTICAM slot-mode frames.

    images--input image(s) or @list of images
    update--whether to update the headers (may be changed interactively)
    outfile--optional ascii file for the corrected times
    ampperccd--amplifiers per CCD (0 treats each extension as a frame)
    ignorexp--number of initial exposures to ignore
    droplimit--maximum number of dropped frames to search for per exposure
    inter--interactively confirm the derived dwell time
    plotdata--plot the timing fit

    Raises SaltIOError/SaltError on malformed input.
    """
    with logging(logfile, debug) as log:

        # set up the variables
        utc_list = []

        # is the input file specified?
        saltsafeio.filedefined('Input', images)

        # if the input file is a list, does it exist?
        if images[0] == '@':
            saltsafeio.listexists('Input', images)

        # parse list of input files and place them in order
        infiles = saltsafeio.listparse('Raw image', images, '', '', '')
        infiles.sort()

        # check input files exist
        saltsafeio.filesexist(infiles, '', 'r')

        # check to see if the output file exists and if so, clobber it
        if os.path.isfile(outfile):
            try:
                os.remove(outfile)
            except:
                raise SaltIOError('File ' + outfile + ' can not be removed')

        # open the outfile
        if outfile:
            try:
                fout = open(outfile, 'w')
            except:
                raise SaltIOError('File ' + outfile + ' can not be opened')

        # get time of first exposure and basic information about the
        # observations
        infile = infiles[0]
        struct = saltsafeio.openfits(infile)

        # check to make sure slotmode data
        detmode = saltsafekey.get('DETMODE', struct[0], infile)
        if detmode != 'Slot Mode':
            raise SaltIOError('Data are not Slot Mode Observations')

        # Check to see if SLOTUTCFIX has already been run
        # and print a warning if they have
        if saltsafekey.found('SLOTUTC', struct[0]):
            message = 'Data have already been processed by SLOTUTCFIX'
            log.warning(message)

        # check to make sure that it is the right version of the software
        scamver = saltsafekey.get('DETSWV', struct[0], infile)
        try:
            scamver = float(scamver.split('-')[-1])
            if 4.42 <= scamver <= 5.00:
                pass
            else:
                raise SaltError('cannot currently correct this version of the SCAM software.')
        except:
            raise SaltError('Not able to read software version')

        # requested exposure time
        req_texp = saltsafekey.get('EXPTIME', struct[0], infile)

        # how many extensions?
        nextend = saltsafekey.get('NEXTEND', struct[0], infile)

        # how many amplifiers
        amplifiers = saltsafekey.get('NCCDS', struct[0], infile)
        amplifiers = int(ampperccd * float(amplifiers))
        if ampperccd > 0:
            # FIX: integer division -- nframes feeds np.arange/np.zeros sizes
            # and must stay an int under true division
            nframes = nextend // amplifiers
            nstep = amplifiers
        else:
            nframes = nextend
            nstep = 1

        # how many total frame and unique times
        ntotal = nextend * len(infiles)
        nunique = len(infiles) * nframes - ignorexp + 1

        # Create arrays necessary for analysis
        id_arr = np.arange(nunique)
        utc_arr = np.zeros(nunique, dtype=float)

        # Read in each file and make a list of the UTC values
        if verbose:
            log.message('Reading in files to create list of UTC values.')
        j = 0
        for n, infile in enumerate(infiles):
            # Show progress
            if verbose:
                percent = 100. * float(n) / float(len(infiles))
                ctext = 'Percentage Complete: %.2f\r' % percent
                sys.stdout.write(ctext)
                sys.stdout.flush()

            struct = saltsafeio.openfits(infile)
            if not len(struct) - 1 == nextend:
                raise SaltIOError(infile, ' has a different number of extensions from the first file')

            # Skip through the frames and read in the utc
            istart = 1
            if infile == infiles[0]:
                istart = ignorexp * amplifiers + 1
            for i in range(istart, len(struct), amplifiers):
                try:
                    utc_list.append(saltsafekey.get('UTC-OBS', struct[i], infile))
                    utc_arr[j] = slottool.getobstime(struct[i], infile)
                    j += 1
                except Exception as e:
                    # FIX: the message was a string literal broken across a
                    # physical line (a syntax error); rejoined on one line
                    raise SaltIOError('Unable to create array of UTC times. Please check the number of extensions in the files')

            # close FITS file
            saltsafeio.closefits(struct)

        # set up the other important arrays
        try:
            diff_arr = utc_arr.copy()
            diff_arr[1:] = diff_arr[1:] - utc_arr[:-1]
            diff_arr[0] = -1
            dsec_arr = utc_arr - utc_arr.astype(int)
        except:
            raise SaltIOError('Unable to create timing arrays')

        # calculate the real exposure time
        if verbose:
            log.message('Calculating real exposure time.')
        real_expt, med_expt, t_start, t_arr, ysum_arr = calculate_realexptime(
            id_arr, utc_arr, dsec_arr, diff_arr, req_texp, utc_list)

        # plot the results
        if plotdata:
            if verbose:
                log.message('Plotting data.')
            plt.ion()
            plt.plot(t_arr, ysum_arr, linewidth=0.5, linestyle='-', marker='', color='b')
            plt.xlabel('Time (s)')
            plt.ylabel('Fit')

        # Calculate the corrrect values
        if verbose:
            log.message('Calculating correct values')
        i_start = abs(utc_arr - t_start).argmin()
        t_diff = utc_arr * 0.0 + real_expt
        nd = utc_arr * 0.0
        ndrop = 0
        for i in range(len(utc_arr)):
            if utc_arr[i] >= t_start:
                t_new = t_start + real_expt * (i - i_start + ndrop)
                t_diff[i] = utc_arr[i] - t_new
                # search forward for dropped frames up to droplimit
                while (t_diff[i] > real_expt and nd[i] < droplimit):
                    nd[i] += 1
                    t_new = t_start + real_expt * (i - i_start + ndrop + nd[i])
                    t_diff[i] = utc_arr[i] - t_new
                if (nd[i] < droplimit):
                    ndrop += nd[i]
            else:
                t_new = t_start + real_expt * (i - i_start)
                t_diff[i] = utc_arr[i] - t_new
                while (t_diff[i] > real_expt and nd[i] < droplimit):
                    nd[i] += 1
                    t_new = t_start + real_expt * (i - i_start - nd[i])
                    t_diff[i] = utc_arr[i] - t_new

        # calculate the corrected timestamp by counting 6 record files forward and
        # 8 recored + unrecorded files back--or just 8*t_exp forward.
        # if the object is near the end of the run, then just replace it with
        # the correct value assuming no dropped exposures.

        # first make the array of new times
        new_arr = utc_arr - t_diff

        # Next loop through them to find the corrected time
        corr_arr = utc_arr * 0.0
        for i in range(len(new_arr)):
            if i + 6 < len(new_arr) - 1:
                corr_arr[i] = new_arr[i + 6] - 8 * real_expt
            else:
                corr_arr[i] = new_arr[i] - 2 * real_expt

        t_diff = utc_arr - corr_arr

        # write out the first results
        msg = "Dwell Time=%5.3f Requested Exposure Time=%5.3f Nobs = %i Dropped = %i" % (real_expt, req_texp, nunique, ndrop)
        if verbose:
            log.message(msg)

        if outfile:
            fout.write('#' + msg + '\n')
            fout.write('#%23s %2s %12s %12s %10s %8s %4s \n' % ('File', 'N', 'UTC_old', 'UTC_new', 'UTC_new(s)', 'Diff', 'drop'))

        # Give the user a chance to update the value
        if inter:
            message = 'Update headers with a dwell time of %5.3f s [y/n]? ' % real_expt
            update = saltsafeio.yn_ask(message)
            if not update:
                message = 'Set Dwell Time manually [y/n]? '
                update = saltsafeio.yn_ask(message)
                if update:
                    message = 'New Dwell Time: '
                    real_expt = saltsafeio.ask(message)
                    try:
                        real_expt = float(real_expt)
                    except Exception as e:
                        msg = 'Could not set user dwell time because %s' % e
                        raise SaltError(msg)
def hducombine(hdu_list, outhdu, ext, method='average', datasec=None, reject=None, mask=False, weight=False, scale=None, statsec=None, blank=0, lthresh=3, hthresh=3):
    """Combine a set of images in imlist

    hdu_list--list of fits HDULists to combine
    outhdu--fits HDUList receiving the combined result
    ext--extension index to combine
    method--combination statistic ('average', ...)
    datasec--(y1, y2, x1, x2) subregion, or None for the full image
    reject--outlier rejection method (e.g. 'ccdclip')
    mask--combine the bad pixel masks when a BPMEXT exists
    weight--use the variance frames as weights when a VAREXT exists
    scale, statsec--scaling method and statistics region
    lthresh, hthresh--low/high rejection thresholds

    return outhdu
    """
    # set up i as the extension variable as shortcut
    i = ext
    nimages = len(hdu_list)
    # per-image gain/readnoise (only filled for ccdclip rejection)
    gain = np.zeros(nimages)
    rdnoise = np.zeros(nimages)
    varext = None
    bpmext = None

    # set the datasec in case it is none as the full image
    if datasec is None:
        sh = outhdu[ext].data.shape
        y1, x1 = (0, 0)
        y2, x2 = outhdu[i].data.shape
    else:
        y1, y2, x1, x2 = datasec
    dshape = outhdu[i].data[y1:y2, x1:x2].shape
    dtype = outhdu[i].data[y1:y2, x1:x2].dtype
    wei_arr = outhdu[i].data[y1:y2, x1:x2] * 0.0

    # check for variance frame
    if saltkey.found('VAREXT', outhdu[i]) and weight:
        varext = saltkey.get('VAREXT', outhdu[i])
        var_list = []

    # check for bad pixel mask
    if saltkey.found('BPMEXT', outhdu[i]) and mask:
        bpmext = saltkey.get('BPMEXT', outhdu[i])
        bpm_list = []

    # create the lists of arrays and scale the arrays if requests
    mean_scale = 0
    data_arr = np.zeros((nimages, dshape[0], dshape[1]), dtype=dtype)
    for j in range(nimages):
        data_arr[j, :, :] = hdu_list[j][i].data[y1:y2, x1:x2]
        # calculate the scale
        if scale:
            # NOTE(review): CalculateScale sees the whole stack although only
            # rows 0..j are populated at this point -- confirm intended
            scale_val = CalculateScale(data_arr, scale, statsec)
            mean_scale += scale_val
        else:
            scale_val = 1
            mean_scale += 1
        if varext:
            # NOTE(review): variance is divided by scale_val, not scale_val**2
            var_list.append(hdu_list[j][varext].data[y1:y2, x1:x2] / scale_val)
        if bpmext:
            bpm_list.append(hdu_list[j][bpmext].data[y1:y2, x1:x2])
        # get the gain and rdnoise
        if reject == 'ccdclip':
            if saltkey.found('GAINMULT', hdu_list[j][i]):
                gain[j] = 1
            else:
                gain[j] = saltkey.get('GAIN', hdu_list[j][i])
            rdnoise[j] = saltkey.get('RDNOISE', hdu_list[j][i])

    # convert the lists to arrays
    if varext:
        var_arr = np.array(var_list)
        ivar_arr = 1.0 / var_arr
    else:
        var_arr = None
        ivar_arr = None
    if bpmext:
        bpm_arr = np.array(bpm_list)
    else:
        bpm_arr = None

    # reject outliers if set
    bpm_arr = RejectArray(data_arr, reject=reject, var=var_arr, bpm=bpm_arr,
                          lthresh=lthresh, hthresh=hthresh, gain=gain, rdnoise=rdnoise)

    # calculate the average values
    outdata, outwei = CombineArray(data_arr, method=method, ivar=ivar_arr, bpm=bpm_arr)
    outhdu[i].data[y1:y2, x1:x2] = outdata
    # FIX: keep the combination weights -- the original left wei_arr at its
    # all-zero initial value, so the BPM written below flagged every pixel
    wei_arr = outwei
    if scale is not None:
        mean_scale = mean_scale / nimages
        outhdu[i].data[y1:y2, x1:x2] *= mean_scale

    # create the combine variance frame
    if varext:
        outhdu[varext].data[y1:y2, x1:x2], tmp_arr = CombineArray(
            var_arr, method=method, ivar=ivar_arr, bpm=bpm_arr)
        del tmp_arr
        if scale is not None:
            outhdu[varext].data *= mean_scale

    # check to see if any of the pixels have no values and replace with blank
    # if wei_arr.any()==0:
    #    wmask=(wei_arr==0)
    #    outhdu[i].data[wmask]=blank

    # create the combine BPM frames: a pixel is bad when no input image
    # contributed to it.  FIX: write only the datasec subregion, matching
    # the SCI and VAR handling above.
    if bpmext:
        outhdu[bpmext].data[y1:y2, x1:x2] = 1.0 * (wei_arr == 0)

    return outhdu
def rectify(hdu, soldict, caltype='line', function='poly', order=3,
            inttype='interp', w1=None, w2=None, dw=None, nw=None, blank=0,
            pixscale=0.0, time_interp=False, conserve=False, nearest=False,
            clobber=True, log=None, verbose=True):
    """Read in an image and a set of wavlength solutions.  Calculate the
       best wavelength solution for a given dataset and then apply that
       data set to the image

       hdu--fits HDUList; each SCI extension is rectified in place
       soldict--dictionary of wavelength solutions
       w1, w2, dw, nw--output wavelength axis; any left as None is derived
                       per extension from the middle-row solution
       blank--fill value where no solution/interpolation exists
       conserve--divide by dw to conserve flux
       log--saltio log for recording information

     return hdu
    """
    # set the basic values: remember which axis values must be derived
    set_w1 = (w1 is None)
    set_w2 = (w2 is None)
    set_dw = (dw is None)
    set_nw = (nw is None)

    # set up the time of the observation
    dateobs = saltkey.get('DATE-OBS', hdu[0])
    utctime = saltkey.get('TIME-OBS', hdu[0])
    exptime = saltkey.get('EXPTIME', hdu[0])
    instrume = saltkey.get('INSTRUME', hdu[0]).strip()
    grating = saltkey.get('GRATING', hdu[0]).strip()
    if caltype == 'line':
        grang = saltkey.get('GRTILT', hdu[0])
        arang = saltkey.get('CAMANG', hdu[0])
    else:
        grang = saltkey.get('GR-ANGLE', hdu[0])
        arang = saltkey.get('AR-ANGLE', hdu[0])
    filtername = saltkey.get('FILTER', hdu[0]).strip()
    slitname = saltkey.get('MASKID', hdu[0])
    slit = st.getslitsize(slitname)
    xbin, ybin = saltkey.ccdbin(hdu[0])

    timeobs = enterdatetime('%s %s' % (dateobs, utctime))

    # check to see if there is more than one solution
    if caltype == 'line':
        if len(soldict) == 1:
            sol = soldict.keys()[0]
            slitid = None
            if not matchobservations(
                    soldict[sol], instrume, grating, grang, arang,
                    filtername, slitid):
                msg = 'Observations do not match setup for transformation but using the solution anyway'
                if log:
                    log.warning(msg)

    for i in range(1, len(hdu)):
        if hdu[i].name == 'SCI':
            if log:
                log.message('Correcting extension %i' % i)
            istart = int(0.5 * len(hdu[i].data))

            # open up the data
            # set up the xarr and initial wavlength solution
            xarr = np.arange(len(hdu[i].data[istart]), dtype='int64')

            # get the slitid
            try:
                slitid = saltkey.get('SLITNAME', hdu[i])
            except:
                slitid = None
            # set up a wavelength solution
            try:
                w_arr = findsol(xarr, soldict, istart, caltype, nearest,
                                timeobs, exptime, instrume, grating, grang,
                                arang, filtername, slit, xbin, ybin, slitid,
                                function, order)
            except SALTSpecError as e:
                if slitid:
                    msg = 'SLITID %s: %s' % (slitid, e)
                    if log:
                        log.warning(msg)
                    continue
                else:
                    raise SALTSpecError(e)

            if w_arr is None:
                w_arr = findsol(xarr, soldict, istart, 'rss', nearest,
                                timeobs, exptime, instrume, grating, grang,
                                arang, filtername, slit, xbin, ybin, slitid,
                                function, order)

            # set up the output x-axis
            if set_w1:
                w1 = w_arr.min()
            if set_w2:
                w2 = w_arr.max()
            if set_nw:
                nw = len(xarr)
            if set_dw:
                dw = float(w2 - w1) / nw
            nw_arr = createoutputxaxis(w1, w2, nw)

            # setup the VARIANCE and BPM frames
            if saltkey.found('VAREXT', hdu[i]):
                varext = saltkey.get('VAREXT', hdu[i])
            else:
                varext = None

            # setup the BPM frames
            if saltkey.found('BPMEXT', hdu[i]):
                bpmext = saltkey.get('BPMEXT', hdu[i])
            else:
                bpmext = None

            # for each line in the data, determine the wavelength solution
            # for a given line in the image
            for j in range(len(hdu[i].data)):
                # find the wavelength solution for the data
                w_arr = findsol(xarr, soldict, j, caltype, nearest, timeobs,
                                exptime, instrume, grating, grang, arang,
                                filtername, slit, xbin, ybin, slitid,
                                function, order)

                # apply that wavelength solution to the data
                if w_arr is not None:
                    try:
                        hdu[i].data[j, :] = st.interpolate(
                            nw_arr, w_arr, hdu[i].data[j, :], inttype,
                            left=blank, right=blank)
                    except Exception as e:
                        hdu[i].data[j, :] = hdu[i].data[j, :] * 0.0 + blank
                        # FIX: report the failing row j (was extension index i)
                        # and actually emit the message instead of discarding it
                        msg = 'In row %i, solution cannot be found due to %s' % (
                            j, e)
                        if log:
                            log.warning(msg)

                    # correct the variance frame
                    if varext:
                        try:
                            hdu[varext].data[j, :] = st.interpolate(
                                nw_arr, w_arr, hdu[varext].data[j, :],
                                inttype, left=blank, right=blank)
                        except Exception as e:
                            msg = 'In row %i, solution cannot be found due to %s' % (
                                j, e)
                            if log:
                                log.warning(msg)

                    # correct the BPM frame
                    if bpmext:
                        try:
                            hdu[bpmext].data[j, :] = st.interpolate(
                                nw_arr, w_arr, hdu[bpmext].data[j, :],
                                inttype, left=blank, right=blank)
                        except Exception as e:
                            msg = 'In row %i, solution cannot be found due to %s' % (
                                j, e)
                            if log:
                                log.warning(msg)
                else:
                    # no solution for this row: blank it out
                    hdu[i].data[j, :] = hdu[i].data[j, :] * 0.0 + blank

            if conserve:
                hdu[i].data = hdu[i].data / dw
                if varext:
                    hdu[varext].data = hdu[varext].data / dw

            # Add WCS information
            saltkey.new('CTYPE1', 'LAMBDA', 'Coordinate Type', hdu[i])
            saltkey.new('CTYPE2', 'PIXEL', 'Coordinate Type', hdu[i])
            saltkey.new('CD1_1', dw,
                        'WCS: Wavelength Dispersion in angstrom/pixel',
                        hdu[i])
            saltkey.new('CD2_1', 0.0, 'WCS: ', hdu[i])
            saltkey.new('CD1_2', 0.0, 'WCS: ', hdu[i])
            saltkey.new('CD2_2', ybin * pixscale, 'WCS: ', hdu[i])
            saltkey.new('CRPIX1', 0.0, 'WCS: X Reference pixel', hdu[i])
            saltkey.new('CRPIX2', 0.0, 'WCS: Y Reference pixel', hdu[i])
            saltkey.new('CRVAL1', w1, 'WCS: X Reference pixel', hdu[i])
            saltkey.new('CRVAL2', 0.0, 'WCS: Y Reference pixel', hdu[i])
            saltkey.new('CDELT1', 1.0, 'WCS: X pixel size', hdu[i])
            saltkey.new('CDELT2', 1.0, 'WCS: Y pixel size', hdu[i])
            saltkey.new('DC-FLAG', 0, 'Dispesion Corrected image', hdu[i])

    return hdu
def gain(struct, mult=True, usedb=False, dblist=None, ampccd=2, log=None, verbose=True):
    """gain processes a image hduList and gain corrects each amplifier.  It can
       either use gain settings in the header or those supplied in a config file
       which would be suppleid in the dblist (see helpfile for structure of the
       config file).  If variance frames exist, it will update those for changes
       in the header value as well.

       In the end, it will update the gain with a value of one signfing the data
       has been transformed into e- from ADU

       The program will look for the non-linear gain settings which are given by:
       e  = GAIN*(1 + GAIN1*E-6*ADU)*ADU

       mult--if true, multiple the gains
       usedb--use the values in the dblist, if false use the header values
       dblist--values for the gain and readnoise from the
       ampccd--number of amplifiers per ccd

       dblist should have the following lists:  speed, rate, gain, noise, bias, amp
    """
    # get the infile name
    infile = saltkey.getimagename(struct[0])

    # how many science extensions
    nsciext = saltkey.get('NSCIEXT', struct[0])
    # how many data extensions
    # FIX: was read from 'NSCIEXT', duplicating nsciext
    nextend = saltkey.get('NEXTEND', struct[0])

    # how many amplifiers?
    amplifiers = ampccd * saltkey.get('NCCDS', struct[0])

    # read the gain and rospeed for the image
    gainset = saltkey.get('GAINSET', struct[0])
    rospeed = saltkey.get('ROSPEED', struct[0])

    # loop through each amplifier and gain correct it
    if log:
        message = '%28s %6s %5s %3s %5s %5s' \
            % ('HDU', 'GAIN', 'SPEED', 'AMP', 'GAIN', 'NOISE')
        log.message('\n ---------------------------------------------------',
                    with_header=False, with_stdout=verbose)
        log.message(message, with_header=False, with_stdout=verbose)
        log.message(' ---------------------------------------------------',
                    with_header=False, with_stdout=verbose)
    for i in range(nsciext):
        hdu = i + 1
        amp = i % amplifiers + 1
        # get the gain and rdnoise values for the array
        if usedb:
            gain, rdnoise = get_values(dblist, gainset, rospeed, amp)
            gain1 = 0
        else:
            gain = saltkey.get('GAIN', struct[hdu])
            rdnoise = saltkey.get('RDNOISE', struct[hdu])
            try:
                gain1 = saltkey.get('GAIN1', struct[hdu])
            except:
                # no non-linear term recorded: fall back to a linear gain
                gain1 = 0

        if mult:
            # correct the gain
            # FIX: apply the documented non-linear correction
            # e = GAIN*(1 + GAIN1*E-6*ADU)*ADU; the previous expression
            # (gain*data + gain1*data**2) omitted the 1e-6 scaling and the
            # gain factor on the quadratic term, and was inconsistent with
            # the variance propagation below (its derivative).
            try:
                data = struct[hdu].data
                struct[hdu].data = gain * data * (1 + gain1 * 1e-6 * data)
            except Exception as e:
                msg = 'Cannot gain correct %s[%i] because %s' % (infile, hdu, e)
                raise SaltError(msg)

            # correct the variance frame: var_e = var_ADU * (de/dADU)
            # evaluated with the pre-correction data saved above
            if saltkey.found('VAREXT', struct[hdu]):
                vhdu = saltkey.get('VAREXT', struct[hdu])
                try:
                    vdata = struct[vhdu].data
                    struct[vhdu].data = vdata * gain * (
                        1 + 2 * gain1 * 1e-6 * data)
                except Exception as e:
                    msg = 'Cannot update the variance frame in %s[%i] because %s' % (
                        infile, vhdu, e)
                    raise SaltError(msg)
def slotutcfix(images, update, outfile, ampperccd, ignorexp, droplimit, inter,
               plotdata, logfile, verbose, debug):
    """Fix the UTC timestamps of SALTICAM slot-mode frames.

    images--input image(s) or @list of images
    update--whether to update the headers (may be changed interactively)
    outfile--optional ascii file for the corrected times
    ampperccd--amplifiers per CCD (0 treats each extension as a frame)
    ignorexp--number of initial exposures to ignore
    droplimit--maximum number of dropped frames to search for per exposure
    inter--interactively confirm the derived dwell time
    plotdata--plot the timing fit

    Raises SaltIOError/SaltError on malformed input.
    """
    with logging(logfile, debug) as log:

        # set up the variables
        utc_list = []

        # is the input file specified?
        saltsafeio.filedefined('Input', images)

        # if the input file is a list, does it exist?
        if images[0] == '@':
            saltsafeio.listexists('Input', images)

        # parse list of input files and place them in order
        infiles = saltsafeio.listparse('Raw image', images, '', '', '')
        infiles.sort()

        # check input files exist
        saltsafeio.filesexist(infiles, '', 'r')

        # check to see if the output file exists and if so, clobber it
        if os.path.isfile(outfile):
            try:
                os.remove(outfile)
            except:
                raise SaltIOError('File ' + outfile + ' can not be removed')

        # open the outfile
        if outfile:
            try:
                fout = open(outfile, 'w')
            except:
                raise SaltIOError('File ' + outfile + ' can not be opened')

        # get time of first exposure and basic information about the
        # observations
        infile = infiles[0]
        struct = saltsafeio.openfits(infile)

        # check to make sure slotmode data
        detmode = saltsafekey.get('DETMODE', struct[0], infile)
        if detmode != 'Slot Mode':
            raise SaltIOError('Data are not Slot Mode Observations')

        # Check to see if SLOTUTCFIX has already been run
        # and print a warning if they have
        if saltsafekey.found('SLOTUTC', struct[0]):
            message = 'Data have already been processed by SLOTUTCFIX'
            log.warning(message)

        # check to make sure that it is the right version of the software
        scamver = saltsafekey.get('DETSWV', struct[0], infile)
        try:
            scamver = float(scamver.split('-')[-1])
            if 4.42 <= scamver <= 5.00:
                pass
            else:
                raise SaltError(
                    'cannot currently correct this version of the SCAM software.'
                )
        except:
            raise SaltError('Not able to read software version')

        # requested exposure time
        req_texp = saltsafekey.get('EXPTIME', struct[0], infile)

        # how many extensions?
        nextend = saltsafekey.get('NEXTEND', struct[0], infile)

        # how many amplifiers
        amplifiers = saltsafekey.get('NCCDS', struct[0], infile)
        amplifiers = int(ampperccd * float(amplifiers))
        if ampperccd > 0:
            # FIX: integer division -- nframes feeds np.arange/np.zeros sizes
            # and must stay an int under true division
            nframes = nextend // amplifiers
            nstep = amplifiers
        else:
            nframes = nextend
            nstep = 1

        # how many total frame and unique times
        ntotal = nextend * len(infiles)
        nunique = len(infiles) * nframes - ignorexp + 1

        # Create arrays necessary for analysis
        id_arr = np.arange(nunique)
        utc_arr = np.zeros(nunique, dtype=float)

        # Read in each file and make a list of the UTC values
        if verbose:
            log.message('Reading in files to create list of UTC values.')
        j = 0
        for n, infile in enumerate(infiles):
            # Show progress
            if verbose:
                percent = 100. * float(n) / float(len(infiles))
                ctext = 'Percentage Complete: %.2f\r' % percent
                sys.stdout.write(ctext)
                sys.stdout.flush()

            struct = saltsafeio.openfits(infile)
            if not len(struct) - 1 == nextend:
                raise SaltIOError(
                    infile,
                    ' has a different number of extensions from the first file'
                )

            # Skip through the frames and read in the utc
            istart = 1
            if infile == infiles[0]:
                istart = ignorexp * amplifiers + 1
            for i in range(istart, len(struct), amplifiers):
                try:
                    utc_list.append(
                        saltsafekey.get('UTC-OBS', struct[i], infile))
                    utc_arr[j] = slottool.getobstime(struct[i], infile)
                    j += 1
                except Exception as e:
                    # FIX: the message was a string literal broken across a
                    # physical line (a syntax error); rejoined on one line
                    raise SaltIOError(
                        'Unable to create array of UTC times. Please check the number of extensions in the files'
                    )

            # close FITS file
            saltsafeio.closefits(struct)

        # set up the other important arrays
        try:
            diff_arr = utc_arr.copy()
            diff_arr[1:] = diff_arr[1:] - utc_arr[:-1]
            diff_arr[0] = -1
            dsec_arr = utc_arr - utc_arr.astype(int)
        except:
            raise SaltIOError('Unable to create timing arrays')

        # calculate the real exposure time
        if verbose:
            log.message('Calculating real exposure time.')
        real_expt, med_expt, t_start, t_arr, ysum_arr = calculate_realexptime(
            id_arr, utc_arr, dsec_arr, diff_arr, req_texp, utc_list)

        # plot the results
        if plotdata:
            if verbose:
                log.message('Plotting data.')
            plt.ion()
            plt.plot(t_arr,
                     ysum_arr,
                     linewidth=0.5,
                     linestyle='-',
                     marker='',
                     color='b')
            plt.xlabel('Time (s)')
            plt.ylabel('Fit')

        # Calculate the corrrect values
        if verbose:
            log.message('Calculating correct values')
        i_start = abs(utc_arr - t_start).argmin()
        t_diff = utc_arr * 0.0 + real_expt
        nd = utc_arr * 0.0
        ndrop = 0
        for i in range(len(utc_arr)):
            if utc_arr[i] >= t_start:
                t_new = t_start + real_expt * (i - i_start + ndrop)
                t_diff[i] = utc_arr[i] - t_new
                # search forward for dropped frames up to droplimit
                while (t_diff[i] > real_expt and nd[i] < droplimit):
                    nd[i] += 1
                    t_new = t_start + real_expt * (i - i_start + ndrop + nd[i])
                    t_diff[i] = utc_arr[i] - t_new
                if (nd[i] < droplimit):
                    ndrop += nd[i]
            else:
                t_new = t_start + real_expt * (i - i_start)
                t_diff[i] = utc_arr[i] - t_new
                while (t_diff[i] > real_expt and nd[i] < droplimit):
                    nd[i] += 1
                    t_new = t_start + real_expt * (i - i_start - nd[i])
                    t_diff[i] = utc_arr[i] - t_new

        # calculate the corrected timestamp by counting 6 record files forward and
        # 8 recored + unrecorded files back--or just 8*t_exp forward.
        # if the object is near the end of the run, then just replace it with
        # the correct value assuming no dropped exposures.

        # first make the array of new times
        new_arr = utc_arr - t_diff

        # Next loop through them to find the corrected time
        corr_arr = utc_arr * 0.0
        for i in range(len(new_arr)):
            if i + 6 < len(new_arr) - 1:
                corr_arr[i] = new_arr[i + 6] - 8 * real_expt
            else:
                corr_arr[i] = new_arr[i] - 2 * real_expt

        t_diff = utc_arr - corr_arr

        # write out the first results
        msg = "Dwell Time=%5.3f Requested Exposure Time=%5.3f Nobs = %i Dropped = %i" % (
            real_expt, req_texp, nunique, ndrop)
        if verbose:
            log.message(msg)

        if outfile:
            fout.write('#' + msg + '\n')
            fout.write('#%23s %2s %12s %12s %10s %8s %4s \n' %
                       ('File', 'N', 'UTC_old', 'UTC_new', 'UTC_new(s)',
                        'Diff', 'drop'))

        # Give the user a chance to update the value
        if inter:
            message = 'Update headers with a dwell time of %5.3f s [y/n]? ' % real_expt
            update = saltsafeio.yn_ask(message)
            if not update:
                message = 'Set Dwell Time manually [y/n]? '
                update = saltsafeio.yn_ask(message)
                if update:
                    message = 'New Dwell Time: '
                    real_expt = saltsafeio.ask(message)
                    try:
                        real_expt = float(real_expt)
                    except Exception as e:
                        msg = 'Could not set user dwell time because %s' % e
                        raise SaltError(msg)
def specslit(image, outimage, outpref, exttype='auto', slitfile='',
             outputslitfile='', regprefix='', sections=3, width=25,
             sigma=2.2, thres=6, order=3, padding=5, yoffset=0, inter=False,
             clobber=True, logfile='salt.log', verbose=True):
    """Extract individual slit spectra from a multi-slit image.

    image, outimage, outpref--input image(s) and output name(s)/prefix
    exttype--where slit definitions come from: 'rsmt', 'fits', 'ascii',
             'ds9', or 'auto' (detect slits from the image itself)
    slitfile--slit definition file(s) (required unless exttype='auto')
    outputslitfile--optional ascii file(s) for the detected slit positions
    regprefix--if set, also write a ds9 region file per image
    sections, width, sigma, thres, order, padding--detection/extraction tuning
    yoffset--y offset of the mask; None triggers an automatic check

    Raises SALTSpecError for invalid tuning parameters.
    """
    with logging(logfile, debug) as log:

        # check all the input and make sure that all the input needed is provided
        # by the user

        # read the image or image list and check if each in the list exist
        infiles = saltio.argunpack('Input', image)

        # unpack the outfiles
        outfiles = saltio.listparse('Outimages', outimage, outpref, infiles, '')

        # from the extraction type, check whether the input file is specified.
        # if the slitfile parameter is specified then use the slit files for
        # the extraction. if the extraction type is auto then use image for the
        # detection and the slit extraction
        if exttype == 'rsmt' or exttype == 'fits' or exttype == 'ascii' or exttype == 'ds9':
            slitfiles = saltio.argunpack('Slitfile', slitfile)
            if len(slitfiles) == 1:
                slitfiles = slitfiles * len(infiles)
            saltio.comparelists(infiles, slitfiles, 'image', 'slitfile')
        elif exttype == 'auto':
            slitfiles = infiles
            log.message(
                'Extraction type is AUTO. Slit detection will be done from image'
            )

        # read in if an optional ascii file is requested
        if len(outputslitfile) > 0:
            outslitfiles = saltio.argunpack('Outslitfiles', outputslitfile)
            saltio.comparelists(infiles, outslitfiles, 'image',
                                'outputslitfile')
        else:
            outslitfiles = [''] * len(infiles)

        # check if the width and sigma parameters were specified.
        # default is 25 and 2.2
        # FIX: all four checks now raise SALTSpecError -- three of them
        # previously raised the undefined name 'SaltSpecError' (NameError)
        if width < 10.:
            msg = 'The width parameter needs be a value larger than 10'
            raise SALTSpecError(msg)

        if sigma < 0.0:
            msg = 'Sigma must be greater than zero'
            raise SALTSpecError(msg)

        # check the threshold parameter; this needs to be specified by the user
        if thres <= 0.0:
            msg = 'Threshold must be greater than zero'
            raise SALTSpecError(msg)

        # check to make sure that the sections are greater than the order
        if sections <= order:
            msg = 'Number of sections must be greater than the order for the spline fit'
            raise SALTSpecError(msg)

        # run through each of the images and extract the slits
        for img, oimg, sfile, oslit in zip(infiles, outfiles, slitfiles,
                                           outslitfiles):
            log.message('Proccessing image %s' % img)

            # open the image
            struct = saltio.openfits(img)
            ylen, xlen = struct[1].data.shape
            xbin, ybin = saltkey.ccdbin(struct[0], img)

            # setup the VARIANCE and BPM frames
            if saltkey.found('VAREXT', struct[1]):
                varext = saltkey.get('VAREXT', struct[1])
                varlist = []
            else:
                varext = None

            # setup the BPM frames
            if saltkey.found('BPMEXT', struct[1]):
                bpmext = saltkey.get('BPMEXT', struct[1])
                bpmlist = []
            else:
                bpmext = None

            # open the slit definition file or identify the slits in the image
            slitmask = None
            ycheck = False
            if exttype == 'rsmt':
                log.message('Using slits from %s' % sfile)
                if yoffset is None:
                    yoffset = 0
                    ycheck = True
                slitmask = mt.read_slitmask_from_xml(sfile)
                # mask geometry constants (plate scale offsets)
                xpos = -0.3066
                ypos = 0.0117
                cx = int(xlen / 2.0)
                cy = int(ylen / 2.0) + ypos / 0.015 / ybin + yoffset
                order, slit_positions = mt.convert_slits_from_mask(
                    slitmask, order=1, xbin=xbin, ybin=ybin, pix_scale=0.1267,
                    cx=cx, cy=cy)
                sections = 1
            elif exttype == 'fits':
                log.message('Using slits from %s' % sfile)
                order, slit_positions = read_slits_from_fits(sfile)
            elif exttype == 'ascii':
                log.message('Using slits from %s' % sfile)
                order, slit_positions = mt.read_slits_from_ascii(sfile)
            elif exttype == 'ds9':
                log.message('Using slits from %s' % sfile)
                order, slit_positions, slitmask = mt.read_slits_from_ds9(
                    sfile, order=order)
                slitmask = None
                sections = 1
            elif exttype == 'auto':
                log.message('Identifying slits in %s' % img)
                # identify the slits in the image
                order, slit_positions = identify_slits(struct[1].data, order,
                                                       sections, width,
                                                       sigma, thres)

                # write out the slit identifications if ofile has been supplied
                if oslit:
                    log.message('Writing slit positions to %s' % oslit)
                    mt.write_outputslitfile(slit_positions, oslit, order)

            if ycheck:
                slit_positions, dy = check_ypos(slit_positions,
                                                struct[1].data)
                log.message('Using an offset of {}'.format(dy))

            # extract the slits
            spline_x = mt.divide_image(struct[1].data, sections)
            spline_x = 0.5 * (np.array(spline_x[:-1]) +
                              np.array(spline_x[1:]))
            extracted_spectra, spline_positions = mt.extract_slits(
                slit_positions, spline_x, struct[1].data, order=order,
                padding=padding)
            if varext:
                extracted_var, var_positions = mt.extract_slits(
                    slit_positions, spline_x, struct[varext].data,
                    order=order, padding=padding)
            if bpmext:
                extracted_bpm, bpm_positions = mt.extract_slits(
                    slit_positions, spline_x, struct[bpmext].data,
                    order=order, padding=padding)

            # write out the data to the new array
            # create the new file
            hdulist = fits.HDUList([struct[0]])

            # log the extracted spectra if needed
            log.message('', with_stdout=verbose)

            # setup output ds9 file
            if regprefix:
                # FIX: str.strip('.fits') removes *characters*, not the
                # suffix, and mangled basenames (e.g. 'first.fits' -> 'rst');
                # remove the extension explicitly instead
                regbase = os.path.basename(img)
                if regbase.endswith('.fits'):
                    regbase = regbase[:-len('.fits')]
                regout = open(regprefix + regbase + '.reg', 'w')
                regout.write('# Region file format: DS9 version 4.1\n')
                regout.write('# Filename: %s\n' % img)
                regout.write(
                    'global color=green dashlist=8 3 width=1 font="helvetica 10 normal roman" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=1 include=1 source=1\nphysical\n'
                )

            # add each
            imglist = []
            nslits = len(spline_positions)
            for i in range(nslits):
                y1 = spline_positions[i][0].min()
                y2 = spline_positions[i][1].max()
                msg = 'Extracted Spectra %i between %i to %i' % (i + 1, y1, y2)
                # log.message(msg, with_header=False, with_stdout=verbose)
                sdu = fits.ImageHDU(extracted_spectra[i],
                                    header=struct[1].header)
                if varext:
                    vdu = fits.ImageHDU(extracted_var[i],
                                        header=struct[varext].header)
                    sdu.header['VAREXT'] = i + nslits + 1
                    varlist.append(vdu)
                if bpmext:
                    bdu = fits.ImageHDU(extracted_bpm[i],
                                        header=struct[bpmext].header)
                    sdu.header['BPMEXT'] = i + 2 * nslits + 1
                    bpmlist.append(bdu)
                imglist.append(sdu)

                # add in some additional keywords
                imglist[i].header['MINY'] = (y1,
                                             'Lower Y value in original image')
                imglist[i].header['MAXY'] = (y2,
                                             'Upper Y value in original image')
                if regprefix:
                    xsize = struct[1].data.shape[1]
                    xsize = int(0.5 * xsize)
                    rtext = ''
                    if slitmask:
                        # rtext='%s, %8.7f, %8.7f, %3.2f' % (slitmask.slitlets.data[i]['name'], slitmask.slitlets.data[i]['targ_ra'], slitmask.slitlets.data[i]['targ_dec'], slitmask.slitlets.data[i]['slit_width'])
                        pass
                    regout.write('box(%i,%i, %i, %i) #text={%s}\n' %
                                 (xsize, 0.5 * (y1 + y2), 2 * xsize,
                                  y2 - y1, rtext))

                # add slit information
                if slitmask:
                    imglist[i].header['SLITNAME'] = (
                        slitmask.slitlets.data[i]['name'], 'Slit Name')
                    imglist[i].header['SLIT_RA'] = (
                        slitmask.slitlets.data[i]['targ_ra'], 'Slit RA')
                    imglist[i].header['SLIT_DEC'] = (
                        slitmask.slitlets.data[i]['targ_dec'], 'Slit DEC')
                    imglist[i].header['SLIT'] = (
                        slitmask.slitlets.data[i]['slit_width'],
                        'Slit Width')

            # FIX: close the region file (it was previously leaked)
            if regprefix:
                regout.close()

            # add to the hdulist
            hdulist += imglist
            if varext:
                hdulist += varlist
            if bpmext:
                hdulist += bpmlist

            # write the slit positions to the header
            # create the binary table HDU that contains the split positions
            tbhdu = mt.slits_HDUtable(slit_positions, order)

            # add the extname parameter to the extension
            tbhdu.header['EXTNAME'] = 'BINTABLE'

            # record where the table lives on the primary header
            hdulist[0].header['SLITEXT'] = len(hdulist)
            hdulist.append(tbhdu)

            # add addition header information about the mask
            if slitmask:
                hdulist[0].header['MASKNAME'] = (slitmask.mask_name,
                                                 'SlitMask Name')
                hdulist[0].header['MASK_RA'] = (slitmask.center_ra,
                                                'SlitMask RA')
                hdulist[0].header['MASK_DEC'] = (slitmask.center_dec,
                                                 'SlitMask DEC')
                hdulist[0].header['MASK_PA'] = (slitmask.position_angle,
                                                'SlitMask Position Angle')

            # write out the image
            saltio.writefits(hdulist, oimg, clobber)
def saltxtalk(images, outimages, outpref, xtalkfile=None, usedb=False,
              clobber=True, logfile='salt.log', verbose=True):
    """Apply a crosstalk correction to each SALT image in *images*.

    images -- input image list specification
    outimages/outpref -- explicit output names or a prefix used to build them
    xtalkfile -- file of crosstalk coefficients (only read when usedb=True)
    usedb -- take coefficients from xtalkfile instead of an empty default
    clobber -- overwrite existing output files
    logfile/verbose -- logging controls

    Raises SaltError if an image carries the instrument's "already
    corrected" keyword.
    """
    # start logging
    with logging(logfile, debug) as log:

        # unpack the input list and construct the matching output list
        infiles = saltio.argunpack('Input', images)
        outfiles = saltio.listparse('Outfile', outimages, outpref, infiles, '')
        saltio.comparelists(infiles, outfiles, 'Input', 'output')

        # load the crosstalk coefficient database if requested
        if usedb:
            xtalkfile = xtalkfile.strip()
            xdict = saltio.readxtalkcoeff(xtalkfile)
        else:
            xdict = None

        for img, oimg in zip(infiles, outfiles):

            # open the fits file
            hdulist = saltio.openfits(img)

            # pick the database entry whose date lies closest to the
            # observation date (keys are numeric YYYYMMDD values)
            if usedb:
                obsdate = saltkey.get('DATE-OBS', hdulist[0])
                obsdate = int('%s%s%s' % (obsdate[0:4], obsdate[5:7],
                                          obsdate[8:]))
                date_keys = np.array(xdict.keys())
                best_date = date_keys[abs(date_keys - obsdate).argmin()]
                xcoeff = xdict[best_date]
            else:
                xcoeff = []

            # identify the instrument and its bookkeeping keywords
            instrume, keyprep, keygain, keybias, keyxtalk, keyslot = \
                saltkey.instrumid(hdulist)

            # refuse to correct the same frame twice
            if saltkey.found(keyxtalk, hdulist[0]):
                message = '%s has already been xtalk corrected' % img
                raise SaltError(message)

            # apply the cross-talk correction
            hdulist = xtalk(hdulist, xcoeff, log=log, verbose=verbose)

            # housekeeping keywords
            fname, hist = history(level=1, wrap=False,
                                  exclude=['images', 'outimages', 'outpref'])
            saltkey.housekeeping(hdulist[0], 'SXTALK',
                                 'Images have been xtalk corrected', hist)

            # write the corrected frame and release it
            saltio.writefits(hdulist, oimg, clobber=clobber)
            saltio.closefits(hdulist)
def bias(struct, subover=True, trim=True, subbias=False, bstruct=None,
         median=False, function='polynomial', order=3, rej_lo=3, rej_hi=3,
         niter=10, plotover=False, log=None, verbose=True):
    """Bias subtracts the bias levels from a frame.  It will fit and
    subtract the overscan region, trim the images, and subtract a master
    bias if required.

    struct--image structure (HDU list); modified in place
    subover--subtract the overscan region
    trim--trim the image
    subbias--subtract master bias
    bstruct--master bias image structure
    median--use the median instead of mean in image statistics
    function--form to fit to the overscan region
    order--order for the function
    rej_lo--sigma of low points to reject in the fit
           (NOTE(review): currently unused -- only rej_hi is passed to
            saltfit.interfit as `thresh`; confirm whether asymmetric
            rejection was intended)
    rej_hi--sigma of high points to reject in the fit
    niter--number of iterations
    log--saltio log for recording information
    verbose--whether to print to stdout
    """
    infile = saltkey.getimagename(struct[0])

    # how many extensions?
    nsciext = saltkey.get('NSCIEXT', struct[0])
    nextend = saltkey.get('NEXTEND', struct[0])
    nccd = saltkey.get('NCCDS', struct[0])

    # how many amplifiers?--this is hard wired
    amplifiers = 2 * nccd

    # log the process
    if subover and log:
        message = '%28s %7s %5s %4s %6s' % \
            ('HDU', 'Overscan', 'Order', 'RMS', 'Niter')
        log.message(
            '\n --------------------------------------------------',
            with_header=False, with_stdout=verbose)
        log.message(message, with_header=False, with_stdout=verbose)
        log.message(' --------------------------------------------------',
                    with_header=False, with_stdout=verbose)

    if (plotover):
        plt.figure(1)
        plt.axes([0.1, 0.1, 0.8, 0.8])
        plt.xlabel('CCD Column')
        plt.ylabel('Pixel Counts (e-)')
        plt.ion()

    # loop through the extensions and subtract the bias
    for i in range(1, nsciext + 1):
        if struct[i].name == 'SCI':

            # get the bias section
            biassec = saltkey.get('BIASSEC', struct[i])
            y1, y2, x1, x2 = saltio.getSection(biassec, iraf_format=True)
            # get the data section
            datasec = saltkey.get('DATASEC', struct[i])
            dy1, dy2, dx1, dx2 = saltio.getSection(datasec, iraf_format=True)

            # setup the overscan region
            if subover:
                yarr = np.arange(y1, y2, dtype=float)
                data = struct[i].data
                odata = struct[i].data[y1:y2, x1:x2]
                # collapse the overscan strip along rows; record the level
                if median:
                    odata = np.median((struct[i].data[y1:y2, x1:x2]), axis=1)
                    olevel = np.median((struct[i].data[y1:y2, x1:x2]))
                    saltkey.new('OVERSCAN', '%f' % (olevel),
                                'Overscan median value', struct[i])
                else:
                    odata = np.mean((struct[i].data[y1:y2, x1:x2]), axis=1)
                    olevel = np.mean((struct[i].data[y1:y2, x1:x2]))
                    saltkey.new('OVERSCAN', '%f' % (olevel),
                                'Overscan mean value', struct[i])

                # fit the overscan region
                ifit = saltfit.interfit(yarr, odata, function=function,
                                        order=order, thresh=rej_hi,
                                        niter=niter)
                try:
                    ifit.interfit()
                    coeffs = ifit.coef
                    ofit = ifit(yarr)
                    omean, omed, osigma = saltstat.iterstat(
                        (odata - ofit), sig=3, niter=5)
                except ValueError:
                    # catch the error if it is a zero array
                    ofit = np.array(yarr) * 0.0
                    osigma = 0.0
                except TypeError:
                    # catch the error if it is a zero array
                    ofit = np.array(yarr) * 0.0
                    osigma = 0.0

                # if it hasn't been already, convert image to double format
                struct[i].data = 1.0 * struct[i].data
                try:
                    # drop scaling keywords that no longer apply to floats;
                    # best-effort, since either may already be absent
                    struct[i].header.remove('BZERO')
                    struct[i].header.remove('BSCALE')
                except:
                    pass

                # subtract the overscan fit from every column
                for j in range(len(struct[i].data[0])):
                    struct[i].data[y1:y2, j] -= ofit

                # report the information
                if log:
                    message = '%25s[%1d] %8.2f %3d %7.2f %3d' % \
                        (infile, i, olevel, order, osigma, niter)
                    log.message(message, with_stdout=verbose,
                                with_header=False)

                # add the statistics to the image header
                saltkey.new('OVERRMS', '%f' % (osigma),
                            'Overscan RMS value', struct[i])

                # update the variance frame
                if saltkey.found('VAREXT', struct[i]):
                    vhdu = saltkey.get('VAREXT', struct[i])
                    try:
                        vdata = struct[vhdu].data
                        # The bias level should not be included in the noise
                        # from the signal
                        for j in range(len(struct[i].data[0])):
                            vdata[y1:y2, j] -= ofit
                        # add a bit to make sure that the minimum error is
                        # the rednoise
                        rdnoise = saltkey.get('RDNOISE', struct[i])
                        vdata[vdata < rdnoise**2] = rdnoise**2
                        struct[vhdu].data = vdata + osigma**2
                    except Exception as e:
                        msg = 'Cannot update the variance frame in %s[%i] because %s' % (
                            infile, vhdu, e)
                        raise SaltError(msg)

                # plot the overscan region
                if plotover:
                    plt.plot(yarr, odata)
                    plt.plot(yarr, ofit)

            # trim the data and update the headers
            if trim:
                struct[i].data = struct[i].data[dy1:dy2, dx1:dx2]
                datasec = '[1:' + str(dx2 - dx1) + ',1:' + str(dy2 - dy1) + ']'
                saltkey.put('DATASEC', datasec, struct[i])

                # update the variance frame
                if saltkey.found('VAREXT', struct[i]):
                    vhdu = saltkey.get('VAREXT', struct[i])
                    struct[vhdu].data = struct[vhdu].data[dy1:dy2, dx1:dx2]
                    datasec = '[1:' + \
                        str(dx2 - dx1) + ',1:' + str(dy2 - dy1) + ']'
                    saltkey.put('DATASEC', datasec, struct[vhdu])

                # update the BPM frame
                if saltkey.found('BPMEXT', struct[i]):
                    bhdu = saltkey.get('BPMEXT', struct[i])
                    struct[bhdu].data = struct[bhdu].data[dy1:dy2, dx1:dx2]
                    datasec = '[1:' + \
                        str(dx2 - dx1) + ',1:' + str(dy2 - dy1) + ']'
                    saltkey.put('DATASEC', datasec, struct[bhdu])

            # subtract the master bias if necessary
            if subbias and bstruct:
                struct[i].data -= bstruct[i].data

                # update the variance frame: variances add
                if saltkey.found('VAREXT', struct[i]):
                    vhdu = saltkey.get('VAREXT', struct[i])
                    try:
                        vdata = struct[vhdu].data
                        struct[vhdu].data = vdata + bstruct[vhdu].data
                    except Exception as e:
                        msg = 'Cannot update the variance frame in %s[%i] because %s' % (
                            infile, vhdu, e)
                        raise SaltError(msg)
def crclean(struct, crtype='fast', thresh=5, mbox=5, bbox=11, bthresh=3,
            flux_ratio=0.2, gain=1, rdnoise=5, bfactor=2, fthresh=5,
            gbox=0, maxiter=1, update=True, log=None, verbose=True):
    """CRCLEAN cleans SALT-like data of cosmic rays. The user has three
    different choices for the type of cosmic ray cleaning being fast,
    median, and edge.

    crtype--type of cosmic ray cleaning. Either fast, median, or edge
    thresh--threshold for detecting cosmic rays
    mbox--box for median cleaning
    bbox--background box for median measurement
    bthresh--threshold for iterating on background calculation
    flux_ratio--ratio of fluxes for 'fast' method
    gain--gain of images--set to None to read in from header
    rdnoise--read noise of images--set to None to read in from header
    bfactor--block replication factor for 'edge' method
    fthresh--threshold for excluding compact sources (edge only)
    gbox--Window size to grow sources. gbox=0 for no growth of cosmic rays
    maxiter--maximum number of iterations
    update--replace flagged pixels with the cleaned values in place

    return struct
    """
    # setup the image name
    infile = saltkey.getimagename(struct[0])

    # count the CR
    totcr = 0

    # print out the header for the log
    if log:
        message = '%28s %11s' % ('HDU', 'COSMICRAYS')
        log.message('\n ---------------------------------------------------', \
                    with_header=False, with_stdout=verbose)
        log.message(message, with_header=False, with_stdout=verbose)
        log.message(' ---------------------------------------------------', \
                    with_header=False, with_stdout=verbose)

    # cosmic ray clean each extension
    for i in range(len(struct)):
        # only clean the cosmic rays if it is a SCI extension or a single
        # extension
        if struct[i].name == 'SCI' or len(struct) == 1:
            # for the edge method, get the gain and rdnoise from the fits
            # header if they are not set (identity test, not equality)
            if crtype == 'edge':
                if gain is None:
                    gain = saltkey.get('GAIN', struct[i])
                if rdnoise is None:
                    rdnoise = saltkey.get('RDNOISE', struct[i])

            # get all the cosmic rays from an array
            crarr = cleancosmicrays(struct[i].data, crtype, thresh, mbox,
                                    bbox, bthresh, flux_ratio, gain,
                                    rdnoise, bfactor, fthresh, gbox, maxiter)

            # update the frame for the various values
            mask = (crarr > 0)
            if update:
                struct[i].data[mask] = crarr[mask]

            # track the number of cosmic rays
            ncr = mask.sum()
            totcr += ncr

            # if verbose print out information
            if log:
                message = '%25s[%1d] %i' % (infile, i, ncr)
                log.message(message, with_header=False, with_stdout=verbose)

            # correct the BPM frame
            if saltkey.found('BPMEXT', struct[i]):
                b_i = saltkey.get('BPMEXT', struct[i])
                try:
                    struct[b_i].data[mask] = 1
                except Exception as e:
                    msg = 'Cannot update the BPM frame in %s[%i] because %s' % (
                        infile, b_i, e)
                    raise SaltError(msg)

    # the docstring promises the (updated) structure back
    return struct
# NOTE(review): this is a second, duplicate definition of crclean; at import
# time it silently shadows the one defined just above.  One of the two copies
# should be removed once it is confirmed which one callers rely on.
def crclean(struct, crtype='fast', thresh=5, mbox=5, bbox=11, bthresh=3,
            flux_ratio=0.2, gain=1, rdnoise=5, bfactor=2, fthresh=5,
            gbox=0, maxiter=1, update=True, log=None, verbose=True):
    """CRCLEAN cleans SALT-like data of cosmic rays. The user has three
    different choices for the type of cosmic ray cleaning being fast,
    median, and edge.

    crtype--type of cosmic ray cleaning. Either fast, median, or edge
    thresh--threshold for detecting cosmic rays
    mbox--box for median cleaning
    bbox--background box for median measurement
    bthresh--threshold for iterating on background calculation
    flux_ratio--ratio of fluxes for 'fast' method
    gain--gain of images--set to None to read in from header
    rdnoise--read noise of images--set to None to read in from header
    bfactor--block replication factor for 'edge' method
    fthresh--threshold for excluding compact sources (edge only)
    gbox--Window size to grow sources. gbox=0 for no growth of cosmic rays
    maxiter--maximum number of iterations
    update--replace flagged pixels with the cleaned values in place

    return struct
    """
    # setup the image name
    infile = saltkey.getimagename(struct[0])

    # count the CR
    totcr = 0

    # print out the header for the log
    if log:
        message = '%28s %11s' % ('HDU', 'COSMICRAYS')
        log.message('\n ---------------------------------------------------', \
                    with_header=False, with_stdout=verbose)
        log.message(message, with_header=False, with_stdout=verbose)
        log.message(' ---------------------------------------------------', \
                    with_header=False, with_stdout=verbose)

    # cosmic ray clean each extension
    for i in range(len(struct)):
        # only clean the cosmic rays if it is a SCI extension or a single
        # extension
        if struct[i].name == 'SCI' or len(struct) == 1:
            # for the edge method, get the gain and rdnoise from the fits
            # header if they are not set (identity test, not equality)
            if crtype == 'edge':
                if gain is None:
                    gain = saltkey.get('GAIN', struct[i])
                if rdnoise is None:
                    rdnoise = saltkey.get('RDNOISE', struct[i])

            # get all the cosmic rays from an array
            crarr = cleancosmicrays(struct[i].data, crtype, thresh, mbox,
                                    bbox, bthresh, flux_ratio, gain,
                                    rdnoise, bfactor, fthresh, gbox, maxiter)

            # update the frame for the various values
            mask = (crarr > 0)
            if update:
                struct[i].data[mask] = crarr[mask]

            # track the number of cosmic rays
            ncr = mask.sum()
            totcr += ncr

            # if verbose print out information
            if log:
                message = '%25s[%1d] %i' % (infile, i, ncr)
                log.message(message, with_header=False, with_stdout=verbose)

            # correct the BPM frame
            if saltkey.found('BPMEXT', struct[i]):
                b_i = saltkey.get('BPMEXT', struct[i])
                try:
                    struct[b_i].data[mask] = 1
                except Exception as e:
                    msg = 'Cannot update the BPM frame in %s[%i] because %s' % (
                        infile, b_i, e)
                    raise SaltError(msg)

    # the docstring promises the (updated) structure back
    return struct
def specslitnormalize(images, outimages, outpref, response=None,
                      response_output=None, order=2, conv=1e-2, niter=20,
                      startext=0, clobber=False, logfile='salt.log',
                      verbose=True):
    """Normalize each SCI extension of the input images by a slit response
    function.

    images/outimages/outpref -- input list and output names or prefix
    response -- filename of a pre-computed response; if None, a response is
        fitted from the first SCI extension encountered and then reused for
        all subsequent extensions and images
    response_output -- if set, the fitted response is written out
        (NOTE(review): write_response is called without response_output --
        confirm the destination filename is handled inside write_response)
    order/conv/niter -- fitting controls passed to create_response
    startext -- first extension to consider
    """
    with logging(logfile, debug) as log:

        # Check the input images
        infiles = saltio.argunpack('Input', images)

        # create list of output files
        outfiles = saltio.listparse('Outfile', outimages, outpref,
                                    infiles, '')

        # read in the response function
        response = saltio.checkfornone(response)
        if response:
            log.message('Loading response from %s' % response)
            response = readresponse(response)

        # Identify the lines in each file
        for img, ofile in zip(infiles, outfiles):

            # open the image
            hdu = saltio.openfits(img)

            for i in range(startext, len(hdu)):
                if hdu[i].name == 'SCI':
                    log.message('Normalizing extension %i in %s' % (i, img))
                    # things that will change for each slit

                    # set up the data for the source
                    try:
                        data = hdu[i].data
                    except Exception as e:
                        message = \
                            'Unable to read in data array in %s because %s' % \
                            (img, e)
                        raise SALTSpecError(message)

                    if response is None:
                        # fit the response from this extension; it stays
                        # bound to `response` and is reused afterwards
                        response = create_response(
                            data, spatial_axis=1, order=order, conv=conv,
                            niter=niter)
                        if response_output:
                            write_response(response, clobber=clobber)
                    else:
                        # add a check that the response is the same shape as
                        # the data
                        if len(response) != data.shape[0]:
                            raise SALTSpecError(
                                'Length of response function does not equal size of image array')

                    # correct the data -- the result must be written back to
                    # the HDU (a bare `data = data / response` only rebound
                    # the local name, so the output SCI data was never
                    # normalized even though the variance below was)
                    hdu[i].data = data / response

                    # correct the variance frame
                    if saltkey.found('VAREXT', hdu[i]):
                        vhdu = saltkey.get('VAREXT', hdu[i])
                        hdu[vhdu].data = hdu[vhdu].data / response

            saltio.writefits(hdu, ofile, clobber=clobber)
def gain(struct, mult=True, usedb=False, dblist=None, ampccd=2, log=None,
         verbose=True):
    """gain processes a image hduList and gain corrects each amplifier.
    It can either use gain settings in the header or those supplied in a
    config file which would be supplied in the dblist (see helpfile for
    structure of the config file).  If variance frames exist, it will
    update those for changes in the header value as well.

    In the end, it will update the gain with a value of one signifying
    the data has been transformed into e- from ADU.

    The program will look for the non-linear gain settings which are
    given by:

        e = GAIN*(1 + GAIN1*E-6*ADU)*ADU

    mult--if true, multiply by the gains
    usedb--use the values in the dblist, if false use the header values
    dblist--values for the gain and readnoise from the db
    ampccd--number of amplifiers per ccd

    dblist should have the following lists: speed, rate, gain, noise,
    bias, amp
    """
    # get the infile name
    infile = saltkey.getimagename(struct[0])

    # how many science extensions
    nsciext = saltkey.get('NSCIEXT', struct[0])
    # how many data extensions (was re-reading NSCIEXT by mistake)
    nextend = saltkey.get('NEXTEND', struct[0])

    # how many amplifiers?
    amplifiers = ampccd * saltkey.get('NCCDS', struct[0])

    # read the gain and rospeed for the image
    gainset = saltkey.get('GAINSET', struct[0])
    rospeed = saltkey.get('ROSPEED', struct[0])

    # loop through each amplifier and gain correct it
    if log:
        message = '%28s %6s %5s %3s %5s %5s' \
            % ('HDU', 'GAIN', 'SPEED', 'AMP', 'GAIN', 'NOISE')
        log.message('\n ---------------------------------------------------', \
                    with_header=False, with_stdout=verbose)
        log.message(message, with_header=False, with_stdout=verbose)
        log.message(' ---------------------------------------------------', \
                    with_header=False, with_stdout=verbose)

    for i in range(nsciext):
        hdu = i + 1
        amp = i % amplifiers + 1

        # get the gain and rdnoise values for the array
        if usedb:
            gain, rdnoise = get_values(dblist, gainset, rospeed, amp)
            gain1 = 0
        else:
            gain = saltkey.get('GAIN', struct[hdu])
            rdnoise = saltkey.get('RDNOISE', struct[hdu])
            try:
                gain1 = saltkey.get('GAIN1', struct[hdu])
            except:
                # GAIN1 keyword absent: assume a purely linear gain
                gain1 = 0

        if mult:
            # correct the gain
            gainmult = 1
            try:
                data = struct[hdu].data
                # apply the documented non-linear gain correction
                #   e- = GAIN*(1 + GAIN1*1e-6*ADU)*ADU
                # (the previous form, gain*data + gain1*data**2, dropped
                #  the 1e-6 scaling and the gain factor on the quadratic
                #  term and so disagreed with both the docstring and the
                #  variance propagation below; identical when gain1 == 0)
                struct[hdu].data = gain * (1.0 + gain1 * 1e-6 * data) * data
            except Exception as e:
                msg = 'Cannot gain correct %s[%i] because %s' % (
                    infile, hdu, e)
                raise SaltError(msg)

            # correct the variance frame; `data` still refers to the
            # original ADU array here
            if saltkey.found('VAREXT', struct[hdu]):
                vhdu = saltkey.get('VAREXT', struct[hdu])
                try:
                    vdata = struct[vhdu].data
                    struct[vhdu].data = vdata * gain * \
                        (1 + 2 * gain1 * 1e-6 * data)
                except Exception as e:
                    msg = 'Cannot update the variance frame in %s[%i] because %s' % (
                        infile, vhdu, e)
                    raise SaltError(msg)
        else:
            gainmult = gain

        # update the headers
        if usedb:
            saltkey.put('GAIN', gain, struct[hdu])
            saltkey.put('RDNOISE', rdnoise, struct[hdu])

        # add a keyword indicating what action was taken
        saltkey.new('GAINMULT', gainmult, 'Gain multiplication', struct[hdu])

        # if logging is true, then print out the following information
        if log:
            message = '%25s[%1d] %6s %5s %2s %6.2f %5.2f' \
                % (infile, hdu, gainset, rospeed, amp, gain, rdnoise)
            log.message(message, with_header=False, with_stdout=verbose)

    # just to make it look pretty
    if log:
        log.message('', with_header=False, with_stdout=verbose)

    return struct
def salteditkey(images, outimages, outpref, keyfile, recfile=None,
                clobber=False, logfile='salt.log', verbose=True):
    """Edit FITS header keywords of the input images according to the
    rules in *keyfile*, writing the results to the output images.
    Raises SaltError if a keyword cannot be updated or added.
    """
    with logging(logfile, debug) as log:

        # Check the input images
        infiles = saltio.argunpack('Input', images)

        # create list of output files
        outfiles = saltio.listparse('Outfile', outimages, outpref,
                                    infiles, '')

        # verify that the input and output lists are the same length
        saltio.comparelists(infiles, outfiles, 'Input', 'output')

        # is key file defined
        saltio.argdefined('keyfile', keyfile)
        keyfile = keyfile.strip()
        saltio.fileexists(keyfile)

        # if the data are the same, set up to use update instead of write
        # NOTE(review): both branches assign 'copyonwrite', so the
        # openmode=='update' path below is currently unreachable -- confirm
        # whether in-place update was deliberately disabled
        openmode = 'copyonwrite'
        if (infiles != outfiles):
            openmode = 'copyonwrite'

        # determine the date of the observations
        obsdate = saltstring.makeobsdatestr(infiles, 1, 9)
        if len(obsdate) != 8:
            message = 'Either FITS files from multiple dates exist, '
            message += 'or raw FITS files exist with non-standard names.'
            log.warning(message)

        # FITS file columns to record keyword changes
        fitcol = []
        keycol = []
        oldcol = []
        newcol = []

        # Set up the rules to change the files
        keyedits = readkeyfile(keyfile, log=log, verbose=verbose)

        # now step through the images
        for img, oimg in zip(infiles, outfiles):

            # determine the appropriate keyword edits for the image
            klist = []
            for frange in keyedits:
                if checkfitsfile(img, frange, keyedits[frange]):
                    klist.append(keyedits[frange][3])

            if klist:
                # open up the new files
                struct = saltio.openfits(img, mode=openmode)
                struct.verify('fix')

                for kdict in klist:
                    for keyword in kdict:
                        # record the changes
                        value = kdict[keyword]
                        fitcol.append(img)
                        keycol.append(keyword)
                        newcol.append(value)
                        try:
                            oldcol.append(
                                struct[0].header[keyword].lstrip())
                        except:
                            # keyword missing or non-string value
                            oldcol.append('None')
                        # update the keyword
                        if saltkey.found(keyword, struct[0]):
                            try:
                                saltkey.put(keyword, value, struct[0])
                                message = '\tUpdating %s in %s to %s' % (
                                    keyword, os.path.basename(img), value)
                                log.message(message, with_header=False,
                                            with_stdout=verbose)
                            except Exception as e:
                                message = 'Could not update %s in %s because %s' % (
                                    keyword, img, str(e))
                                raise SaltError(message)
                        else:
                            try:
                                saltkey.new(keyword.strip(), value,
                                            'Added Comment', struct[0])
                                message = '\tAdding %s in %s to %s' % (
                                    keyword, os.path.basename(img), value)
                                log.message(message, with_header=False,
                                            with_stdout=verbose)
                            except Exception as e:
                                message = 'Could not update %s in %s because %s' % (
                                    keyword, img, str(e))
                                raise SaltError(message)

                # update the history keywords (disabled upstream)
                # fname, hist=history(level=1, wrap=False, exclude=['images', 'outimages', 'outpref'])
                # saltkey.housekeeping(struct[0],'SAL-EDT', 'Keywords updated by SALTEDITKEY', hist)

                # write the file out
                if openmode == 'update':
                    saltio.updatefits(struct)
                    message = 'Updated file ' + os.path.basename(oimg)
                else:
                    saltio.writefits(struct, oimg, clobber)
                    message = 'Created file ' + os.path.basename(oimg)
                log.message(message, with_header=False, with_stdout=True)
                struct.close()
def rectify(hdu, soldict, caltype='line', function='poly', order=3,
            inttype='interp', w1=None, w2=None, dw=None, nw=None, blank=0,
            pixscale=0.0, time_interp=False, clobber=True, log=None,
            verbose=True):
    """Read in an image and a set of wavlength solutions.  Calculate the
    best wavelength solution for a given dataset and then apply that
    data set to the image

    return
    """
    # remember which output-axis limits must be derived from the data
    set_w1 = (w1 is None)
    set_w2 = (w2 is None)
    set_dw = (dw is None)
    set_nw = (nw is None)

    # set up the time of the observation
    dateobs = saltkey.get('DATE-OBS', hdu[0])
    utctime = saltkey.get('TIME-OBS', hdu[0])
    exptime = saltkey.get('EXPTIME', hdu[0])
    instrume = saltkey.get('INSTRUME', hdu[0]).strip()
    grating = saltkey.get('GRATING', hdu[0]).strip()
    if caltype == 'line':
        grang = saltkey.get('GRTILT', hdu[0])
        arang = saltkey.get('CAMANG', hdu[0])
    else:
        grang = saltkey.get('GR-ANGLE', hdu[0])
        arang = saltkey.get('AR-ANGLE', hdu[0])
    filtername = saltkey.get('FILTER', hdu[0]).strip()
    slitname = saltkey.get('MASKID', hdu[0])
    slit = st.getslitsize(slitname)
    xbin, ybin = saltkey.ccdbin(hdu[0])

    timeobs = enterdatetime('%s %s' % (dateobs, utctime))

    # check to see if there is more than one solution
    if caltype == 'line':
        if len(soldict) == 1:
            # list() keeps this working on both Python 2 and 3
            sol = list(soldict.keys())[0]
            slitid = None
            if not matchobservations(soldict[sol], instrume, grating,
                                     grang, arang, filtername, slitid):
                msg = 'Observations do not match setup for transformation but using the solution anyway'
                if log:
                    log.warning(msg)

    for i in range(1, len(hdu)):
        if hdu[i].name == 'SCI':
            if log:
                log.message('Correcting extension %i' % i)
            # use the middle row to seed the wavelength solution
            istart = int(0.5 * len(hdu[i].data))

            # set up the xarr and initial wavlength solution
            xarr = np.arange(len(hdu[i].data[istart]), dtype='int64')

            # get the slitid
            try:
                slitid = saltkey.get('SLITNAME', hdu[i])
            except:
                slitid = None

            # set up a wavelength solution
            try:
                w_arr = findsol(xarr, soldict, istart, caltype, timeobs,
                                exptime, instrume, grating, grang, arang,
                                filtername, slit, xbin, ybin, slitid,
                                function, order)
            except SALTSpecError as e:
                if slitid:
                    msg = 'SLITID %s: %s' % (slitid, e)
                    if log:
                        log.warning(msg)
                    continue
                else:
                    raise SALTSpecError(e)

            if w_arr is None:
                w_arr = findsol(xarr, soldict, istart, 'rss', timeobs,
                                exptime, instrume, grating, grang, arang,
                                filtername, slit, xbin, ybin, slitid,
                                function, order)

            # set up the output x-axis
            if set_w1:
                w1 = w_arr.min()
            if set_w2:
                w2 = w_arr.max()
            if set_nw:
                nw = len(xarr)
            if set_dw:
                dw = float(w2 - w1) / nw
            nw_arr = createoutputxaxis(w1, w2, nw)

            # setup the VARIANCE and BPM frames
            if saltkey.found('VAREXT', hdu[i]):
                varext = saltkey.get('VAREXT', hdu[i])
            else:
                varext = None

            # setup the BPM frames
            if saltkey.found('BPMEXT', hdu[i]):
                bpmext = saltkey.get('BPMEXT', hdu[i])
            else:
                bpmext = None

            # for each line in the data, determine the wavelength solution
            # for a given line in the image
            for j in range(len(hdu[i].data)):
                # find the wavelength solution for the data
                w_arr = findsol(xarr, soldict, j, caltype, timeobs,
                                exptime, instrume, grating, grang, arang,
                                filtername, slit, xbin, ybin, slitid,
                                function, order)

                # apply that wavelength solution to the data
                if w_arr is not None:
                    try:
                        hdu[i].data[j, :] = st.interpolate(
                            nw_arr, w_arr, hdu[i].data[j, :], inttype,
                            left=blank, right=blank)
                    except Exception as e:
                        hdu[i].data[j, :] = hdu[i].data[j, :] * 0.0 + blank
                        # report the failing row (was using the extension
                        # index i instead of the row index j)
                        msg = 'In row %i, solution cannot be found due to %s' % (
                            j, e)

                    # correct the variance frame
                    if varext:
                        try:
                            hdu[varext].data[j, :] = st.interpolate(
                                nw_arr, w_arr, hdu[varext].data[j, :],
                                inttype, left=blank, right=blank)
                        except Exception as e:
                            msg = 'In row %i, solution cannot be found due to %s' % (
                                j, e)

                    # correct the BPM frame
                    if bpmext:
                        try:
                            hdu[bpmext].data[j, :] = st.interpolate(
                                nw_arr, w_arr, hdu[bpmext].data[j, :],
                                inttype, left=blank, right=blank)
                        except Exception as e:
                            msg = 'In row %i, solution cannot be found due to %s' % (
                                j, e)
def saltflat(images, outimages, outpref, flatimage, minflat=1, allext=False,
             clobber=False, logfile='salt.log', verbose=True):
    """Flatfield-correct each input image by the normalized *flatimage*
    and write the results to the output images.

    minflat -- pixels at or below this level in the flat are clipped to it
    allext -- if False, each SCI extension is normalized by its own mean
              instead of the global flat mean
    """
    with logging(logfile, debug) as log:

        # Check the input images
        infiles = saltio.argunpack('Input', images)

        # create list of output files
        outfiles = saltio.listparse('Outfile', outimages, outpref,
                                    infiles, '')

        # verify that the input and output lists are the same length
        saltio.comparelists(infiles, outfiles, 'Input', 'output')

        # check flatfield image exists (the stripped name was previously
        # assigned to a misspelled variable and never used)
        flatimage = flatimage.strip()
        saltio.fileexists(flatimage)
        flatstruct = saltio.openfits(flatimage)

        # Normalize the flat field image
        # This requires to go through each science extension and divide it
        # by mean of the image. Things that have to be checked:
        # that data exist, that it is a science extension

        # determine the global mean
        fmean = 0
        fcount = 0

        # replace bad pixels
        for i in range(len(flatstruct)):
            if flatstruct[i].data is not None and (
                    flatstruct[i].name == 'SCI' or
                    flatstruct[i].name == 'PRIMARY'):
                data = flatstruct[i].data
                mask = (data > minflat)
                # NaN never compares equal to anything, so the previous
                # `numpy.nan == data` test could not detect NaNs
                if numpy.isnan(data).sum() or (data == numpy.inf).sum():
                    message = '\nWARNING -- SALTFLAT: %s contains invalid values' % flatimage
                    log.warning(message, with_stdout=verbose)
                # clip low/invalid pixels (NaN comparisons are False, so
                # NaNs fall outside the mask and are clipped here too)
                flatstruct[i].data[mask == 0] = minflat
                flatstruct[i].data[flatstruct[i].data == numpy.inf] = minflat

                # determine the mean
                mask = (data > minflat)
                fmean += data[mask].sum()
                fcount += data[mask].size
        if fcount > 0:
            fmean = fmean / fcount

        for i in range(len(flatstruct)):
            if flatstruct[i].name == 'PRIMARY':
                # is it a flat--if not throw a warning
                try:
                    key_ccdtype = saltkey.get('CCDTYPE', flatstruct[i])
                except:
                    key_ccdtype = None
                if key_ccdtype != 'FLAT':
                    message = '\nWARNING -- SALTFLAT: %s does not have CCDTYPE=FLAT' % flatimage
                    log.warning(message, with_stdout=verbose)

                # if there are data, normalize it
                if flatstruct[i].data is not None:
                    flatstruct[i].data = flatnormalize(
                        flatstruct[i].data, minflat)

            # Normalize the science extensions
            if flatstruct[i].name == 'SCI':
                if flatstruct[i].data is not None:
                    # per-extension normalization unless allext is set
                    if allext is False:
                        fmean = flatstruct[i].data.mean()
                    flatstruct[i].data = flatnormalize(
                        flatstruct[i].data, minflat, fmean)

                    # Apply to the variance frames
                    if saltkey.found('VAREXT', flatstruct[i]):
                        varext = saltkey.get('VAREXT', flatstruct[i])
                        flatstruct[varext].data = \
                            flatstruct[varext].data / fmean**2

        # open each raw image file
        for infile, outfile in zip(infiles, outfiles):
            struct = saltio.openfits(infile)

            # flat field correct the image (the call itself now sits inside
            # the try block; previously the handler guarded only `pass`)
            try:
                outstruct = flat(struct, flatstruct)
            except Exception as e:
                msg = 'Unable to flatten %s because %s' % (infile, e)
                raise SaltError(msg)

            # add any header keywords like history
            fname, hist = history(level=1, wrap=False)
            saltkey.housekeeping(struct[0], 'SFLAT',
                                 'File flatfield corrected', hist)

            # write it out and close it
            saltio.writefits(outstruct, outfile, clobber=clobber)
            saltio.closefits(struct)

            # output the information
            log.message('Flatfields image %s using %s' % (infile, flatimage),
                        with_header=False, with_stdout=verbose)

        # close the flatfield image
        saltio.closefits(flatstruct)
# calculate the new utc value try: ntime=salttime.sex2dec(utc) ntime=ntime-tdiff/3600.0 newutc=salttime.dec2sex(ntime) except Exception,e: msg='Could not update UTC in %i header of image %s because %s' % (ext, infile, e) raise SaltError(msg) return struct # update the headers if utc==saltsafekey.get('UTC-OBS', struct): expt_string='%5.4f' % real_expt td_string='%5.4f' % tdiff if not saltsafekey.found('DUTC', struct): try: saltsafekey.put('UTC-OBS', newutc, struct, infile) saltsafekey.put('TIME-OBS', newutc, struct, infile) saltsafekey.new('DWETIME', expt_string, 'Dwell Time', struct, infile) saltsafekey.new('DUTC', td_string, 'Change in UTC time', struct, infile) except Exception, e: msg='Could not update %i header of image %s because %s' % (ext, infile, e) raise SaltIOError(msg) else: try: saltsafekey.put('UTC-OBS', newutc, struct, infile) saltsafekey.put('TIME-OBS', newutc, struct, infile) saltsafekey.put('DWETIME', real_expt, struct, infile) saltsafekey.put('DUTC', tdiff, struct, infile) except Exception, e:
def hducombine(hdu_list, outhdu, ext, method='average', datasec=None,
               reject=None, mask=False, weight=False, scale=None,
               statsec=None, blank=0, lthresh=3, hthresh=3):
    """Combine a set of images in imlist

    Combines extension *ext* of every HDU list in *hdu_list* into the
    matching extension of *outhdu* (stacked with *method*, optionally
    scaled, inverse-variance weighted and outlier-rejected), and updates
    the associated variance and BPM extensions when present.  Returns
    *outhdu* with its data modified in place.
    """
    # set up i as the extionsion variable as shortcut
    i = ext
    nimages = len(hdu_list)
    # set all the data arrays
    # NOTE(review): data_list is never used below -- data goes straight
    # into data_arr
    data_list = []
    gain = np.zeros(nimages)
    rdnoise = np.zeros(nimages)
    varext = None
    bpmext = None

    # set the datasec in case it is none as the full image
    if datasec is None:
        sh = outhdu[ext].data.shape
        y1, x1 = (0, 0)
        y2, x2 = outhdu[i].data.shape
    else:
        y1, y2, x1, x2 = datasec
    dshape = outhdu[i].data[y1:y2, x1:x2].shape
    dtype = outhdu[i].data[y1:y2, x1:x2].dtype
    # weight accumulator, initialised to zero for every output pixel
    wei_arr = outhdu[i].data[y1:y2, x1:x2] * 0.0

    # check for variance frame
    if saltkey.found('VAREXT', outhdu[i]) and weight:
        varext = saltkey.get('VAREXT', outhdu[i])
        var_list = []
    # check for bad pixel mask
    if saltkey.found('BPMEXT', outhdu[i]) and mask:
        bpmext = saltkey.get('BPMEXT', outhdu[i])
        bpm_list = []

    # create the lists of arrays and scale the arrays if requests
    mean_scale = 0
    data_arr = np.zeros((nimages, dshape[0], dshape[1]), dtype=dtype)
    for j in range(nimages):
        data_arr[j, :, :] = hdu_list[j][i].data[y1:y2, x1:x2]

        # calculate the scale
        if scale:
            scale_val = CalculateScale(data_arr, scale, statsec)
            mean_scale += scale_val
        else:
            scale_val = 1
            mean_scale += 1

        # variance is divided by the same scale as the data
        if varext:
            var_list.append(hdu_list[j][varext].data[y1:y2, x1:x2] /
                            scale_val)

        if bpmext:
            bpm_list.append(hdu_list[j][bpmext].data[y1:y2, x1:x2])

        # get the gain and rdnoise
        # GAINMULT marks data already converted to electrons, so gain=1
        if reject == 'ccdclip':
            if saltkey.found('GAINMULT', hdu_list[j][i]):
                gain[j] = 1
            else:
                gain[j] = saltkey.get('GAIN', hdu_list[j][i])
            rdnoise[j] = saltkey.get('RDNOISE', hdu_list[j][i])

    # convert the lists to arrays
    if varext:
        var_arr = np.array(var_list)
        ivar_arr = 1.0 / var_arr
    else:
        var_arr = None
        ivar_arr = None

    if bpmext:
        bpm_arr = np.array(bpm_list)
    else:
        bpm_arr = None

    # reject outliers if set (replaces any input BPM with the updated one)
    bpm_arr = RejectArray(data_arr, reject=reject, var=var_arr,
                          bpm=bpm_arr, lthresh=lthresh, hthresh=hthresh,
                          gain=gain, rdnoise=rdnoise)

    # calculate the average values
    # NOTE(review): outwei is discarded and wei_arr is never updated from
    # it, so wei_arr stays all-zero -- verify whether the BPM output below
    # and the commented-out blank fill were meant to use outwei
    outdata, outwei = CombineArray(data_arr, method=method, ivar=ivar_arr,
                                   bpm=bpm_arr)
    outhdu[i].data[y1:y2, x1:x2] = outdata
    if scale is not None:
        mean_scale = mean_scale / nimages
        outhdu[i].data[y1:y2, x1:x2] *= mean_scale

    # create the combine variance frame
    if varext:
        outhdu[varext].data[y1:y2, x1:x2], tmp_arr = CombineArray(
            var_arr, method=method, ivar=ivar_arr, bpm=bpm_arr)
        del tmp_arr
        if scale is not None:
            outhdu[varext].data *= mean_scale

    # check to see if any of the pixels have no values and replace with blank
    # if wei_arr.any()==0:
    #    wmask=(wei_arr==0)
    #    outhdu[i].data[wmask]=blank

    # create the combine BPM frames
    # (with wei_arr all zeros this flags every pixel -- see note above)
    if bpmext:
        outhdu[bpmext].data = 1.0 * (wei_arr == 0)

    return outhdu
def specslit(image, outimage, outpref, exttype='auto', slitfile='',
             outputslitfile='', regprefix='', sections=3, width=25,
             sigma=2.2, thres=6, order=3, padding=5, yoffset=0,
             inter=False, clobber=True, logfile='salt.log', verbose=True):
    """Extract slitlets from multi-slit images into separate FITS extensions.

    Slit positions are either read from a slit definition file (exttype of
    'rsmt', 'fits', 'ascii' or 'ds9') or detected directly from the image
    (exttype of 'auto').  For each input image an output file is written
    containing one image extension per slitlet, matching VAR/BPM extensions
    when present, and a binary table of the slit positions.

    BUG FIX: the sigma/thres/sections validation previously raised the
    undefined name ``SaltSpecError`` (a NameError at runtime); all
    validations now raise ``SALTSpecError`` consistently.
    """
    with logging(logfile, debug) as log:

        # check all the input and make sure that all the input needed is
        # provided by the user

        # read the image or image list and check if each in the list exist
        infiles = saltio.argunpack('Input', image)

        # unpack the outfiles
        outfiles = saltio.listparse(
            'Outimages', outimage, outpref, infiles, '')

        # from the extraction type, check whether the input file is specified.
        # if the slitfile parameter is specified then use the slit files for
        # the extraction. if the extraction type is auto then use image for
        # the detection and the slit extraction
        if exttype == 'rsmt' or exttype == 'fits' or exttype == 'ascii' or exttype == 'ds9':
            slitfiles = saltio.argunpack('Slitfile', slitfile)
            # a single slit file applies to every input image
            if len(slitfiles) == 1:
                slitfiles = slitfiles * len(infiles)
            saltio.comparelists(infiles, slitfiles, 'image', 'slitfile')
        elif exttype == 'auto':
            slitfiles = infiles
            log.message(
                'Extraction type is AUTO. Slit detection will be done from image')

        # read in if an optional ascii file is requested
        if len(outputslitfile) > 0:
            outslitfiles = saltio.argunpack('Outslitfiles', outputslitfile)
            saltio.comparelists(
                infiles, outslitfiles, 'image', 'outputslitfile')
        else:
            outslitfiles = [''] * len(infiles)

        # check if the width and sigma parameters were specified.
        # default is 25 and 2.2
        if width < 10.:
            msg = 'The width parameter needs be a value larger than 10'
            raise SALTSpecError(msg)

        if sigma < 0.0:
            msg = 'Sigma must be greater than zero'
            raise SALTSpecError(msg)

        # check the threshold parameter. this needs to be specified by the
        # user
        if thres <= 0.0:
            msg = 'Threshold must be greater than zero'
            raise SALTSpecError(msg)

        # check to make sure that the sections are greater than the order
        if sections <= order:
            msg = 'Number of sections must be greater than the order for the spline fit'
            raise SALTSpecError(msg)

        # run through each of the images and extract the slits
        for img, oimg, sfile, oslit in zip(
                infiles, outfiles, slitfiles, outslitfiles):
            log.message('Proccessing image %s' % img)

            # open the image
            struct = saltio.openfits(img)
            ylen, xlen = struct[1].data.shape
            xbin, ybin = saltkey.ccdbin(struct[0], img)

            # setup the VARIANCE and BPM frames
            if saltkey.found('VAREXT', struct[1]):
                varext = saltkey.get('VAREXT', struct[1])
                varlist = []
            else:
                varext = None

            # setup the BPM frames
            if saltkey.found('BPMEXT', struct[1]):
                bpmext = saltkey.get('BPMEXT', struct[1])
                bpmlist = []
            else:
                bpmext = None

            # open the slit definition file or identify the slits in the
            # image
            slitmask = None
            ycheck = False
            if exttype == 'rsmt':
                log.message('Using slits from %s' % sfile)
                # if no offset is given, derive it later from the data
                if yoffset is None:
                    yoffset = 0
                    ycheck = True
                slitmask = mt.read_slitmask_from_xml(sfile)
                xpos = -0.3066
                ypos = 0.0117
                cx = int(xlen / 2.0)
                cy = int(ylen / 2.0) + ypos / 0.015 / ybin + yoffset
                order, slit_positions = mt.convert_slits_from_mask(
                    slitmask, order=1, xbin=xbin, ybin=ybin,
                    pix_scale=0.1267, cx=cx, cy=cy)
                sections = 1
            elif exttype == 'fits':
                log.message('Using slits from %s' % sfile)
                order, slit_positions = read_slits_from_fits(sfile)
            elif exttype == 'ascii':
                log.message('Using slits from %s' % sfile)
                order, slit_positions = mt.read_slits_from_ascii(sfile)
            elif exttype == 'ds9':
                log.message('Using slits from %s' % sfile)
                order, slit_positions, slitmask = mt.read_slits_from_ds9(
                    sfile, order=order)
                slitmask = None
                sections = 1
            elif exttype == 'auto':
                log.message('Identifying slits in %s' % img)
                # identify the slits in the image
                order, slit_positions = identify_slits(
                    struct[1].data, order, sections, width, sigma, thres)

                # write out the slit identifications if ofile has been
                # supplied
                if oslit:
                    log.message('Writing slit positions to %s' % oslit)
                    mt.write_outputslitfile(slit_positions, oslit, order)

            if ycheck:
                slit_positions, dy = check_ypos(
                    slit_positions, struct[1].data)
                log.message('Using an offset of {}'.format(dy))

            # extract the slits
            spline_x = mt.divide_image(struct[1].data, sections)
            spline_x = 0.5 * \
                (np.array(spline_x[:-1]) + np.array(spline_x[1:]))
            extracted_spectra, spline_positions = mt.extract_slits(
                slit_positions, spline_x, struct[1].data, order=order,
                padding=padding)
            if varext:
                extracted_var, var_positions = mt.extract_slits(
                    slit_positions, spline_x, struct[varext].data,
                    order=order, padding=padding)
            if bpmext:
                extracted_bpm, bpm_positions = mt.extract_slits(
                    slit_positions, spline_x, struct[bpmext].data,
                    order=order, padding=padding)

            # write out the data to the new array
            # create the new file
            hdulist = fits.HDUList([struct[0]])

            # log the extracted spectra if needed
            log.message('', with_stdout=verbose)

            # setup output ds9 file
            if regprefix:
                regout = open(
                    regprefix + os.path.basename(img).strip('.fits') +
                    '.reg', 'w')
                regout.write('# Region file format: DS9 version 4.1\n')
                regout.write('# Filename: %s\n' % img)
                regout.write(
                    'global color=green dashlist=8 3 width=1 font="helvetica 10 normal roman" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=1 include=1 source=1\nphysical\n')

            # add each slitlet as an image extension
            imglist = []
            nslits = len(spline_positions)
            for i in range(nslits):
                y1 = spline_positions[i][0].min()
                y2 = spline_positions[i][1].max()
                msg = 'Extracted Spectra %i between %i to %i' % (
                    i + 1, y1, y2)
                # log.message(msg, with_header=False, with_stdout=verbose)
                sdu = fits.ImageHDU(
                    extracted_spectra[i], header=struct[1].header)
                if varext:
                    vdu = fits.ImageHDU(
                        extracted_var[i], header=struct[varext].header)
                    # VAR extensions are appended after all the slit images
                    sdu.header['VAREXT'] = i + nslits + 1
                    varlist.append(vdu)
                if bpmext:
                    bdu = fits.ImageHDU(
                        extracted_bpm[i], header=struct[bpmext].header)
                    # BPM extensions follow the VAR extensions
                    sdu.header['BPMEXT'] = i + 2 * nslits + 1
                    bpmlist.append(bdu)
                imglist.append(sdu)

                # add in some additional keywords
                imglist[i].header['MINY'] = (
                    y1, 'Lower Y value in original image')
                imglist[i].header['MAXY'] = (
                    y2, 'Upper Y value in original image')
                if regprefix:
                    xsize = struct[1].data.shape[1]
                    xsize = int(0.5 * xsize)
                    rtext = ''
                    if slitmask:
                        # rtext='%s, %8.7f, %8.7f, %3.2f' % (slitmask.slitlets.data[i]['name'], slitmask.slitlets.data[i]['targ_ra'], slitmask.slitlets.data[i]['targ_dec'], slitmask.slitlets.data[i]['slit_width'])
                        pass
                    regout.write('box(%i,%i, %i, %i) #text={%s}\n' % (
                        xsize, 0.5 * (y1 + y2), 2 * xsize, y2 - y1, rtext))

                # add slit information
                if slitmask:
                    imglist[i].header['SLITNAME'] = (
                        slitmask.slitlets.data[i]['name'], 'Slit Name')
                    imglist[i].header['SLIT_RA'] = (
                        slitmask.slitlets.data[i]['targ_ra'], 'Slit RA')
                    imglist[i].header['SLIT_DEC'] = (
                        slitmask.slitlets.data[i]['targ_dec'], 'Slit DEC')
                    imglist[i].header['SLIT'] = (
                        slitmask.slitlets.data[i]['slit_width'],
                        'Slit Width')

            # add to the hdulist
            hdulist += imglist
            if varext:
                hdulist += varlist
            if bpmext:
                hdulist += bpmlist

            # write the slit positions to the header
            # create the binary table HDU that contains the split positions
            tbhdu = mt.slits_HDUtable(slit_positions, order)
            bintable_hdr = tbhdu.header

            # add the extname parameter to the extension
            tbhdu.header['EXTNAME'] = 'BINTABLE'

            # record which extension holds the slit table
            hdulist[0].header['SLITEXT'] = len(hdulist)
            hdulist.append(tbhdu)

            # add additional header information about the mask
            if slitmask:
                hdulist[0].header['MASKNAME'] = (
                    slitmask.mask_name, 'SlitMask Name')
                hdulist[0].header['MASK_RA'] = (
                    slitmask.center_ra, 'SlitMask RA')
                hdulist[0].header['MASK_DEC'] = (
                    slitmask.center_dec, 'SlitMask DEC')
                hdulist[0].header['MASK_PA'] = (
                    slitmask.position_angle, 'SlitMask Position Angle')

            # write out the image
            saltio.writefits(hdulist, oimg, clobber)
def specslitnormalize(images, outimages, outpref, response=None,
                      response_output=None, order=2, conv=1e-2, niter=20,
                      startext=0, clobber=False, logfile='salt.log',
                      verbose=True):
    """Normalize each SCI extension of multi-slit images by a response
    function.

    images--input image or image list
    outimages--output image or image list
    outpref--prefix used to build output names from the input names
    response--file containing a response function, or None to derive one
              with create_response from the first SCI extension processed
              (the derived response is then re-used for all later
              extensions and images)
    response_output--if set, write out the derived response function
    order--order of the fit used by create_response
    conv--convergence criterion for create_response
    niter--maximum iterations for create_response
    startext--first extension index to consider
    clobber--overwrite existing files
    logfile--name of the log file
    verbose--print messages to stdout

    BUG FIX: the normalized SCI array was previously assigned only to the
    local name ``data`` and never written back to the HDU, so the output
    file kept the uncorrected data (while the variance frame WAS updated).
    """
    with logging(logfile, debug) as log:

        # Check the input images
        infiles = saltio.argunpack('Input', images)

        # create list of output files
        outfiles = saltio.listparse('Outfile', outimages, outpref,
                                    infiles, '')

        # read in the response function
        response = saltio.checkfornone(response)
        if response:
            log.message('Loading response from %s' % response)
            response = readresponse(response)

        # Identify the lines in each file
        for img, ofile in zip(infiles, outfiles):

            # open the image
            hdu = saltio.openfits(img)

            for i in range(startext, len(hdu)):
                if hdu[i].name == 'SCI':
                    log.message('Normalizing extension %i in %s' % (i, img))
                    # things that will change for each slit

                    # set up the data for the source
                    try:
                        data = hdu[i].data
                    except Exception as e:
                        message = \
                            'Unable to read in data array in %s because %s' % \
                            (img, e)
                        raise SALTSpecError(message)

                    if response is None:
                        response = create_response(
                            data, spatial_axis=1, order=order, conv=conv,
                            niter=niter)
                        if response_output:
                            # NOTE(review): response_output is never passed
                            # to write_response -- confirm the output file
                            # name is determined elsewhere
                            write_response(response, clobber=clobber)
                    else:
                        # add a check that the response is the same shape as
                        # the data
                        if len(response) != data.shape[0]:
                            raise SALTSpecError(
                                'Length of response function does not equal size of image array')

                    # correct the data and write it back to the HDU
                    hdu[i].data = data / response

                    # correct the variance frame
                    if saltkey.found('VAREXT', hdu[i]):
                        vhdu = saltkey.get('VAREXT', hdu[i])
                        hdu[vhdu].data = hdu[vhdu].data / response

            saltio.writefits(hdu, ofile, clobber=clobber)
def bias(struct,subover=True,trim=True, subbias=False, bstruct=None,
         median=False, function='polynomial',order=3,rej_lo=3,rej_hi=3,niter=10,
         plotover=False, log=None, verbose=True):
   """Bias subtracts the bias levels from a frame.  It will fit and subtract
      the overscan region, trim the images, and subtract a master bias if
      required.  The input structure is modified in place; matching VAREXT
      (variance) and BPMEXT (bad pixel mask) extensions are kept consistent
      with the SCI extensions.

      struct--image structure (FITS HDU list; modified in place)
      subover--subtract the overscan region
      trim--trim the image
      subbias--subtract master bias
      bstruct--master bias image structure
      median--use the median instead of mean in image statistics
      function--form to fit to the overscan region
      order--order for the function
      rej_lo--sigma of low points to reject in the fit
      rej_hi--sigma of high points to reject in the fit
      niter--number of iterations
      log--saltio log for recording information
      verbose--whether to print to stdout
   """
   infile=saltkey.getimagename(struct[0])

   # how many extensions?
   nsciext = saltkey.get('NSCIEXT',struct[0])
   nextend = saltkey.get('NEXTEND',struct[0])
   nccd = saltkey.get('NCCDS',struct[0])

   # how many amplifiers?--this is hard wired
   amplifiers = 2 * nccd

   #log the process -- print a table header for the per-HDU overscan report
   if subover and log:
       message = '%28s %7s %5s %4s %6s' % \
           ('HDU','Overscan','Order','RMS','Niter')
       log.message('\n --------------------------------------------------',
                   with_header=False, with_stdout=verbose)
       log.message(message, with_header=False, with_stdout=verbose)
       log.message(' --------------------------------------------------',
                   with_header=False, with_stdout=verbose)

   # set up an interactive plot window for the overscan fits
   if (plotover):
       plt.figure(1)
       plt.axes([0.1,0.1,0.8,0.8])
       plt.xlabel('CCD Column')
       plt.ylabel('Pixel Counts (e-)')
       plt.ion()

   #loop through the extensions and subtract the bias
   for i in range(1,nsciext+1):
       if struct[i].name=='SCI':

           #get the bias section
           biassec = saltkey.get('BIASSEC',struct[i])
           y1,y2,x1,x2 = saltio.getSection(biassec, iraf_format=True)
           #get the data section
           datasec = saltkey.get('DATASEC',struct[i])
           dy1,dy2, dx1, dx2 = saltio.getSection(datasec, iraf_format=True)

           #setup the overscan region
           if subover:
               yarr=np.arange(y1,y2, dtype=float)
               data=struct[i].data
               odata=struct[i].data[y1:y2,x1:x2]
               # collapse the overscan strip along rows to one value per row
               if median:
                  odata=np.median((struct[i].data[y1:y2,x1:x2]),axis=1)
                  olevel=np.median((struct[i].data[y1:y2,x1:x2]))
                  saltkey.new('OVERSCAN','%f' % (olevel),'Overscan median value', struct[i])
               else:
                  odata=np.mean((struct[i].data[y1:y2,x1:x2]),axis=1)
                  olevel=np.mean((struct[i].data[y1:y2,x1:x2]))
                  saltkey.new('OVERSCAN','%f' % (olevel),'Overscan mean value', struct[i])

               #fit the overscan region
               # NOTE(review): only rej_hi is passed as the rejection
               # threshold; rej_lo is unused here -- confirm whether
               # interfit supports an asymmetric threshold
               ifit=saltfit.interfit(yarr, odata, function=function, \
                                     order=order, thresh=rej_hi, niter=niter)
               try:
                   ifit.interfit()
                   # NOTE(review): coeffs is assigned but never used
                   coeffs=ifit.coef
                   ofit=ifit(yarr)
                   omean, omed, osigma=saltstat.iterstat((odata-ofit), sig=3, niter=5)
               except ValueError:
                   #catch the error if it is a zero array
                   ofit=np.array(yarr)*0.0
                   osigma=0.0
               except TypeError:
                   #catch the error if it is a zero array
                   ofit=np.array(yarr)*0.0
                   osigma=0.0

               #if it hasn't been already, convert image to
               #double format
               struct[i].data = 1.0 * struct[i].data
               # drop scaling keywords so the float data are written as-is
               try:
                   struct[i].header.remove('BZERO')
                   struct[i].header.remove('BSCALE')
               except:
                   pass

               #subtract the overscan region (the fitted row profile) from
               #every column of the image
               for j in range(len(struct[i].data[0])):
                   struct[i].data[y1:y2,j] -= ofit

               #report the information
               if log:
                   message = '%25s[%1d] %8.2f %3d %7.2f %3d' % \
                       (infile, i, olevel, order, osigma, niter)
                   log.message(message, with_stdout=verbose, with_header=False)

               #add the statistics to the image header
               saltkey.new('OVERRMS','%f' % (osigma),'Overscan RMS value', struct[i])

               #update the variance frame
               if saltkey.found('VAREXT', struct[i]):
                   vhdu=saltkey.get('VAREXT', struct[i])
                   try:
                       vdata=struct[vhdu].data
                       #The bias level should not be included in the noise from the signal
                       for j in range(len(struct[i].data[0])):
                           vdata[y1:y2,j] -= ofit
                       #add a bit to make sure that the minimum error is the rednoise
                       rdnoise= saltkey.get('RDNOISE',struct[i])
                       vdata[vdata<rdnoise**2]=rdnoise**2
                       # add the overscan-fit scatter to the variance
                       struct[vhdu].data=vdata+osigma**2
                   except Exception, e:
                       msg='Cannot update the variance frame in %s[%i] because %s' % (infile, vhdu, e)
                       raise SaltError(msg)

           #plot the overscan region
           if plotover:
               plt.plot(yarr, odata)
               plt.plot(yarr, ofit)

           #trim the data and update the headers
           if trim:
               struct[i].data=struct[i].data[dy1:dy2,dx1:dx2]
               datasec = '[1:'+str(dx2-dx1)+',1:'+str(dy2-dy1)+']'
               saltkey.put('DATASEC',datasec,struct[i])

               #update the variance frame
               if saltkey.found('VAREXT', struct[i]):
                   vhdu=saltkey.get('VAREXT', struct[i])
                   struct[vhdu].data=struct[vhdu].data[dy1:dy2,dx1:dx2]
                   datasec = '[1:'+str(dx2-dx1)+',1:'+str(dy2-dy1)+']'
                   saltkey.put('DATASEC',datasec,struct[vhdu])

               #update the BPM frame
               if saltkey.found('BPMEXT', struct[i]):
                   bhdu=saltkey.get('BPMEXT', struct[i])
                   struct[bhdu].data=struct[bhdu].data[dy1:dy2,dx1:dx2]
                   datasec = '[1:'+str(dx2-dx1)+',1:'+str(dy2-dy1)+']'
                   saltkey.put('DATASEC',datasec,struct[bhdu])

           #subtract the master bias if necessary
           if subbias and bstruct:
               struct[i].data -= bstruct[i].data

               #update the variance frame (variances add on subtraction)
               if saltkey.found('VAREXT', struct[i]):
                   vhdu=saltkey.get('VAREXT', struct[i])
                   try:
                       vdata=struct[vhdu].data
                       struct[vhdu].data=vdata+bstruct[vhdu].data
                   except Exception, e:
                       msg='Cannot update the variance frame in %s[%i] because %s' % (infile, vhdu, e)
                       raise SaltError(msg)
def quickclean(filename, interp='linear', cleanup=True, clobber=False,
               logfile='saltclean.log', verbose=True):
   """Start the process to reduce the data and produce a single mosaicked image.

      Runs the saltred pipeline steps (prepare, gain, crosstalk, bias,
      optional cosmic-ray cleaning, mosaic) on a single raw file and writes
      the result to ./mbxp<infile>.

      filename--path to the raw file; basename starting with 'P' selects the
                RSS calibration files, 'S' selects the SALTICAM ones
      interp--interpolation method passed to saltmosaic
      cleanup--remove the intermediate products when done
      clobber--overwrite existing output (if False and the output exists,
               return immediately)
      logfile--log file passed to the pipeline tasks
      verbose--whether the pipeline tasks print to stdout
   """
   print filename

   #create the input file name
   # NOTE(review): status is assigned but never used
   status=0
   infile=os.path.basename(filename)
   rawpath=os.path.dirname(filename)
   outpath='./'
   outfile=outpath+'mbxp'+infile
   print infile, rawpath, outpath

   #check to see if it exists and return if clobber is no
   if os.path.isfile(outfile) and not clobber: return

   #set up the files needed (per-instrument calibration tables)
   if infile[0]=='P':
     gaindb = iraf.osfn('pysalt$data/rss/RSSamps.dat')
     xtalkfile = iraf.osfn('pysalt$data/rss/RSSxtalk.dat')
     geomfile = iraf.osfn('pysalt$data/rss/RSSgeom.dat')
   elif infile[0]=='S':
     gaindb = iraf.osfn('pysalt$data/scam/SALTICAMamps.dat')
     xtalkfile = iraf.osfn('pysalt$data/scam/SALTICAMxtalk.dat')
     geomfile = iraf.osfn('pysalt$data/scam/SALTICAMgeom.dat')

   #verify the file
   hdu=saltio.openfits(rawpath+'/'+infile)
   hdu.verify('exception')

   #check to see if detmode is there
   if not saltkey.found('DETMODE', hdu[0]): return

   #reduce the file: prepare -> gain -> crosstalk, each step prefixing the
   #output file name (p, xp, ...)
   saltred.saltprepare(images=filename,outimages='',outpref=outpath+'p', \
                       createvar=False, badpixelimage=None, clobber=clobber,logfile=logfile,verbose=verbose)
   pinfile=outpath+'p'+infile
   saltred.saltgain(pinfile, outimages=pinfile, outpref='', gaindb=gaindb,usedb=False,
                    mult=True,clobber=True, logfile=logfile, verbose=verbose)
   saltred.saltxtalk(pinfile,outimages='',outpref='x',xtalkfile=xtalkfile,clobber=clobber,
                     logfile=logfile,verbose=verbose)
   #saltred.saltslot(images=pinfile,outimages='',outpref=outpath+'bx',gaindb=gaindb,
   #                 xtalkfile=xtalkfile,clobber=clobber,logfile=logfile,verbose=verbose,
   #                 status=0)
   xinfile=outpath+'xp'+infile
   #overscan/bias correction (no master bias subtraction)
   saltred.saltbias(images=xinfile,outimages='',outpref='b',subover=True,trim=True,subbias=False,
                    masterbias='', median=False,function='polynomial',order=5,rej_lo=3,rej_hi=3,niter=10,
                    plotover=False,turbo=False,logfile=logfile, clobber=clobber, verbose=verbose)
   biasfile=outpath+'bxp'+infile

   #cosmic-ray clean only long object exposures
   if hdu[0].header['CCDTYPE']=='OBJECT' and hdu[0].header['EXPTIME']>90:
      saltcrclean(images=biasfile, outimages=biasfile, outpref='', crtype='median',thresh=5,mbox=5, \
                  bthresh=3, flux_ratio=0.2, bbox=25, gain=1, rdnoise=5, fthresh=5,\
                  bfactor=2, gbox=0, maxiter=5, multithread=True, clobber=True, \
                  logfile='salt.log', verbose=True)

   #mosaic the amplifiers into the final image
   saltred.saltmosaic(images=biasfile, outimages='',outpref=outpath+'m',geomfile=geomfile,
                      interp=interp,cleanup=cleanup,clobber=clobber,logfile=logfile, verbose=verbose)
   # NOTE(review): profile is assigned but never used (same value as outfile)
   profile=outpath+'mbxp'+infile

   #remove intermediate steps
   if cleanup:
      if os.path.isfile(pinfile): os.remove(pinfile)
      if os.path.isfile(xinfile): os.remove(xinfile)
      if os.path.isfile(biasfile): os.remove(biasfile)

   return