def __init__(self, eventpcf):
    tini = time.time()

    # Open new log based on the file name
    logname = eventpcf[:-4] + "ini.log"
    global log
    log = le.Logedit(logname)
    self.logname = logname

    # initialize Univ
    Univ.__init__(self)

    pcf = rd.read_pcf(eventpcf)
    self.initpars(pcf)
    self.calc(pcf)
    self.read()
    self.check()
    self.save()

    # Print time elapsed and close log:
    cwd = os.getcwd() + "/"
    log.writelog("\nOutput files:")
    log.writelog("Data:")
    log.writelog(" " + cwd + self.eventname + "_ini.dat")
    log.writelog(" " + cwd + self.eventname + "_ini.h5")
    log.writelog("Log:")
    log.writelog(" " + cwd + logname)
    log.writelog("Figures:")
    log.writelog(" " + cwd + self.eventname + "-fig101.png")

    dt = t.hms_time(time.time() - tini)
    log.writeclose('\nEnd init and read. Time (h:m:s): %s' % dt)
def __init__(self, eventpcf, cwd):
    owd = os.getcwd()
    os.chdir(cwd)
    tini = time.time()

    # Open new log based on the file name
    logname = eventpcf[:-4] + "_ini.log"
    log = le.Logedit(logname)
    self.logname = logname

    # initialize Univ
    Univ.__init__(self)

    pcf, = rd.read_pcf(eventpcf, 'event', expand=False)
    self.initpars(pcf, log)
    self.calc(pcf, log)
    self.read(log)
    self.check(log)
    self.save()

    # Print time elapsed and close log:
    log.writelog("\nOutput files:")
    log.writelog("Data:")
    log.writelog(" " + cwd + '/' + self.eventname + "_ini.dat")
    log.writelog(" " + cwd + '/' + self.eventname + "_ini.h5")
    log.writelog("Log:")
    log.writelog(" " + logname)
    log.writelog("Figures:")
    log.writelog(" " + cwd + '/' + self.eventname + "-fig101.png")

    dt = t.hms_time(time.time() - tini)
    log.writeclose('\nEnd init and read. Time (h:m:s): %s' % dt)
    os.chdir(owd)

    if self.runp2:
        os.system("python3 poet.py p2")
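# Illustrative sketch (not pipeline code): the constructor above does the
# working-directory bookkeeping by hand (owd = os.getcwd(); os.chdir(cwd);
# ...; os.chdir(owd)).  The same idea expressed as a context manager restores
# the original directory even if a stage raises; the name _working_directory
# below is hypothetical.
import contextlib
import os


@contextlib.contextmanager
def _working_directory(path):
    """Temporarily chdir into `path`, restoring the previous cwd on exit."""
    owd = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(owd)

# Usage sketch:
#   with _working_directory(cwd):
#       ...  # run the init/read stage relative to the event directory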
def photometry(event, pcf, photdir, mute): tini = time.time() # Create photometry log logname = event.logname log = le.Logedit(photdir + "/" + logname, logname) log.writelog("\nStart " + photdir + " photometry: " + time.ctime()) parentdir = os.getcwd() + "/" os.chdir(photdir) # copy photom.pcf in photdir pcf.make_file("photom.pcf") # Parse the attributes from the control file to the event: attrib = vars(pcf) keys = attrib.keys() for key in keys: setattr(event, key, attrib.get(key).get()) maxnimpos, npos = event.maxnimpos, event.npos # allocating frame parameters: event.fp.aplev = np.zeros((npos, maxnimpos)) # aperture flux event.fp.aperr = np.zeros((npos, maxnimpos)) # aperture error event.fp.nappix = np.zeros((npos, maxnimpos)) # number of aperture pixels event.fp.skylev = np.zeros((npos, maxnimpos)) # background sky flux level event.fp.skyerr = np.zeros((npos, maxnimpos)) # sky error event.fp.nskypix = np.zeros((npos, maxnimpos)) # number of sky pixels event.fp.nskyideal = np.zeros( (npos, maxnimpos)) # ideal number of sky pixels event.fp.status = np.zeros((npos, maxnimpos)) # apphot return status event.fp.good = np.zeros((npos, maxnimpos)) # good flag # Aperture photometry: if not event.dooptimal or event.from_aper == None: # Multy Process set up: # Shared memory arrays allow only 1D Arrays :( aplev = Array("d", np.zeros(npos * maxnimpos)) aperr = Array("d", np.zeros(npos * maxnimpos)) nappix = Array("d", np.zeros(npos * maxnimpos)) skylev = Array("d", np.zeros(npos * maxnimpos)) skyerr = Array("d", np.zeros(npos * maxnimpos)) nskypix = Array("d", np.zeros(npos * maxnimpos)) nskyideal = Array("d", np.zeros(npos * maxnimpos)) status = Array("d", np.zeros(npos * maxnimpos)) good = Array("d", np.zeros(npos * maxnimpos)) # Size of chunk of data each core will process: chunksize = maxnimpos / event.ncores + 1 print("Number of cores: " + str(event.ncores)) # Start Muti Procecess: processes = [] for nc in np.arange(event.ncores): start = nc * chunksize # Starting index to process end = (nc + 1) * chunksize # Ending index to process proc = Process(target=do_aphot, args=(start, end, event, log, mute, aplev, aperr, nappix, skylev, skyerr, nskypix, nskyideal, status, good)) processes.append(proc) proc.start() # Make sure all processes finish their work: for nc in np.arange(event.ncores): processes[nc].join() # Put the results in the event. 
I need to reshape them: event.fp.aplev = np.asarray(aplev).reshape(npos, maxnimpos) event.fp.aperr = np.asarray(aperr).reshape(npos, maxnimpos) event.fp.nappix = np.asarray(nappix).reshape(npos, maxnimpos) event.fp.skylev = np.asarray(skylev).reshape(npos, maxnimpos) event.fp.skyerr = np.asarray(skyerr).reshape(npos, maxnimpos) event.fp.nskypix = np.asarray(nskypix).reshape(npos, maxnimpos) event.fp.nskyideal = np.asarray(nskyideal).reshape(npos, maxnimpos) event.fp.status = np.asarray(status).reshape(npos, maxnimpos) event.fp.good = np.asarray(good).reshape(npos, maxnimpos) # raw photometry (no sky subtraction): event.fp.apraw = (event.fp.aplev + (event.fp.skylev * event.fp.nappix)) # Print results into the log if it wans't done before: for pos in np.arange(npos): for i in np.arange(event.nimpos[pos]): log.writelog( '\nframe =%7d ' % i + 'pos =%5d ' % pos + 'y =%7.3f ' % event.fp.y[pos, i] + 'x =%7.3f' % event.fp.x[pos, i] + '\n' + 'aplev =%11.3f ' % event.fp.aplev[pos, i] + 'aperr =%9.3f ' % event.fp.aperr[pos, i] + 'nappix =%6.2f' % event.fp.nappix[pos, i] + '\n' + 'skylev=%11.3f ' % event.fp.skylev[pos, i] + 'skyerr=%9.3f ' % event.fp.skyerr[pos, i] + 'nskypix=%6.2f ' % event.fp.nskypix[pos, i] + 'nskyideal=%6.2f' % event.fp.nskyideal[pos, i] + '\n' + 'status=%7d ' % event.fp.status[pos, i] + 'good =%5d' % event.fp.good[pos, i], mute=True) elif event.from_aper != None: # Load previous aperture photometry if required for optimal: evt = me.loadevent(parentdir + event.from_aper + "/" + event.eventname + "_pht") event.fp.aplev = evt.fp.aplev event.fp.aperr = evt.fp.aperr event.fp.nappix = evt.fp.nappix event.fp.skylev = evt.fp.skylev event.fp.skyerr = evt.fp.skyerr event.fp.nskypix = evt.fp.nskypix event.fp.nskyideal = evt.fp.nskyideal event.fp.status = evt.fp.status event.fp.good = evt.fp.good event.fp.apraw = evt.fp.apraw if event.dooptimal: ofp, psf = do.dooptphot(event.data, event.uncd, event.mask, event.fp, event.srcest, event.nimpos, rejlim=[10.45, 1000, 1.5], order=1, resize=event.oresize, norm=1, trim=event.otrim, log=log) event.fp = ofp event.psf = psf elif event.ispsf: # PSF aperture correction: log.writelog('Calculating PSF aperture:') event.aperfrac, event.psfnappix, event.psfskylev, \ event.psfnskypix, event.psfnskyideal, event.psfstatus \ = ap.apphot(event.psfim, event.psfctr, event.photap * event.psfexpand, event.skyin * event.psfexpand, event.skyout * event.psfexpand, med = event.skymed, expand = event.apscale, nappix = True, skylev = True, nskypix = True, nskyideal = True, status = True) event.aperfrac += event.psfskylev * event.psfnappix event.fp.aplev /= event.aperfrac event.fp.aperr /= event.aperfrac log.writelog('Aperture contains %f of PSF.' % event.aperfrac) # For running pixel-level decorrelation (pld) if event.ispld and event.npos == 1: event.apdata = pld.pld_box(event.data, event.targpos, event.pldhw, event.fp.skylev) log.writelog( "Created " + str(event.pldhw * 2 + 1) + "x" + str(event.pldhw * 2 + 1) + " box around centroid for pixel-level decorrelation and normalized it in time." ) elif event.ispld and event.npos != 1: log.writelog( "Could not perform pixel-level decorrelation because there is more than 1 nod position." 
        )

    # save
    print("\nSaving ...")
    me.saveevent(event, event.eventname + "_pht",
                 delete=['data', 'uncd', 'mask'])

    # Print time elapsed and close log:
    cwd = os.getcwd() + "/"
    log.writelog("Output files (" + event.photdir + "):")
    log.writelog("Data:")
    log.writelog(" " + cwd + event.eventname + "_pht.dat")
    log.writelog("Log:")
    log.writelog(" " + cwd + logname)

    dt = t.hms_time(time.time() - tini)
    log.writeclose("\nEnd Photometry. Time (h:m:s): %s " % dt
                   + " (" + photdir + ")")
    print("-------------- ------------\n")
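# Illustrative sketch (not pipeline code): the photometry and centering steps
# share one pattern -- allocate flat multiprocessing.Array buffers (shared
# memory only supports 1-D arrays), let each worker fill its [start, end)
# chunk of frames, then reshape the flat buffers to (npos, maxnimpos).  A
# self-contained toy version of that pattern; _toy_worker stands in for
# do_aphot / do_center.
import numpy as np
from multiprocessing import Array, Process


def _toy_worker(start, end, npos, maxnimpos, out):
    # A real worker would run aperture photometry or centering per frame.
    for pos in range(npos):
        for i in range(start, min(end, maxnimpos)):
            out[pos * maxnimpos + i] = pos + 0.001 * i


def _demo_shared_arrays(npos=2, maxnimpos=10, ncores=2):
    out = Array("d", np.zeros(npos * maxnimpos))   # shared, flat buffer
    chunksize = maxnimpos // ncores + 1            # ceiling-style chunk size
    processes = []
    for nc in range(ncores):
        proc = Process(target=_toy_worker,
                       args=(nc * chunksize, (nc + 1) * chunksize,
                             npos, maxnimpos, out))
        processes.append(proc)
        proc.start()
    for proc in processes:
        proc.join()
    # Back to a (npos, maxnimpos) ndarray, as the code above does:
    return np.asarray(out).reshape(npos, maxnimpos)

# Call _demo_shared_arrays() under an `if __name__ == "__main__":` guard when
# the multiprocessing start method is "spawn" (Windows, macOS).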
def centering(event, pcf, centerdir, owd): os.chdir(centerdir) tini = time.time() # Create centering log log = le.Logedit(event.logname, event.logname) log.writelog("\nStart " + centerdir + " centering: " + time.ctime()) # Parse the attributes from the control file to the event: attrib = vars(pcf) keys = attrib.keys() for key in keys: setattr(event, key, attrib.get(key)) # Check least asym parameters work: if event.method in ['lac', 'lag']: if event.ctrim < (event.cradius + event.csize) and event.ctrim != 0: event.ctrim = event.cradius + event.csize + 1 log.writelog('Trim radius is too small, changed to: %i' % event.ctrim) if event.psfctrim < (event.psfcrad + event.psfcsize) and event.psfctrim != 0: event.psfctrim = event.psfcrad + event.psfcsize + 1 log.writelog('PSF Trim radius is too small, changed to: %i' % event.psfctrim) # Centering bad pixel mask: centermask = np.ones((event.ny, event.nx)) if event.ymask is not None: ymask = np.asarray(event.ymask, int) xmask = np.asarray(event.xmask, int) for i in range(len(ymask)): centermask[ymask[i], xmask[i]] = 0 # PSF: # Re-evaluate if a PSF has been redefined: if event.newpsf is not None: event.ispsf = os.path.isfile(event.newpsf) if event.ispsf: event.psffile = event.newpsf log.writelog('The PSF file has been redefined!') log.writelog("PSF: " + event.psffile) # PSF Centering: if event.ispsf: event.psfim = fits.getdata(event.psffile) # Guess of the center of the PSF (center of psfim) psfctrguess = np.asarray(np.shape(event.psfim)) // 2 # Do not find center of PSF: if event.nopsfctr: event.psfctr = psfctrguess # Find center of PSF: else: if event.method == "bpf" or event.method == "ipf": method = "fgc" else: method = event.method event.psfctr, extra = cd.centerdriver(method, event.psfim, psfctrguess, event.psfctrim, event.psfcrad, event.psfcsize, npskyrad=(event.npskyin, event.npskyout)) log.writelog('PSF center found.') else: event.psfim = None event.psfctr = None log.writelog('No PSF supplied.') # Find center of the mean Image: event.targpos = np.zeros((2, event.npos)) # Override target position estimate if specified if type(pcf.srcesty) != type(None) and type(pcf.srcestx) != type(None): srcesty = str(pcf.srcesty).split(',') srcestx = str(pcf.srcestx).split(',') if len(srcestx) != len(srcesty): print("WARNING: Length of srcest inputs do not match!") if len(srcestx) != event.npos or len(srcesty) != event.npos: print("WARNING: Length of srcest inputs do not match npos!") if len(srcestx) > 1 or len(srcesty) > 1: print( "Verify that srcest override order matches telescope pos order." 
) for pos in range(event.npos): event.srcest[0, pos] = srcesty[pos] event.srcest[1, pos] = srcestx[pos] for pos in range(event.npos): print("Fitting mean image at pos: " + str(pos)) meanim = event.meanim[:, :, pos] guess = event.srcest[:, pos] targpos, extra = cd.centerdriver(event.method, meanim, guess, event.ctrim, event.cradius, event.csize, fitbg=event.fitbg, psf=event.psfim, psfctr=event.psfctr, expand=event.expand, npskyrad=(event.npskyin, event.npskyout)) event.targpos[:, pos] = targpos log.writelog("Center position(s) of the mean Image(s):\n" + str(np.transpose(event.targpos))) # Inclusion :::::::: # Multy Process set up: # Shared memory arrays allow only 1D Arrays :( x = Array("d", np.zeros(event.npos * event.maxnimpos)) y = Array("d", np.zeros(event.npos * event.maxnimpos)) xerr = Array("d", np.zeros(event.npos * event.maxnimpos)) yerr = Array("d", np.zeros(event.npos * event.maxnimpos)) xsig = Array("d", np.zeros(event.npos * event.maxnimpos)) ysig = Array("d", np.zeros(event.npos * event.maxnimpos)) rot = Array("d", np.zeros(event.npos * event.maxnimpos)) noisepix = Array("d", np.zeros(event.npos * event.maxnimpos)) flux = Array("d", np.zeros(event.npos * event.maxnimpos)) sky = Array("d", np.zeros(event.npos * event.maxnimpos)) goodfit = Array("d", np.zeros(event.npos * event.maxnimpos)) # Size of chunk of data each core will process: chunksize = event.maxnimpos // event.ccores + 1 print("Number of cores: " + str(event.ccores)) # Start Muti Procecess: :::::::::::::::::::::::::::::::::::::: processes = [] for nc in range(event.ccores): start = nc * chunksize # Starting index to process end = (nc + 1) * chunksize # Ending index to process proc = Process(target=do_center, args=(start, end, event, centermask, log, x, y, flux, sky, goodfit, xerr, yerr, xsig, ysig, noisepix, rot)) processes.append(proc) proc.start() # Make sure all processes finish their work: for nc in range(event.ccores): processes[nc].join() # Put the results in the event. 
I need to reshape them: event.fp.x = np.asarray(x).reshape(event.npos, event.maxnimpos) event.fp.y = np.asarray(y).reshape(event.npos, event.maxnimpos) event.fp.xerr = np.asarray(xerr).reshape(event.npos, event.maxnimpos) event.fp.yerr = np.asarray(yerr).reshape(event.npos, event.maxnimpos) event.fp.noisepix = np.asarray(noisepix).reshape(event.npos, event.maxnimpos) # If Gaussian fit: if event.method == 'fgc' or event.method == 'rfgc': event.fp.xsig = np.asarray(xsig).reshape(event.npos, event.maxnimpos) event.fp.ysig = np.asarray(ysig).reshape(event.npos, event.maxnimpos) event.fp.rot = np.asarray(rot).reshape(event.npos, event.maxnimpos) # If PSF fit: if event.method in ["ipf", "bpf"]: event.fp.flux = np.asarray(flux).reshape(event.npos, event.maxnimpos) event.fp.psfsky = np.asarray(sky).reshape(event.npos, event.maxnimpos) event.fp.goodfit = np.asarray(goodfit).reshape(event.npos, event.maxnimpos) # :::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: # Pixel R position: event.fp.r = np.sqrt((event.fp.x % 1.0 - 0.5)**2.0 + (event.fp.y % 1.0 - 0.5)**2.0) log.writelog("End frames centering.") # Save print("\nSaving") if event.denoised: me.saveevent(event, event.eventname + "_ctr", save=['dendata', 'data', 'uncd', 'mask']) else: me.saveevent(event, event.eventname + "_ctr", save=['data', 'uncd', 'mask']) # Print time elapsed and close log: cwd = os.getcwd() log.writelog("Output files (" + event.centerdir + "):") log.writelog("Data:") log.writelog(" " + cwd + '/' + event.eventname + "_ctr.dat") log.writelog(" " + cwd + '/' + event.eventname + "_ctr.h5") log.writelog("Log:") log.writelog(" " + cwd + '/' + event.logname) dt = t.hms_time(time.time() - tini) log.writeclose("\nEnd Centering. Time (h:m:s): %s" % dt + " (" + event.centerdir + ")") print("------------- ------------\n") os.chdir(owd) if event.runp4: os.system("python3 poet.py p4 %s" % event.centerdir)
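# Illustrative note (not pipeline code): fp.r computed above is the distance
# of each centroid from the center of the pixel it lands on -- x % 1 and
# y % 1 are the sub-pixel phases and, in this convention, 0.5 marks the pixel
# center.  This is the quantity typically used for Spitzer intrapixel
# decorrelation.  A small stand-alone version, with hypothetical inputs:
import numpy as np


def _pixel_phase_radius(y, x):
    """Distance of (y, x) centroids from the nearest pixel center."""
    y, x = np.asarray(y), np.asarray(x)
    return np.sqrt((x % 1.0 - 0.5)**2.0 + (y % 1.0 - 0.5)**2.0)

# e.g. a centroid at (14.5, 15.5) sits on a pixel center (r = 0), while one
# at (14.0, 15.0) sits on a pixel corner (r ~ 0.707).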
def centering(event, pcf, centerdir): tini = time.time() # Create centering log logname = event.logname log = le.Logedit(centerdir + "/" + logname, logname) log.writelog("\nStart " + centerdir + " centering: " + time.ctime()) os.chdir(centerdir) # copy center.pcf in centerdir pcf.make_file("center.pcf") # Parse the attributes from the control file to the event: attrib = vars(pcf) keys = attrib.keys() for key in keys: setattr(event, key, attrib.get(key).get()) # Check least asym parameters work: if event.method in ['lac', 'lag']: if event.ctrim < (event.cradius + event.csize) and event.ctrim is not 0: event.ctrim = event.cradius + event.csize + 1 log.writelog('Trim radius is too small, changed to: %i'%event.ctrim) if event.psfctrim < (event.psfcrad + event.psfcsize) and event.psfctrim is not 0: event.psfctrim = event.psfcrad + event.psfcsize + 1 log.writelog('PSF Trim radius is too small, changed to: %i' %event.psfctrim) # Centering bad pixel mask: centermask = np.ones((event.ny, event.nx)) if event.ymask is not None: ymask = np.asarray(event.ymask, int) xmask = np.asarray(event.xmask, int) for i in np.arange(len(ymask)): centermask[ymask[i], xmask[i]] = 0 # PSF: # Re-evaluate if a PSF has been redefined: if event.newpsf is not None: event.ispsf = os.path.isfile(event.newpsf) if event.ispsf: event.psffile = event.newpsf log.writelog('The PSF file has been redefined!') log.writelog("PSF: " + event.psffile) # PSF Centering: if event.ispsf: event.psfim = pf.getdata(event.psffile) # Guess of the center of the PSF (center of psfim) psfctrguess = np.asarray(np.shape(event.psfim))/2 # Do not find center of PSF: if event.nopsfctr: event.psfctr = psfctrguess # Find center of PSF: else: ''' if event.method == "bpf" or event.method == "ipf": method = "fgc" else: method = event.method event.psfctr, extra = cd.centerdriver(method, event.psfim, psfctrguess, event.psfctrim, event.psfcrad, event.psfcsize) ''' # Always use 'fgc' on PSF, for testing event.psfctr, extra = cd.centerdriver("fgc", event.psfim, psfctrguess, event.psfctrim, event.psfcrad, event.psfcsize) #FINDME log.writelog('PSF center found.') print(event.psfctr) #FINDME else: event.psfim = None event.psfctr = None log.writelog('No PSF supplied.') # Find center of the mean Image: event.targpos = np.zeros((2, event.npos)) for pos in np.arange(event.npos): meanim = event.meanim[:,:,pos] guess = event.srcest[:, pos] targpos, extra = cd.centerdriver(event.method, meanim, guess, event.ctrim, event.cradius, event.csize, fitbg=event.fitbg, psf=event.psfim, psfctr=event.psfctr, expand=event.expand) event.targpos[:,pos] = targpos log.writelog("Center position(s) of the mean Image(s):\n" + str(np.transpose(event.targpos))) # Inclusion :::::::: # Multy Process set up: # Shared memory arrays allow only 1D Arrays :( event.maxnimpos = int(event.maxnimpos) event.npos = int(event.npos) x = Array("d", np.zeros(event.npos * event.maxnimpos)) y = Array("d", np.zeros(event.npos * event.maxnimpos)) sx = Array("d", np.zeros(event.npos * event.maxnimpos)) sy = Array("d", np.zeros(event.npos * event.maxnimpos)) flux = Array("d", np.zeros(event.npos * event.maxnimpos)) sky = Array("d", np.zeros(event.npos * event.maxnimpos)) goodfit = Array("d", np.zeros(event.npos * event.maxnimpos)) # Size of chunk of data each core will process: chunksize = event.maxnimpos/event.ccores + 1 print("Number of cores: " + str(event.ccores)) # Start Muti Procecess: :::::::::::::::::::::::::::::::::::::: processes = [] for nc in np.arange(event.ccores): start = nc * chunksize # Starting 
index to process end = (nc+1) * chunksize # Ending index to process proc = Process(target=do_center, args=(start, end, event, centermask, log, x, y, sx, sy, flux, sky, goodfit)) processes.append(proc) proc.start() # Make sure all processes finish their work: for nc in np.arange(event.ccores): processes[nc].join() # Put the results in the event. I need to reshape them: event.fp.x = np.asarray(x ).reshape(event.npos,event.maxnimpos) event.fp.y = np.asarray(y ).reshape(event.npos,event.maxnimpos) event.fp.sx = np.asarray(sx ).reshape(event.npos,event.maxnimpos) event.fp.sy = np.asarray(sy ).reshape(event.npos,event.maxnimpos) # If PSF fit: if event.method in ["ipf", "bpf"]: event.fp.flux = np.asarray(flux ).reshape(event.npos,event.maxnimpos) event.fp.psfsky = np.asarray(sky ).reshape(event.npos,event.maxnimpos) event.fp.goodfit = np.asarray(goodfit).reshape(event.npos,event.maxnimpos) # :::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: # Pixel R position: event.fp.r = np.sqrt((event.fp.x % 1.0 - 0.5)**2.0 + (event.fp.y % 1.0 - 0.5)**2.0 ) log.writelog("End frames centering.") # Save print("\nSaving") if event.denoised: me.saveevent(event, event.eventname + "_ctr", save=['dendata', 'data', 'uncd', 'mask']) else: me.saveevent(event, event.eventname + "_ctr", save=['data', 'uncd', 'mask']) # Print time elapsed and close log: cwd = os.getcwd() + "/" log.writelog("Output files (" + event.centerdir + "):") log.writelog("Data:") log.writelog(" " + cwd + event.eventname + "_ctr.dat") log.writelog(" " + cwd + event.eventname + "_ctr.h5") log.writelog("Log:") log.writelog(" " + cwd + event.logname) dt = t.hms_time(time.time()-tini) log.writeclose("\nEnd Centering. Time (h:m:s): %s"%dt + " (" + event.centerdir + ")") print("------------- ------------\n") if hasattr(event, 'runp4') and event.runp4 == True: os.chdir(event.eventdir) os.system("poet.py p4 %s/"%event.centerdir)
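# Illustrative check (not pipeline code): the chunk size used by these
# drivers, chunksize = maxnimpos // ncores + 1, is a ceiling-style split, so
# ncores * chunksize always covers every frame index and the last worker
# simply runs past the end (the workers are presumably expected to ignore
# indices beyond the last frame).  Note the older driver above still writes
# it with `/`, which yields a float under Python 3 and should be floored.
# A quick sanity check of the covering property:
def _chunks_cover_all_frames(maxnimpos, ncores):
    chunksize = maxnimpos // ncores + 1
    covered = set()
    for nc in range(ncores):
        covered.update(range(nc * chunksize,
                             min((nc + 1) * chunksize, maxnimpos)))
    return covered == set(range(maxnimpos))

# e.g. _chunks_cover_all_frames(1000, 7) and _chunks_cover_all_frames(64, 8)
# both return True.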
def badpix(eventname, cwd): """ Modification History: --------------------- 2010-??-?? patricio Initial Python implementation 2014-08-13 garland switched the pyfits package to astropy.io.fits [email protected] 2017-06-20 zacchaeus Fixed None comparisons [email protected] """ owd = os.getcwd() os.chdir(cwd) tini = time.time() # Load the event event = me.loadevent(eventname) # Load the data me.updateevent(event, eventname, event.loadnext) # Create a new log starting from the old one. oldlogname = event.logname logname = event.eventname + ".log" log = le.Logedit(logname, oldlogname) event.logname = logname log.writelog('\nMARK: ' + time.ctime() + ': Starting p2badpix.') # ccampo 3/18/2011: do this in p5 # Julian observation date #event.fp.juldat = event.jdjf80 + event.fp.time / 86400.0 # ::::::::::::::::::::::: UNCERTAINTIES :::::::::::::::::::::::::::::::: # IRAC subarray data come with bogus uncertainties that are not linearly # related to photon noise. We scale them later, using the reduced chi # squared from the model fit. # ::::::::::::::::::::::: FLUX CONVERSION ::::::::::::::::::::::::::::: # Do we want flux (uJy/pix) or surface brightness (MJy/sr) units? If # doing photometry, convert to flux. Since we care about relative # numbers, it doesn't really matter. # Convert from surface brightness (MJy/sr) to flux units (uJy/pix) if event.fluxunits: log.writelog('Converting surface brightness to flux') event.data, event.uncd = btf.poet_bright2flux(event.data, event.uncd, event.posscl) if event.havepreflash: event.predata, event.preuncd = btf.poet_bright2flux( event.predata, event.preuncd, event.posscl) if event.havepostcal: event.postdata, event.postuncd = btf.poet_bright2flux( event.postdata, event.postuncd, event.posscl) else: log.writelog('Did not convert bright to flux.') # Mean Background Estimate, from zodi model event.estbg = (np.mean(event.fp.zodi[np.where(event.fp.exist)]) + np.mean(event.fp.ism[np.where(event.fp.exist)]) + np.mean(event.fp.cib[np.where(event.fp.exist)])) if event.fluxunits: event.estbg *= (event.srperas * 1e12 * np.mean(event.posscl[0, :]) * np.mean(event.posscl[1, :])) # Bad Pixel Masking log.writelog('Find and fix bad pixels') # Get permanent bad pixel mask. if not event.ispmask[0]: log.writelog('\nPermanent Bad pixel mask not found!') else: hdu = fits.open(event.pmaskfile[0]) if hdu[0].header['bitpix'] == -32: # if data type is float hdu[0].scale(type='int16') # cast it down to int16 event.pmask = hdu[0].data # IRS FIX: # IRS data contains the blue peak subarray while its pmask contains # the whole array (Hard coding) if event.photchan == 5: event.pmask = event.pmask[3:59, 86:127] # Do NOT define sigma, we have a different scheme for finding baddies # adds Spitzer rejects: fp.nsstrej & our rejects: fp.nsigrej event.mask = pbm.poet_badmask(event.data, event.uncd, event.pmask, event.inst.pcrit, event.bdmskd, event.inst.dcrit, event.fp, nimpos=event.nimpos) # User rejected pixels: if event.userrej is not None: for i in range(np.shape(event.userrej)[0]): event.mask[:, event.userrej[i, 0], event.userrej[i, 1], :] = 0 event.fp.userrej = np.sum(np.sum(1 - event.mask, axis=1), axis=1) event.fp.userrej = np.transpose(event.fp.userrej) - event.fp.nsstrej else: event.fp.userrej = np.zeros((event.npos, event.maxnimpos)) # define sigma here. 
# adds median sky: fp.medsky event.meanim = pcb.poet_chunkbad(event.data, event.uncd, event.mask, event.nimpos, event.sigma, event.szchunk, event.fp, event.nscyc) log.writelog('Masks combined') # Repeat procedure for preflash and postcal data: if event.havepreflash: event.premask = pbm.poet_badmask(event.predata, event.preuncd, event.pmask, event.inst.pcrit, event.prebdmskd, event.inst.dcrit, event.prefp, nimpos=event.prenimpos) if event.userrej is not None: for i in range(np.shape(event.userrej)[0]): event.premask[:, event.userrej[i, 0], event.userrej[i, 1], :] = 0 event.prefp.userrej = np.sum(np.sum(1 - event.premask, axis=1), axis=1) event.prefp.userrej = np.transpose( event.prefp.userrej) - event.prefp.nsstrej else: event.prefp.userrej = np.zeros((event.npos, event.premaxnimpos)) event.premeanim = pcb.poet_chunkbad(event.predata, event.preuncd, event.premask, event.prenimpos, event.sigma, event.szchunk, event.prefp, event.nscyc) if event.havepostcal: event.postmask = pbm.poet_badmask(event.postdata, event.postuncd, event.pmask, event.inst.pcrit, event.postbdmskd, event.inst.dcrit, event.postfp, nimpos=event.postnimpos) if event.userrej is not None: for i in range(np.shape(event.userrej)[0]): event.postmask[:, event.userrej[i, 0], event.userrej[i, 1], :] = 0 event.postfp.userrej = np.sum(np.sum(1 - event.postmask, axis=1), axis=1) event.postfp.userrej = np.transpose(event.postfp.userrej) - \ event.postfp.nsstrej else: event.postfp.userrej = np.zeros((event.npos, event.postmaxnimpos)) event.postmeanim = pcb.poet_chunkbad(event.postdata, event.postuncd, event.postmask, event.postnimpos, event.sigma, event.szchunk, event.postfp, event.nscyc) for pos in range(event.npos): fits.writeto(event.eventname + "_medpostcal.fits", event.postmeanim[:, :, pos], clobber=True) # Delete post calibration data: event.havepostcal = False del (event.postdata) del (event.postmask) del (event.postuncd) del (event.postbdmskd) # Save the data if event.instrument == 'mips': todel = ['bdmskd', 'brmskd'] # what to delete else: todel = ['bdmskd'] me.saveevent(event, event.eventname + "_bpm", save=['data', 'uncd', 'mask'], delete=todel) # Print time elapsed and close log: log.writelog("Output files:") log.writelog("Data:") log.writelog(" " + cwd + '/' + event.eventname + "_bpm.dat") log.writelog(" " + cwd + '/' + event.eventname + "_bpm.h5") log.writelog("Log:") log.writelog(" " + cwd + '/' + logname) dt = t.hms_time(time.time() - tini) log.writeclose('\nBad pixel masking time (h:m:s): %s ' % dt) os.chdir(owd) if event.runp3: #poet.p(3) os.system("python3 poet.py p3")
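# Illustrative sketch (not pipeline code): the fluxunits branch above converts
# surface brightness (MJy/sr) into flux per pixel (uJy/pix) by multiplying by
# the pixel solid angle and by 1e12 uJy/MJy; the background estimate is scaled
# the same way from srperas (steradians per square arcsecond) and the per-axis
# pixel scales.  A scalar version of that arithmetic (poet_bright2flux itself
# also converts the uncertainty arrays):
def _bright2flux_scalar(sb_mjy_per_sr, yscale_arcsec, xscale_arcsec,
                        srperas=2.35044e-11):
    """Convert one surface-brightness value (MJy/sr) to uJy per pixel."""
    pix_sr = srperas * yscale_arcsec * xscale_arcsec   # pixel solid angle [sr]
    return sb_mjy_per_sr * pix_sr * 1e12               # MJy -> uJy

# e.g. 0.1 MJy/sr on ~1.2" x 1.2" IRAC subarray pixels is roughly 3 uJy/pix.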
def photometry(event, pcf, photdir, mute, owd): tini = time.time() # Create photometry log logname = event.logname log = le.Logedit(photdir + "/" + logname, logname) log.writelog("\nStart " + photdir + " photometry: " + time.ctime()) parentdir = os.getcwd() + "/" os.chdir(photdir) # Parse the attributes from the control file to the event: attrib = vars(pcf) keys = attrib.keys() for key in keys: setattr(event, key, attrib.get(key)) maxnimpos, npos = event.maxnimpos, event.npos # allocating frame parameters: event.fp.aplev = np.zeros((npos, maxnimpos)) event.fp.aperr = np.zeros((npos, maxnimpos)) event.fp.nappix = np.zeros((npos, maxnimpos)) event.fp.skylev = np.zeros((npos, maxnimpos)) event.fp.skyerr = np.zeros((npos, maxnimpos)) event.fp.nskypix = np.zeros((npos, maxnimpos)) event.fp.nskyideal = np.zeros((npos, maxnimpos)) event.fp.status = np.zeros((npos, maxnimpos)) event.fp.good = np.zeros((npos, maxnimpos)) # For interpolated aperture photometry, we need to "interpolate" the # mask, which requires float values. Thus, we convert the mask to # floats (this needs to be done before processes are spawned or memory # usage balloons). if event.mask.dtype != float: event.mask = event.mask.astype(float) # Aperture photometry: if event.phottype == "aper": # not event.dooptimal or event.from_aper is None: # Multy Process set up: # Shared memory arrays allow only 1D Arrays :( aplev = Array("d", np.zeros(npos * maxnimpos)) # aperture flux aperr = Array("d", np.zeros(npos * maxnimpos)) # aperture error nappix = Array("d", np.zeros(npos * maxnimpos)) # number of aperture pixels skylev = Array("d", np.zeros(npos * maxnimpos)) # sky level skyerr = Array("d", np.zeros(npos * maxnimpos)) # sky error nskypix = Array("d", np.zeros(npos * maxnimpos)) # number of sky pixels nskyideal = Array("d", np.zeros( npos * maxnimpos)) # ideal number of sky pixels status = Array("d", np.zeros(npos * maxnimpos)) # apphot return status good = Array("d", np.zeros(npos * maxnimpos)) # good flag # Size of chunk of data each core will process: chunksize = maxnimpos // event.ncores + 1 event.aparr = np.ones(npos * maxnimpos) * event.photap + event.offset print("Number of cores: " + str(event.ncores)) # Start Muti Procecess: processes = [] for nc in range(event.ncores): start = nc * chunksize # Starting index to process end = (nc + 1) * chunksize # Ending index to process proc = Process(target=do_aphot, args=(start, end, event, log, mute, aplev, aperr, nappix, skylev, skyerr, nskypix, nskyideal, status, good, 0)) processes.append(proc) proc.start() # Make sure all processes finish their work: for nc in range(event.ncores): processes[nc].join() # Put the results in the event. 
I need to reshape them: event.fp.aplev = np.asarray(aplev).reshape(npos, maxnimpos) event.fp.aperr = np.asarray(aperr).reshape(npos, maxnimpos) event.fp.nappix = np.asarray(nappix).reshape(npos, maxnimpos) event.fp.skylev = np.asarray(skylev).reshape(npos, maxnimpos) event.fp.skyerr = np.asarray(skyerr).reshape(npos, maxnimpos) event.fp.nskypix = np.asarray(nskypix).reshape(npos, maxnimpos) event.fp.nskyideal = np.asarray(nskyideal).reshape(npos, maxnimpos) event.fp.status = np.asarray(status).reshape(npos, maxnimpos) event.fp.good = np.asarray(good).reshape(npos, maxnimpos) # raw photometry (no sky subtraction): event.fp.apraw = (event.fp.aplev + (event.fp.skylev * event.fp.nappix)) # Print results into the log if it wasn't done before: for pos in range(npos): for i in range(event.nimpos[pos]): log.writelog( '\nframe =%7d ' % i + 'pos =%5d ' % pos + 'y =%7.3f ' % event.fp.y[pos, i] + 'x =%7.3f' % event.fp.x[pos, i] + '\n' + 'aplev =%11.3f ' % event.fp.aplev[pos, i] + 'aperr =%9.3f ' % event.fp.aperr[pos, i] + 'nappix =%6.2f' % event.fp.nappix[pos, i] + '\n' + 'skylev=%11.3f ' % event.fp.skylev[pos, i] + 'skyerr=%9.3f ' % event.fp.skyerr[pos, i] + 'nskypix=%6.2f ' % event.fp.nskypix[pos, i] + 'nskyideal=%6.2f' % event.fp.nskyideal[pos, i] + '\n' + 'status=%7d ' % event.fp.status[pos, i] + 'good =%5d' % event.fp.good[pos, i], mute=True) elif event.phottype == "var": # variable aperture radius # Multy Process set up: # Shared memory arrays allow only 1D Arrays :( aplev = Array("d", np.zeros(npos * maxnimpos)) # aperture flux aperr = Array("d", np.zeros(npos * maxnimpos)) # aperture error nappix = Array("d", np.zeros(npos * maxnimpos)) # number of aperture pixels skylev = Array("d", np.zeros(npos * maxnimpos)) # sky level skyerr = Array("d", np.zeros(npos * maxnimpos)) # sky error nskypix = Array("d", np.zeros(npos * maxnimpos)) # number of sky pixels nskyideal = Array("d", np.zeros( npos * maxnimpos)) # ideal number of sky pixels status = Array("d", np.zeros(npos * maxnimpos)) # apphot return status good = Array("d", np.zeros(npos * maxnimpos)) # good flag # Size of chunk of data each core will process: chunksize = maxnimpos // event.ncores + 1 event.aparr = event.fp.noisepix[0]**.5 * event.photap + event.offset print("Number of cores: " + str(event.ncores)) # Start Muti Procecess: processes = [] for nc in range(event.ncores): start = nc * chunksize # Starting index to process end = (nc + 1) * chunksize # Ending index to process proc = Process(target=do_aphot, args=(start, end, event, log, mute, aplev, aperr, nappix, skylev, skyerr, nskypix, nskyideal, status, good, 0)) processes.append(proc) proc.start() # Make sure all processes finish their work: for nc in range(event.ncores): processes[nc].join() # Put the results in the event. 
I need to reshape them: event.fp.aplev = np.asarray(aplev).reshape(npos, maxnimpos) event.fp.aperr = np.asarray(aperr).reshape(npos, maxnimpos) event.fp.nappix = np.asarray(nappix).reshape(npos, maxnimpos) event.fp.skylev = np.asarray(skylev).reshape(npos, maxnimpos) event.fp.skyerr = np.asarray(skyerr).reshape(npos, maxnimpos) event.fp.nskypix = np.asarray(nskypix).reshape(npos, maxnimpos) event.fp.nskyideal = np.asarray(nskyideal).reshape(npos, maxnimpos) event.fp.status = np.asarray(status).reshape(npos, maxnimpos) event.fp.good = np.asarray(good).reshape(npos, maxnimpos) # raw photometry (no sky subtraction): event.fp.apraw = (event.fp.aplev + (event.fp.skylev * event.fp.nappix)) # Print results into the log if it wasn't done before: for pos in range(npos): for i in range(event.nimpos[pos]): log.writelog( '\nframe =%7d ' % i + 'pos =%5d ' % pos + 'y =%7.3f ' % event.fp.y[pos, i] + 'x =%7.3f' % event.fp.x[pos, i] + '\n' + 'aplev =%11.3f ' % event.fp.aplev[pos, i] + 'aperr =%9.3f ' % event.fp.aperr[pos, i] + 'nappix =%6.2f' % event.fp.nappix[pos, i] + '\n' + 'skylev=%11.3f ' % event.fp.skylev[pos, i] + 'skyerr=%9.3f ' % event.fp.skyerr[pos, i] + 'nskypix=%6.2f ' % event.fp.nskypix[pos, i] + 'nskyideal=%6.2f' % event.fp.nskyideal[pos, i] + '\n' + 'status=%7d ' % event.fp.status[pos, i] + 'good =%5d' % event.fp.good[pos, i], mute=True) elif event.phottype == "ell": # elliptical # Multy Process set up: # Shared memory arrays allow only 1D Arrays :( aplev = Array("d", np.zeros(npos * maxnimpos)) # aperture flux aperr = Array("d", np.zeros(npos * maxnimpos)) # aperture error nappix = Array("d", np.zeros(npos * maxnimpos)) # number of aperture pixels skylev = Array("d", np.zeros(npos * maxnimpos)) # sky level skyerr = Array("d", np.zeros(npos * maxnimpos)) # sky error nskypix = Array("d", np.zeros(npos * maxnimpos)) # number of sky pixels nskyideal = Array("d", np.zeros( npos * maxnimpos)) # ideal number of sky pixels status = Array("d", np.zeros(npos * maxnimpos)) # apphot return status good = Array("d", np.zeros(npos * maxnimpos)) # good flag # Size of chunk of data each core will process: chunksize = maxnimpos // event.ncores + 1 print("Number of cores: " + str(event.ncores)) # Start Muti Procecess: processes = [] for nc in range(event.ncores): start = nc * chunksize # Starting index to process end = (nc + 1) * chunksize # Ending index to process proc = Process(target=do_aphot, args=(start, end, event, log, mute, aplev, aperr, nappix, skylev, skyerr, nskypix, nskyideal, status, good, 0)) processes.append(proc) proc.start() # Make sure all processes finish their work: for nc in range(event.ncores): processes[nc].join() # Put the results in the event. 
I need to reshape them: event.fp.aplev = np.asarray(aplev).reshape(npos, maxnimpos) event.fp.aperr = np.asarray(aperr).reshape(npos, maxnimpos) event.fp.nappix = np.asarray(nappix).reshape(npos, maxnimpos) event.fp.skylev = np.asarray(skylev).reshape(npos, maxnimpos) event.fp.skyerr = np.asarray(skyerr).reshape(npos, maxnimpos) event.fp.nskypix = np.asarray(nskypix).reshape(npos, maxnimpos) event.fp.nskyideal = np.asarray(nskyideal).reshape(npos, maxnimpos) event.fp.status = np.asarray(status).reshape(npos, maxnimpos) event.fp.good = np.asarray(good).reshape(npos, maxnimpos) # raw photometry (no sky subtraction): event.fp.apraw = (event.fp.aplev + (event.fp.skylev * event.fp.nappix)) # Print results into the log if it wasn't done before: for pos in range(npos): for i in range(event.nimpos[pos]): log.writelog( '\nframe =%7d ' % i + 'pos =%5d ' % pos + 'y =%7.3f ' % event.fp.y[pos, i] + 'x =%7.3f' % event.fp.x[pos, i] + '\n' + 'aplev =%11.3f ' % event.fp.aplev[pos, i] + 'aperr =%9.3f ' % event.fp.aperr[pos, i] + 'nappix =%6.2f' % event.fp.nappix[pos, i] + '\n' + 'skylev=%11.3f ' % event.fp.skylev[pos, i] + 'skyerr=%9.3f ' % event.fp.skyerr[pos, i] + 'nskypix=%6.2f ' % event.fp.nskypix[pos, i] + 'nskyideal=%6.2f' % event.fp.nskyideal[pos, i] + '\n' + 'status=%7d ' % event.fp.status[pos, i] + 'good =%5d' % event.fp.good[pos, i], mute=True) elif event.phottype == "psffit": event.fp.aplev = event.fp.flux event.fp.skylev = event.fp.psfsky event.fp.good = np.zeros((event.npos, event.maxnimpos)) for pos in range(event.npos): event.fp.good[pos, 0:event.nimpos[pos]] = 1 elif event.phottype == "optimal": # utils for profile construction: pshape = np.array([2 * event.otrim + 1, 2 * event.otrim + 1]) subpsf = np.zeros(np.asarray(pshape, int) * event.expand) x = np.indices(pshape) clock = t.Timer(np.sum(event.nimpos), progress=np.array([0.05, 0.1, 0.25, 0.5, 0.75, 1.1])) for pos in range(npos): for i in range(event.nimpos[pos]): # Integer part of center of subimage: cen = np.rint([event.fp.y[pos, i], event.fp.x[pos, i]]) # Center in the trimed image: loc = (event.otrim, event.otrim) # Do the trim: img, msk, err = ie.trimimage(event.data[i, :, :, pos], *cen, *loc, mask=event.mask[i, :, :, pos], uncd=event.uncd[i, :, :, pos]) # Center of star in the subimage: ctr = (event.fp.y[pos, i] - cen[0] + event.otrim, event.fp.x[pos, i] - cen[1] + event.otrim) # Make profile: # Index of the position in the supersampled PSF: pix = pf.pos2index(ctr, event.expand) profile, pctr = pf.make_psf_binning(event.psfim, pshape, event.expand, [pix[0], pix[1], 1.0, 0.0], event.psfctr, subpsf) #subtract the sky level: img -= event.fp.psfsky[pos, i] # optimal photometry calculation: immean, uncert, good = op.optphot(img, profile, var=err**2.0, mask=msk) event.fp.aplev[pos, i] = immean event.fp.aperr[pos, i] = uncert event.fp.skylev[pos, i] = event.fp.psfsky[pos, i] event.fp.good[pos, i] = good # Report progress: clock.check(np.sum(event.nimpos[0:pos]) + i, name=event.centerdir) # START PREFLASH EDIT ::::::::::::::::::::::::::::::::::::: # Do aperture on preflash data: if event.havepreflash: print("\nStart preflash photometry:") premaxnimpos = event.premaxnimpos preaplev = Array("d", np.zeros(npos * premaxnimpos)) preaperr = Array("d", np.zeros(npos * premaxnimpos)) prenappix = Array("d", np.zeros(npos * premaxnimpos)) preskylev = Array("d", np.zeros(npos * premaxnimpos)) preskyerr = Array("d", np.zeros(npos * premaxnimpos)) preskynpix = Array("d", np.zeros(npos * premaxnimpos)) preskyideal = Array("d", np.zeros(npos * 
premaxnimpos)) prestatus = Array("d", np.zeros(npos * premaxnimpos)) pregood = Array("d", np.zeros(npos * premaxnimpos)) # Start Procecess: mute = False proc = Process(target=do_aphot, args=(0, event.prenimpos[0], event, log, mute, preaplev, preaperr, prenappix, preskylev, preskyerr, preskynpix, preskyideal, prestatus, pregood, 1)) proc.start() proc.join() # Put the results in the event. I need to reshape them: event.prefp.aplev = np.asarray(preaplev).reshape(npos, premaxnimpos) event.prefp.aperr = np.asarray(preaperr).reshape(npos, premaxnimpos) event.prefp.nappix = np.asarray(prenappix).reshape(npos, premaxnimpos) event.prefp.status = np.asarray(prestatus).reshape(npos, premaxnimpos) event.prefp.skylev = np.asarray(preskylev).reshape(npos, premaxnimpos) event.prefp.good = np.asarray(pregood).reshape(npos, premaxnimpos) # raw photometry (no sky subtraction): event.prefp.aplev = (event.prefp.aplev + (event.prefp.skylev * event.prefp.nappix)) # END PREFLASH EDIT ::::::::::::::::::::::::::::::::::::::: if event.method in ["bpf"]: event.ispsf = False # PSF aperture correction: if event.ispsf and event.phottype == "aper": log.writelog('Calculating PSF aperture:') event.psfim = event.psfim.astype(np.float64) imerr = np.ones(np.shape(event.psfim)) imask = np.ones(np.shape(event.psfim)) skyfrac = 0.1 event.aperfrac, ape, event.psfnappix, event.psfskylev, sle, \ event.psfnskypix, event.psfnskyideal, event.psfstatus \ = ap.apphot_c(event.psfim, imerr, imask, event.psfctr[0], event.psfctr[1], event.photap * event.psfexpand, event.skyin * event.psfexpand, event.skyout * event.psfexpand, skyfrac, event.apscale, event.skymed) event.aperfrac += event.psfskylev * event.psfnappix event.fp.aplev /= event.aperfrac event.fp.aperr /= event.aperfrac log.writelog('Aperture contains %f of PSF.' % event.aperfrac) if event.ispsf and event.phottype == "var": log.writelog('Calculating PSF aperture:') event.psfim = event.psfim.astype(np.float64) imerr = np.ones(np.shape(event.psfim)) imask = np.ones(np.shape(event.psfim)) skyfrac = 0.1 avgap = np.mean(event.aparr) event.aperfrac, ape, event.psfnappix, event.psfskylev, sle, \ event.psfnskypix, event.psfnskyideal, event.psfstatus \ = ap.apphot_c(event.psfim, imerr, imask, event.psfctr[0], event.psfctr[1], avgap * event.psfexpand, event.skyin * event.psfexpand, event.skyout * event.psfexpand, skyfrac, event.apscale, event.skymed) event.aperfrac += event.psfskylev * event.psfnappix event.fp.aplev /= event.aperfrac event.fp.aperr /= event.aperfrac log.writelog('Aperture contains %f of PSF.' % event.aperfrac) if event.ispsf and event.phottype == "ell": log.writelog('Calculating PSF aperture:') event.psfim = event.psfim.astype(np.float64) imerr = np.ones(np.shape(event.psfim)) imask = np.ones(np.shape(event.psfim)) skyfrac = 0.1 avgxwid = np.mean(event.fp.xsig * event.photap) avgywid = np.mean(event.fp.ysig * event.photap) avgrot = np.mean(event.fp.rot) event.aperfrac, ape, event.psfnappix, event.psfskylev, sle, \ event.psfnskypix, event.psfnskyideal, event.psfstatus \ = ap.elphot_c(event.psfim, imerr, imask, event.psfctr[0], event.psfctr[1], avgxwid * event.psfexpand, avgywid * event.psfexpand, avgrot, event.skyin * event.psfexpand, event.skyout * event.psfexpand, skyfrac, event.apscale, event.skymed) event.aperfrac += event.psfskylev * event.psfnappix event.fp.aplev /= event.aperfrac event.fp.aperr /= event.aperfrac log.writelog('Aperture contains %f of PSF.' % event.aperfrac) # Sadly we must do photometry for every aperture used # Possibly use a range and interpolate? 
Might be an option # for the future to speed this up. # This is commented out, as it seems to just remove the corrections # made by variable or elliptical photometry # if event.ispsf and (event.phottype == "var" or event.phottype == "ell"): # log.writelog('Calculating PSF aperture. This may take some time.') # event.psfim = event.psfim.astype(np.float64) # imerr = np.ones(np.shape(event.psfim)) # imask = np.ones(np.shape(event.psfim)) # skyfrac = 0.1 # aperfrac = Array("d", np.zeros(npos*maxnimpos))# psf flux # aperfracerr = Array("d", np.zeros(npos*maxnimpos))# psf flux error # psfnappix = Array("d", np.zeros(npos*maxnimpos))# psf aperture pix num # psfsky = Array("d", np.zeros(npos*maxnimpos))# psf sky level # psfskyerr = Array("d", np.zeros(npos*maxnimpos))# psf sky error # psfnskypix = Array("d", np.zeros(npos*maxnimpos))# psf sky pix num # psfnskyideal = Array("d", np.zeros(npos*maxnimpos))# psf ideal sky pix num # psfstatus = Array("d", np.zeros(npos*maxnimpos))# psf return status # psfgood = Array("d", np.zeros(npos*maxnimpos))# psf good flag # processes=[] # for nc in range(event.ncores): # start = nc * chunksize # end = (nc+1) * chunksize # proc = Process(target=do_aphot_psf, args=(start, end, event, log, mute, # aperfrac, aperfracerr, # psfnappix, # psfsky, psfskyerr, # psfnskypix, psfnskyideal, # psfstatus, psfgood)) # processes.append(proc) # proc.start() # for nc in range(event.ncores): # processes[nc].join() # # Reshape # event.aperfrac = np.asarray(aperfrac ).reshape(npos,maxnimpos) # event.aperfracerr = np.asarray(aperfracerr ).reshape(npos,maxnimpos) # event.psfnappix = np.asarray(psfnappix ).reshape(npos,maxnimpos) # event.psfsky = np.asarray(psfsky ).reshape(npos,maxnimpos) # event.psfskyerr = np.asarray(psfskyerr ).reshape(npos,maxnimpos) # event.psfnskypix = np.asarray(psfnskypix ).reshape(npos,maxnimpos) # event.psfnskyideal = np.asarray(psfnskyideal).reshape(npos,maxnimpos) # event.psfstatus = np.asarray(psfstatus ).reshape(npos,maxnimpos) # event.psfgood = np.asarray(psfgood ).reshape(npos,maxnimpos) # event.aperfrac += event.psfsky * event.psfnappix # event.fp.aplev /= event.aperfrac # event.fp.aperr /= event.aperfrac # log.writelog('Aperture contains average %f of PSF.'%np.mean(event.aperfrac)) # save print("\nSaving ...") # denoised data: if event.denphot: killdata = 'dendata' else: killdata = 'data' me.saveevent(event, event.eventname + "_pht", delete=[killdata, 'uncd', 'mask']) # Print time elapsed and close log: cwd = os.getcwd() + "/" log.writelog("Output files (" + event.photdir + "):") log.writelog("Data:") log.writelog(" " + cwd + event.eventname + "_pht.dat") log.writelog("Log:") log.writelog(" " + cwd + logname) dt = t.hms_time(time.time() - tini) log.writeclose("\nEnd Photometry. Time (h:m:s): %s " % dt + " (" + photdir + ")") print("-------------- ------------\n") os.chdir(owd) if event.runp5: os.system("python3 poet.py p5 %s/%s" % (event.centerdir, event.photdir))
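# Illustrative sketch (not pipeline code): the PSF aperture correction above
# measures the fraction of a unit-total PSF that falls inside the photometry
# aperture (aperfrac, after adding back the sky level that apphot subtracted
# from the PSF image) and divides aplev and aperr by it, so the reported flux
# approximates the total stellar flux.  A toy version of the same idea using
# a synthetic Gaussian PSF:
import numpy as np


def _toy_aperture_fraction(fwhm_pix=2.0, aprad_pix=3.0, size=51):
    """Fraction of a unit-total Gaussian PSF enclosed by a circular aperture."""
    sigma = fwhm_pix / 2.35482                    # FWHM -> Gaussian sigma
    y, x = np.indices((size, size)) - size // 2
    psf = np.exp(-(x**2 + y**2) / (2.0 * sigma**2))
    psf /= psf.sum()                              # normalize to unit total
    inside = x**2 + y**2 <= aprad_pix**2
    return psf[inside].sum()

# e.g. _toy_aperture_fraction() is close to 1 (about 0.998); dividing an
# aperture flux by this fraction is the same correction the pipeline applies
# with aperfrac.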
def badpix(eventname, control=None): tini = time.time() # Load the event event = me.loadevent(eventname) # Load the data me.updateevent(event, eventname, event.loadnext) # Create a new log starting from the old one. oldlogname = event.logname logname = event.eventname + ".log" log = le.Logedit(logname, oldlogname) event.logname = logname log.writelog('\nMARK: ' + time.ctime() + ': Starting poet_2badpix.') # ccampo 3/18/2011: do this in p5 # Julian observation date #event.fp.juldat = event.jdjf80 + event.fp.time / 86400.0 # ::::::::::::::::::::::: UNCERTAINTIES :::::::::::::::::::::::::::::::: # IRAC subarray data come with bogus uncertainties that are not linearly # related to photon noise. We scale them later, using the reduced chi # squared from the model fit. # ::::::::::::::::::::::: FLUX CONVERSION ::::::::::::::::::::::::::::: # Do we want flux (uJy/pix) or surface brightness (MJy/sr) units? If # doing photometry, convert to flux. Since we care about relative # numbers, it doesn't really matter. # Convert from surface brightness (MJy/sr) to flux units (uJy/pix) if event.fluxunits: log.writelog('Converting surface brightness to flux') event.data, event.uncd = btf.poet_bright2flux(event.data, event.uncd, event.posscl) if event.havecalaor: event.predata, event.preuncd = btf.poet_bright2flux( event.predata, event.preuncd, event.posscl) event.postdata, event.postuncd = btf.poet_bright2flux( event.postdata, event.postuncd, event.posscl) else: log.writelog('Did not convert bright to flux.') # Mean Background Estimate, from zodi model event.estbg = (np.mean(event.fp.zodi[np.where(event.fp.exist)]) + np.mean(event.fp.ism[np.where(event.fp.exist)]) + np.mean(event.fp.cib[np.where(event.fp.exist)])) if event.fluxunits: event.estbg *= (event.srperas * 1e12 * np.mean(event.posscl[0, :]) * np.mean(event.posscl[1, :])) # Bad Pixel Masking log.writelog('Find and fix bad pixels') # Get permanent bad pixel mask. if not event.ispmask[0]: log.writelog('\nPermanent Bad pixel mask not found!') else: hdu = pf.open(str(event.pmaskfile[0].decode('utf-8'))) if hdu[0].header['bitpix'] == -32: # if data type is float hdu[0].scale(type='int16') # cast it down to int16 event.pmask = hdu[0].data # IRS FIX: # IRS data contains the blue peak subarray while its pmask contains # the whole array (Hard coding) if event.photchan == 5: event.pmask = event.pmask[3:59, 86:127] # Do NOT define sigma, we have a different scheme for finding baddies # adds Spitzer rejects: fp.nsstrej & our rejects: fp.nsigrej event.mask = pbm.poet_badmask(event.data, event.uncd, event.pmask, event.inst.pcrit, event.bdmskd, event.inst.dcrit, event.fp, nimpos=event.nimpos) # User rejected pixels: if event.userrej != None: for i in np.arange(np.shape(event.userrej)[0]): event.mask[:, event.userrej[i, 0], event.userrej[i, 1], :] = 0 event.fp.userrej = np.sum(np.sum(1 - event.mask, axis=1), axis=1) event.fp.userrej = np.transpose(event.fp.userrej) - event.fp.nsstrej else: event.fp.userrej = np.zeros((int(event.npos), int(event.maxnimpos)), dtype=int) # define sigma here. 
    # adds median sky: fp.medsky
    event.meanim = pcb.poet_chunkbad(event.data, event.uncd, event.mask,
                                     event.nimpos, event.sigma, event.szchunk,
                                     event.fp, event.nscyc)
    log.writelog('Masks combined')

    if event.havecalaor:
        event.premask = pbm.poet_badmask(event.predata, event.preuncd,
                                         event.pmask, event.inst.pcrit,
                                         event.prebdmskd, event.inst.dcrit,
                                         event.prefp, nimpos=event.calnimpos)
        event.premeanim = pcb.poet_chunkbad(event.predata, event.preuncd,
                                            event.premask, event.calnimpos,
                                            event.sigma, event.szchunk,
                                            event.prefp, event.nscyc)
        event.postmask = pbm.poet_badmask(event.postdata, event.postuncd,
                                          event.pmask, event.inst.pcrit,
                                          event.postbdmskd, event.inst.dcrit,
                                          event.postfp, nimpos=event.calnimpos)
        event.postmeanim = pcb.poet_chunkbad(event.postdata, event.postuncd,
                                             event.postmask, event.calnimpos,
                                             event.sigma, event.szchunk,
                                             event.postfp, event.nscyc)

    # Save the data
    if event.instrument == 'mips':
        todel = ['bdmskd', 'brmskd']  # what to delete
    else:
        todel = ['bdmskd']
    me.saveevent(event, event.eventname + "_bpm",
                 save=['data', 'uncd', 'mask'], delete=todel)

    # Print time elapsed and close log:
    cwd = os.getcwd() + "/"
    log.writelog("Output files:")
    log.writelog("Data:")
    log.writelog(" " + cwd + event.eventname + "_bpm.dat")
    log.writelog(" " + cwd + event.eventname + "_bpm.h5")
    log.writelog("Log:")
    log.writelog(" " + cwd + logname)

    dt = t.hms_time(time.time() - tini)
    log.writeclose('\nBad pixel masking time (h:m:s): %s ' % dt)
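# Illustrative sketch (not pipeline code, and an assumption about the method):
# per the comments above, poet_chunkbad works on chunks of szchunk frames,
# flagging frame-to-frame outliers and recording the median sky.  A heavily
# simplified chunked sigma-rejection routine in that spirit:
import numpy as np


def _chunk_sigma_reject(data, mask, szchunk=64, sigma=4.0):
    """Flag pixels deviating from their chunk median by > sigma * std.

    data, mask: (nframes, ny, nx) arrays; mask is 1 = good, 0 = bad.
    Returns an updated copy of mask.
    """
    mask = mask.copy()
    for start in range(0, data.shape[0], szchunk):
        chunk = data[start:start + szchunk]
        good = mask[start:start + szchunk].astype(bool)
        med = np.nanmedian(np.where(good, chunk, np.nan), axis=0)
        std = np.nanstd(np.where(good, chunk, np.nan), axis=0)
        bad = np.abs(chunk - med) > sigma * std
        mask[start:start + szchunk][bad] = 0
    return mask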
def denoise(pcf, denoisedir): tini = time.time() # Create denoising log logname = event.logname log = le.Logedit(denoisedir + "/" + logname, logname) log.writelog("\nStart " + denoisedir + " denoising: " + time.ctime()) os.chdir(denoisedir) # copy denoise.pcf in denoisedir pcf.make_file("denoise.pcf") # Parse the attributes from the control file to the event: attrib = vars(pcf) keys = attrib.keys() for key in keys: if key != 'srcest': setattr(event, key, attrib.get(key).get()) for pos in range(event.npos): # Plot histogram of noisy wavelet coefficients ylim = histwc(event, event.wavelet, event.numlvls + 1, pos, log=log, denoised=False) # Plot first 'length' frames of noisy lightcurve at pixel srcest plotlc(event, pos, length=200, denoised=False) ''' maxlvls = pywt.dwt_max_level(event.nimpos[pos], pywt.Wavelet(event.wavelet)) # Determine the number of levels to denoise for i in range(1,maxlvls+1): if (2**i)*event.framtime < event.maxtime: numlvls = i else: break ''' log.writelog("Denoising will occur on the lowest " + str(event.numlvls) + " levels at position " + str(pos) + ".") # Determine the time resolution of the highled denoised level timeres = 2**(event.numlvls) * event.framtime log.writelog("Time resolution for position " + str(pos) + ", level " + str(event.numlvls) + " is " + str(timeres) + " seconds.") # Assess presence of NaNs and Infs in masked data print("Checking for NaNs and Infs.") data = (event.data[:, :, :, pos])[np.where(event.mask[:, :, :, pos])] if (np.sum(np.isnan(data)) + np.sum(np.isinf(data))) > 0: log.writelog( "***WARNING: Found NaNs and/or Infs in masked data at position " + str(pos) + ".") del (data) pool = mp.Pool(event.ncpu) for i in range(event.nx): for j in range(event.ny): #res=bayesshrink((event.data[:,j,i,pos])[np.where(event.mask[:,j,i,pos])], event.wavelet, event.numlvls, [j,i,pos]) #writedata(res) exec( 'res = pool.apply_async(' + event.threshold + ',((event.data[:,j,i,pos])[np.where(event.mask[:,j,i,pos])], event.wavelet, event.numlvls, [j,i,pos]),callback=writedata)' ) #res = exec('pool.apply_async(' + event.threshold + ',((event.data[:,j,i,pos])[np.where(event.mask[:,j,i,pos])], event.wavelet, event.numlvls, [j,i,pos]),callback=writedata)') #res = pool.apply_async(event.threshold,((event.data[:,j,i,pos])[np.where(event.mask[:,j,i,pos])], event.wavelet, event.numlvls, [j,i,pos]),callback=writedata) pool.close() pool.join() #res.wait() #Plot histogram of denoised wavelet coefficients histwc(event, event.wavelet, event.numlvls + 1, pos, log=log, denoised=True, ylim=ylim) # Plot first 'length' frames of denoised lightcurve at pixel srcest plotlc(event, pos, length=200, denoised=True) # Save print("\nFinished Denoising. Saving.") me.saveevent(event, event.eventname + "_den", save=['data', 'uncd', 'mask']) # Print time elapsed and close log: cwd = os.getcwd() + "/" log.writelog("Output files (" + event.denoisedir + "):") log.writelog("Data:") log.writelog(" " + cwd + event.eventname + "_den.dat") log.writelog(" " + cwd + event.eventname + "_den.h5") log.writelog("Log:") log.writelog(" " + cwd + event.logname) dt = t.hms_time(time.time() - tini) log.writeclose("\nEnd Denoising. Time (h:m:s): %s" % dt + " (" + event.denoisedir + ")") print("------------- ------------\n") return
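# Illustrative sketch (not pipeline code): denoise() above thresholds the
# wavelet coefficients of each pixel's time series on the lowest numlvls
# (finest) levels, with the threshold rule named in the pcf (e.g.
# bayesshrink).  A minimal per-pixel example with PyWavelets and a simple
# universal (VisuShrink-style) soft threshold -- the pipeline's actual
# threshold rule may differ:
import numpy as np
import pywt


def _denoise_timeseries(flux, wavelet="db4", numlvls=3):
    """Soft-threshold the finest `numlvls` detail levels of a 1-D series."""
    coeffs = pywt.wavedec(flux, wavelet, level=numlvls)
    # Estimate the noise scale from the finest detail level (MAD estimator):
    noise = np.median(np.abs(coeffs[-1])) / 0.6745
    thresh = noise * np.sqrt(2.0 * np.log(len(flux)))   # universal threshold
    coeffs[1:] = [pywt.threshold(c, thresh, mode="soft") for c in coeffs[1:]]
    return pywt.waverec(coeffs, wavelet)[:len(flux)]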