def __init__(self, eventpcf): tini = time.time() # Open new log based on the file name logname = eventpcf[:-4] + "ini.log" global log log = le.Logedit(logname) self.logname = logname # initialize Univ Univ.__init__(self) pcf = rd.read_pcf(eventpcf) self.initpars(pcf) self.calc(pcf) self.read() self.check() self.save() # Print time elapsed and close log: cwd = os.getcwd() + "/" log.writelog("\nOutput files:") log.writelog("Data:") log.writelog(" " + cwd + self.eventname + "_ini.dat") log.writelog(" " + cwd + self.eventname + "_ini.h5") log.writelog("Log:") log.writelog(" " + cwd + logname) log.writelog("Figures:") log.writelog(" " + cwd + self.eventname + "-fig101.png") dt = t.hms_time(time.time() - tini) log.writeclose('\nEnd init and read. Time (h:m:s): %s' % dt)
def __init__(self, eventpcf, cwd):
    """
    Event constructor: run the init-and-read stage in a given directory.

    Changes into `cwd`, opens a log, initializes the base class, parses
    the control file, runs the init/calc/read/check/save sequence,
    restores the original working directory, and optionally launches the
    next pipeline stage (p2).

    Parameters
    ----------
    eventpcf : string
        Name of the event's parameter control file.  The last four
        characters (presumably the '.pcf' extension — confirm) are
        stripped to build the log-file name.
    cwd : string
        Directory in which to run this stage and write outputs.
    """
    owd = os.getcwd()  # remember the caller's directory to restore later
    os.chdir(cwd)
    tini = time.time()  # stage start time, for the elapsed-time report
    # Open new log based on the file name
    logname = eventpcf[:-4] + "_ini.log"
    log = le.Logedit(logname)
    self.logname = logname
    # initialize Univ
    Univ.__init__(self)
    # read_pcf returns a sequence; this stage expects exactly one pcf
    # (the trailing comma unpacks — raises if more than one is returned).
    pcf, = rd.read_pcf(eventpcf, 'event', expand=False)
    self.initpars(pcf, log)
    self.calc(pcf, log)
    self.read(log)
    self.check(log)
    self.save()
    # Print time elapsed and close log:
    log.writelog("\nOutput files:")
    log.writelog("Data:")
    log.writelog(" " + cwd + '/' + self.eventname + "_ini.dat")
    log.writelog(" " + cwd + '/' + self.eventname + "_ini.h5")
    log.writelog("Log:")
    log.writelog(" " + logname)
    log.writelog("Figures:")
    log.writelog(" " + cwd + '/' + self.eventname + "-fig101.png")
    dt = t.hms_time(time.time() - tini)
    log.writeclose('\nEnd init and read. Time (h:m:s): %s' % dt)
    os.chdir(owd)
    # Chain into the next pipeline stage if requested by the control file:
    if self.runp2:
        os.system("python3 poet.py p2")
def photometry(event, pcf, photdir, mute):
    """
    Run the aperture/optimal photometry stage on an event.

    Allocates per-frame photometry arrays, performs multiprocess aperture
    photometry (or loads a previous aperture run), optionally runs optimal
    photometry or a PSF aperture correction, optionally builds the
    pixel-level-decorrelation (PLD) box, then saves the event and closes
    the stage log.

    Parameters
    ----------
    event : Event
        The event object; photometry results are stored in event.fp.
    pcf : pcf object
        Parameter control file whose attributes are copied onto `event`.
    photdir : string
        Subdirectory in which to run and to store photometry outputs.
    mute : bool
        Passed to the workers to suppress per-frame console output.
    """
    tini = time.time()
    # Create photometry log
    logname = event.logname
    log = le.Logedit(photdir + "/" + logname, logname)
    log.writelog("\nStart " + photdir + " photometry: " + time.ctime())
    parentdir = os.getcwd() + "/"
    os.chdir(photdir)
    # copy photom.pcf in photdir
    pcf.make_file("photom.pcf")
    # Parse the attributes from the control file to the event:
    attrib = vars(pcf)
    for key in attrib.keys():
        setattr(event, key, attrib.get(key).get())
    maxnimpos, npos = event.maxnimpos, event.npos
    # allocating frame parameters:
    event.fp.aplev = np.zeros((npos, maxnimpos))  # aperture flux
    event.fp.aperr = np.zeros((npos, maxnimpos))  # aperture error
    event.fp.nappix = np.zeros((npos, maxnimpos))  # number of aperture pixels
    event.fp.skylev = np.zeros((npos, maxnimpos))  # background sky flux level
    event.fp.skyerr = np.zeros((npos, maxnimpos))  # sky error
    event.fp.nskypix = np.zeros((npos, maxnimpos))  # number of sky pixels
    event.fp.nskyideal = np.zeros((npos, maxnimpos))  # ideal number of sky pixels
    event.fp.status = np.zeros((npos, maxnimpos))  # apphot return status
    event.fp.good = np.zeros((npos, maxnimpos))  # good flag
    # Aperture photometry:
    if not event.dooptimal or event.from_aper is None:
        # Multy Process set up:
        # Shared memory arrays allow only 1D Arrays :(
        aplev = Array("d", np.zeros(npos * maxnimpos))
        aperr = Array("d", np.zeros(npos * maxnimpos))
        nappix = Array("d", np.zeros(npos * maxnimpos))
        skylev = Array("d", np.zeros(npos * maxnimpos))
        skyerr = Array("d", np.zeros(npos * maxnimpos))
        nskypix = Array("d", np.zeros(npos * maxnimpos))
        nskyideal = Array("d", np.zeros(npos * maxnimpos))
        status = Array("d", np.zeros(npos * maxnimpos))
        good = Array("d", np.zeros(npos * maxnimpos))
        # Size of chunk of data each core will process:
        # BUGFIX: was true division ('/'), which yields a float chunksize in
        # Python 3 and hence float start/end indices for the workers.  Use
        # floor division, consistent with the centering stage.
        chunksize = maxnimpos // event.ncores + 1
        print("Number of cores: " + str(event.ncores))
        # Start Muti Procecess:
        processes = []
        for nc in range(event.ncores):
            start = nc * chunksize  # Starting index to process
            end = (nc + 1) * chunksize  # Ending index to process
            proc = Process(target=do_aphot,
                           args=(start, end, event, log, mute,
                                 aplev, aperr, nappix, skylev, skyerr,
                                 nskypix, nskyideal, status, good))
            processes.append(proc)
            proc.start()
        # Make sure all processes finish their work:
        for nc in range(event.ncores):
            processes[nc].join()
        # Put the results in the event. I need to reshape them:
        event.fp.aplev = np.asarray(aplev).reshape(npos, maxnimpos)
        event.fp.aperr = np.asarray(aperr).reshape(npos, maxnimpos)
        event.fp.nappix = np.asarray(nappix).reshape(npos, maxnimpos)
        event.fp.skylev = np.asarray(skylev).reshape(npos, maxnimpos)
        event.fp.skyerr = np.asarray(skyerr).reshape(npos, maxnimpos)
        event.fp.nskypix = np.asarray(nskypix).reshape(npos, maxnimpos)
        event.fp.nskyideal = np.asarray(nskyideal).reshape(npos, maxnimpos)
        event.fp.status = np.asarray(status).reshape(npos, maxnimpos)
        event.fp.good = np.asarray(good).reshape(npos, maxnimpos)
        # raw photometry (no sky subtraction):
        event.fp.apraw = (event.fp.aplev + (event.fp.skylev * event.fp.nappix))
        # Print results into the log if it wans't done before:
        for pos in np.arange(npos):
            for i in np.arange(event.nimpos[pos]):
                log.writelog(
                    '\nframe =%7d ' % i + 'pos =%5d ' % pos +
                    'y =%7.3f ' % event.fp.y[pos, i] +
                    'x =%7.3f' % event.fp.x[pos, i] + '\n' +
                    'aplev =%11.3f ' % event.fp.aplev[pos, i] +
                    'aperr =%9.3f ' % event.fp.aperr[pos, i] +
                    'nappix =%6.2f' % event.fp.nappix[pos, i] + '\n' +
                    'skylev=%11.3f ' % event.fp.skylev[pos, i] +
                    'skyerr=%9.3f ' % event.fp.skyerr[pos, i] +
                    'nskypix=%6.2f ' % event.fp.nskypix[pos, i] +
                    'nskyideal=%6.2f' % event.fp.nskyideal[pos, i] + '\n' +
                    'status=%7d ' % event.fp.status[pos, i] +
                    'good =%5d' % event.fp.good[pos, i],
                    mute=True)
    elif event.from_aper is not None:
        # Load previous aperture photometry if required for optimal:
        evt = me.loadevent(parentdir + event.from_aper + "/" +
                           event.eventname + "_pht")
        event.fp.aplev = evt.fp.aplev
        event.fp.aperr = evt.fp.aperr
        event.fp.nappix = evt.fp.nappix
        event.fp.skylev = evt.fp.skylev
        event.fp.skyerr = evt.fp.skyerr
        event.fp.nskypix = evt.fp.nskypix
        event.fp.nskyideal = evt.fp.nskyideal
        event.fp.status = evt.fp.status
        event.fp.good = evt.fp.good
        event.fp.apraw = evt.fp.apraw
    if event.dooptimal:
        # Optimal (PSF-weighted) photometry:
        ofp, psf = do.dooptphot(event.data, event.uncd, event.mask, event.fp,
                                event.srcest, event.nimpos,
                                rejlim=[10.45, 1000, 1.5],
                                order=1, resize=event.oresize, norm=1,
                                trim=event.otrim, log=log)
        event.fp = ofp
        event.psf = psf
    elif event.ispsf:
        # PSF aperture correction:
        log.writelog('Calculating PSF aperture:')
        event.aperfrac, event.psfnappix, event.psfskylev, \
            event.psfnskypix, event.psfnskyideal, event.psfstatus \
            = ap.apphot(event.psfim, event.psfctr,
                        event.photap * event.psfexpand,
                        event.skyin * event.psfexpand,
                        event.skyout * event.psfexpand,
                        med=event.skymed, expand=event.apscale,
                        nappix=True, skylev=True, nskypix=True,
                        nskyideal=True, status=True)
        # Add the sky contribution back, then normalize the aperture flux
        # by the fraction of the PSF the aperture contains:
        event.aperfrac += event.psfskylev * event.psfnappix
        event.fp.aplev /= event.aperfrac
        event.fp.aperr /= event.aperfrac
        log.writelog('Aperture contains %f of PSF.' % event.aperfrac)
    # For running pixel-level decorrelation (pld)
    if event.ispld and event.npos == 1:
        event.apdata = pld.pld_box(event.data, event.targpos, event.pldhw,
                                   event.fp.skylev)
        log.writelog(
            "Created " + str(event.pldhw * 2 + 1) + "x" +
            str(event.pldhw * 2 + 1) +
            " box around centroid for pixel-level decorrelation and normalized it in time."
        )
    elif event.ispld and event.npos != 1:
        log.writelog(
            "Could not perform pixel-level decorrelation because there is more than 1 nod position."
        )
    # save
    print("\nSaving ...")
    me.saveevent(event, event.eventname + "_pht",
                 delete=['data', 'uncd', 'mask'])
    # Print time elapsed and close log:
    cwd = os.getcwd() + "/"
    log.writelog("Output files (" + event.photdir + "):")
    log.writelog("Data:")
    log.writelog(" " + cwd + event.eventname + "_pht.dat")
    log.writelog("Log:")
    log.writelog(" " + cwd + logname)
    dt = t.hms_time(time.time() - tini)
    log.writeclose("\nEnd Photometry. Time (h:m:s): %s " % dt +
                   " (" + photdir + ")")
    print("-------------- ------------\n")
def centering(event, pcf, centerdir, owd):
    """
    Run the centering stage: find the target position in every frame.

    Optionally centers a supplied PSF, fits the mean image per nod
    position to seed the per-frame fits, then runs multiprocess per-frame
    centering, stores the results in event.fp, saves the event, and
    optionally launches the next pipeline stage (p4).

    Parameters
    ----------
    event : Event
        The event object; centering results are stored in event.fp.
    pcf : pcf object
        Parameter control file whose attributes are copied onto `event`.
    centerdir : string
        Subdirectory in which to run and to store centering outputs.
    owd : string
        Original working directory to restore before finishing.
    """
    os.chdir(centerdir)
    tini = time.time()
    # Create centering log
    log = le.Logedit(event.logname, event.logname)
    log.writelog("\nStart " + centerdir + " centering: " + time.ctime())
    # Parse the attributes from the control file to the event:
    attrib = vars(pcf)
    for key in attrib.keys():
        setattr(event, key, attrib.get(key))
    # Check least asym parameters work:
    if event.method in ['lac', 'lag']:
        if event.ctrim < (event.cradius + event.csize) and event.ctrim != 0:
            event.ctrim = event.cradius + event.csize + 1
            log.writelog('Trim radius is too small, changed to: %i'
                         % event.ctrim)
        if event.psfctrim < (event.psfcrad + event.psfcsize) and \
                event.psfctrim != 0:
            event.psfctrim = event.psfcrad + event.psfcsize + 1
            log.writelog('PSF Trim radius is too small, changed to: %i'
                         % event.psfctrim)
    # Centering bad pixel mask (1 = usable, 0 = masked):
    centermask = np.ones((event.ny, event.nx))
    if event.ymask is not None:
        ymask = np.asarray(event.ymask, int)
        xmask = np.asarray(event.xmask, int)
        for i in range(len(ymask)):
            centermask[ymask[i], xmask[i]] = 0
    # PSF:
    # Re-evaluate if a PSF has been redefined:
    if event.newpsf is not None:
        event.ispsf = os.path.isfile(event.newpsf)
        if event.ispsf:
            event.psffile = event.newpsf
            log.writelog('The PSF file has been redefined!')
            log.writelog("PSF: " + event.psffile)
    # PSF Centering:
    if event.ispsf:
        event.psfim = fits.getdata(event.psffile)
        # Guess of the center of the PSF (center of psfim)
        psfctrguess = np.asarray(np.shape(event.psfim)) // 2
        # Do not find center of PSF:
        if event.nopsfctr:
            event.psfctr = psfctrguess
        # Find center of PSF:
        else:
            # bpf/ipf cannot center the PSF itself; fall back to fgc.
            if event.method == "bpf" or event.method == "ipf":
                method = "fgc"
            else:
                method = event.method
            event.psfctr, extra = cd.centerdriver(
                method, event.psfim, psfctrguess, event.psfctrim,
                event.psfcrad, event.psfcsize,
                npskyrad=(event.npskyin, event.npskyout))
        log.writelog('PSF center found.')
    else:
        event.psfim = None
        event.psfctr = None
        log.writelog('No PSF supplied.')
    # Find center of the mean Image:
    event.targpos = np.zeros((2, event.npos))
    # Override target position estimate if specified
    # IDIOM FIX: was 'type(x) != type(None)'; identity check is the
    # standard Python None test.
    if pcf.srcesty is not None and pcf.srcestx is not None:
        srcesty = str(pcf.srcesty).split(',')
        srcestx = str(pcf.srcestx).split(',')
        if len(srcestx) != len(srcesty):
            print("WARNING: Length of srcest inputs do not match!")
        if len(srcestx) != event.npos or len(srcesty) != event.npos:
            print("WARNING: Length of srcest inputs do not match npos!")
        if len(srcestx) > 1 or len(srcesty) > 1:
            print(
                "Verify that srcest override order matches telescope pos order."
            )
        for pos in range(event.npos):
            event.srcest[0, pos] = srcesty[pos]
            event.srcest[1, pos] = srcestx[pos]
    for pos in range(event.npos):
        print("Fitting mean image at pos: " + str(pos))
        meanim = event.meanim[:, :, pos]
        guess = event.srcest[:, pos]
        targpos, extra = cd.centerdriver(
            event.method, meanim, guess, event.ctrim,
            event.cradius, event.csize,
            fitbg=event.fitbg, psf=event.psfim, psfctr=event.psfctr,
            expand=event.expand,
            npskyrad=(event.npskyin, event.npskyout))
        event.targpos[:, pos] = targpos
    log.writelog("Center position(s) of the mean Image(s):\n" +
                 str(np.transpose(event.targpos)))
    # Multy Process set up:
    # Shared memory arrays allow only 1D Arrays :(
    x = Array("d", np.zeros(event.npos * event.maxnimpos))
    y = Array("d", np.zeros(event.npos * event.maxnimpos))
    xerr = Array("d", np.zeros(event.npos * event.maxnimpos))
    yerr = Array("d", np.zeros(event.npos * event.maxnimpos))
    xsig = Array("d", np.zeros(event.npos * event.maxnimpos))
    ysig = Array("d", np.zeros(event.npos * event.maxnimpos))
    rot = Array("d", np.zeros(event.npos * event.maxnimpos))
    noisepix = Array("d", np.zeros(event.npos * event.maxnimpos))
    flux = Array("d", np.zeros(event.npos * event.maxnimpos))
    sky = Array("d", np.zeros(event.npos * event.maxnimpos))
    goodfit = Array("d", np.zeros(event.npos * event.maxnimpos))
    # Size of chunk of data each core will process:
    chunksize = event.maxnimpos // event.ccores + 1
    print("Number of cores: " + str(event.ccores))
    # Start Muti Procecess:
    processes = []
    for nc in range(event.ccores):
        start = nc * chunksize  # Starting index to process
        end = (nc + 1) * chunksize  # Ending index to process
        proc = Process(target=do_center,
                       args=(start, end, event, centermask, log,
                             x, y, flux, sky, goodfit, xerr, yerr,
                             xsig, ysig, noisepix, rot))
        processes.append(proc)
        proc.start()
    # Make sure all processes finish their work:
    for nc in range(event.ccores):
        processes[nc].join()
    # Put the results in the event. I need to reshape them:
    event.fp.x = np.asarray(x).reshape(event.npos, event.maxnimpos)
    event.fp.y = np.asarray(y).reshape(event.npos, event.maxnimpos)
    event.fp.xerr = np.asarray(xerr).reshape(event.npos, event.maxnimpos)
    event.fp.yerr = np.asarray(yerr).reshape(event.npos, event.maxnimpos)
    event.fp.noisepix = np.asarray(noisepix).reshape(event.npos,
                                                     event.maxnimpos)
    # If Gaussian fit:
    if event.method == 'fgc' or event.method == 'rfgc':
        event.fp.xsig = np.asarray(xsig).reshape(event.npos, event.maxnimpos)
        event.fp.ysig = np.asarray(ysig).reshape(event.npos, event.maxnimpos)
        event.fp.rot = np.asarray(rot).reshape(event.npos, event.maxnimpos)
    # If PSF fit:
    if event.method in ["ipf", "bpf"]:
        event.fp.flux = np.asarray(flux).reshape(event.npos, event.maxnimpos)
        event.fp.psfsky = np.asarray(sky).reshape(event.npos, event.maxnimpos)
        event.fp.goodfit = np.asarray(goodfit).reshape(event.npos,
                                                       event.maxnimpos)
    # Pixel R position: distance from the center of the pixel.
    event.fp.r = np.sqrt((event.fp.x % 1.0 - 0.5)**2.0 +
                         (event.fp.y % 1.0 - 0.5)**2.0)
    log.writelog("End frames centering.")
    # Save
    print("\nSaving")
    if event.denoised:
        me.saveevent(event, event.eventname + "_ctr",
                     save=['dendata', 'data', 'uncd', 'mask'])
    else:
        me.saveevent(event, event.eventname + "_ctr",
                     save=['data', 'uncd', 'mask'])
    # Print time elapsed and close log:
    cwd = os.getcwd()
    log.writelog("Output files (" + event.centerdir + "):")
    log.writelog("Data:")
    log.writelog(" " + cwd + '/' + event.eventname + "_ctr.dat")
    log.writelog(" " + cwd + '/' + event.eventname + "_ctr.h5")
    log.writelog("Log:")
    log.writelog(" " + cwd + '/' + event.logname)
    dt = t.hms_time(time.time() - tini)
    log.writeclose("\nEnd Centering. Time (h:m:s): %s" % dt +
                   " (" + event.centerdir + ")")
    print("------------- ------------\n")
    os.chdir(owd)
    # Chain into the next pipeline stage if requested:
    if event.runp4:
        os.system("python3 poet.py p4 %s" % event.centerdir)
def checks(eventname, period=None, ephtime=None, cwd=None):
    """
    Run the p5 sanity-check stage: timing corrections, statistics, plots.

    Loads a saved event, applies the BJD light-time and UTC->TDB
    corrections, computes orbital phase, logs centering/photometry
    statistics, produces diagnostic figures 501-506, and saves the
    checked event.

    Parameters
    ----------
    eventname : string
        Name of the saved event to load.
    period : 2-element sequence or None
        Optional (value, uncertainty) override for the orbital period [days].
    ephtime : 2-element sequence or None
        Optional (value, uncertainty) override for the ephemeris time [JD].
    cwd : string or None
        Directory to run in; defaults to the current working directory.

    Returns
    -------
    event : Event
        The updated event (None return if the Horizons file is missing).
    """
    if cwd is None:
        cwd = os.getcwd()
    os.chdir(cwd)
    # Load the Event
    event = me.loadevent(eventname)
    # Create a log
    oldlogname = event.logname
    logname = event.eventname + "_p5.log"
    log = le.Logedit(logname, oldlogname)
    log.writelog('\nStart Checks: ' + time.ctime())
    # If p5 run after p3: we are using results from PSFfit:
    if not hasattr(event, "phottype"):
        event.phottype = "psffit"
        try:
            os.mkdir("psffit/")
        except OSError:
            # Directory already exists; keep going.
            pass
        os.chdir("psffit/")
    # Move frame parameters to fit Kevin's syntax:
    # event.fp.param --> event.param
    event.filenames = event.fp.filename
    event.x = event.fp.x
    event.y = event.fp.y
    event.sx = event.fp.sx
    event.sy = event.fp.sy
    event.time = event.fp.time
    event.pos = event.fp.pos
    event.frmvis = event.fp.frmvis
    event.filename = event.eventname
    if event.phottype == "aper":
        event.good = event.fp.good
        event.aplev = event.fp.aplev
        event.aperr = event.fp.aperr
        event.background = event.fp.skylev
        log.writelog('Photometry method is APERTURE')
    elif event.phottype == "psffit":
        event.aplev = event.fp.psfflux
        event.background = event.fp.psfsky
        # FINDME: do something with aperr and good
        event.aperr = 0.0025 * np.mean(
            event.fp.psfflux) * (event.aplev * 0 + 1)
        event.good = np.ones(np.shape(event.aplev))
        log.writelog('Photometry method is PSF FITTING')
    elif event.phottype == "optimal":
        event.good = event.fp.ogood
        event.aplev = event.fp.ophotlev
        event.aperr = event.fp.ophoterr
        # FINDME: Background from optimal?
        event.background = event.fp.psfsky
        log.writelog('Photometry method is OPTIMAL')
    # UPDATE period AND ephtime
    if period is not None:
        event.period = period[0]
        event.perioderr = period[1]
    if ephtime is not None:
        event.ephtime = ephtime[0]
        event.ephtimeerr = ephtime[1]
    log.writelog("\nCurrent event = " + event.eventname)
    log.writelog("Kurucz file = " + event.kuruczfile)
    log.writelog("Filter file = " + event.filtfile)
    # Light-time correction to BJD:
    # Julian observation date
    event.juldat = event.fp.juldat = event.j2kjd + event.fp.time / 86400.0
    if not event.ishorvec:
        log.writeclose('\nHorizon file not found!')
        return
    print("Calculating BJD correction...")
    event.fp.bjdcor = stc.suntimecorr(event.ra, event.dec, event.fp.juldat,
                                      event.horvecfile)
    # Get bjd times:
    event.bjdcor = event.fp.bjdcor
    event.bjdutc = event.fp.juldat + event.fp.bjdcor / 86400.0  # utc bjd date
    event.bjdtdb = np.empty(event.bjdutc.shape)
    for i in range(event.bjdtdb.shape[0]):
        event.bjdtdb[i] = utc_tt.utc_tdb(
            event.bjdutc[i])  # terrestial bjd date
    # ccampo 3/18/2011: check which units phase should be in
    try:
        if event.tep.ttrans.unit == "BJDTDB":
            event.timestd = "tdb"
            event.fp.phase = tp.time2phase(event.bjdtdb, event.ephtime,
                                           event.period, event.ecltype)
        else:
            event.timestd = "utc"
            event.fp.phase = tp.time2phase(event.bjdutc, event.ephtime,
                                           event.period, event.ecltype)
    except Exception:
        # Missing/incomplete tep info: default to UTC phase.
        event.timestd = "utc"
        event.fp.phase = tp.time2phase(event.bjdutc, event.ephtime,
                                       event.period, event.ecltype)
    # assign phase variable
    event.phase = event.fp.phase
    # verify leapsecond correction
    hfile = event.filenames[0, 0]
    try:
        image, event.header = pf.getdata(hfile.decode('utf-8'), header=True)
        dt = ((event.bjdtdb - event.bjdutc) * 86400.0)[0, 0]
        dt2 = event.header['ET_OBS'] - event.header['UTCS_OBS']
        log.writelog('Leap second correction : ' + str(dt) + ' = ' + str(dt2))
    except Exception:
        log.writelog('Could not verify leap-second correction.')
    # BUGFIX: np.str was removed in NumPy 1.24; use the builtin str.
    log.writelog('Min and Max light-time correction: ' +
                 str(np.amin(event.fp.bjdcor)) + ', ' +
                 str(np.amax(event.fp.bjdcor)) + ' seconds')
    # Verify light-time correction
    try:
        image, event.header = pf.getdata(hfile.decode('utf-8'), header=True)
        try:
            log.writelog('BJD Light-time correction: ' +
                         str(event.bjdcor[0, 0]) + ' = ' +
                         str((event.header['BMJD_OBS'] -
                              event.header['MJD_OBS']) * 86400))
        except Exception:
            log.writelog('HJD Light-time correction: ' +
                         str(event.bjdcor[0, 0]) + ' = ' +
                         str((event.header['HMJD_OBS'] -
                              event.header['MJD_OBS']) * 86400))
    except Exception:
        log.writelog('Could not verify light-time correction.')
    # Number of good frames should be > 95%
    log.writelog("Good Frames = %7.3f" % (np.mean(event.good) * 100) + " %")
    log.writelog('\nCentering: X mean X stddev Y mean Y stddev')
    for pos in np.arange(event.npos):
        log.writelog(
            'position %2d:' % pos +
            ' %10.5f' % np.mean(event.x[pos, np.where(event.good[pos])]) +
            ' %9.5f' % np.std(event.x[pos, np.where(event.good[pos])]) +
            ' %10.5f' % np.mean(event.y[pos, np.where(event.good[pos])]) +
            ' %9.5f' % np.std(event.y[pos, np.where(event.good[pos])]))
    # COMPUTE RMS POSITION CONSISTENCY
    event.xprecision = np.sqrt(np.median(np.ediff1d(event.x)**2))
    event.yprecision = np.sqrt(np.median(np.ediff1d(event.y)**2))
    log.writelog('RMS of x precision = ' +
                 str(np.round(event.xprecision, 4)) + ' pixels.')
    log.writelog('RMS of y precision = ' +
                 str(np.round(event.yprecision, 4)) + ' pixels.')
    if event.phottype == "aper":
        log.writelog('\nCenter & photometry half-width/aperture sizes = ' +
                     str(event.ctrim) + ', ' + str(event.photap) + ' pixels.')
    log.writelog('Period = ' + str(event.period) + ' +/- ' +
                 str(event.perioderr) + ' days')
    log.writelog('Ephemeris = ' + str(event.ephtime) + ' +/- ' +
                 str(event.ephtimeerr) + ' JD')
    # Per-position marker styles for the diagnostic plots:
    fmt1 = [
        'C0o', 'C1o', 'C2o', 'ro', 'ko', 'co', 'mo', 'bs', 'gs', 'ys',
        'rs', 'ks', 'cs', 'ms'
    ]
    fmt2 = ['b,', 'g,', 'y,', 'r,']
    plt.figure(501)
    plt.clf()
    plt.figure(502, figsize=(8, 12))
    plt.clf()
    plt.figure(503)
    plt.clf()
    plt.figure(504)
    plt.clf()
    plt.figure(505)
    plt.clf()
    plt.figure(506)
    plt.clf()
    for pos in np.arange(event.npos):
        wheregood = np.where(event.good[pos, :])
        # CHOOSE ONLY GOOD FRAMES FOR PLOTTING
        phase = event.phase[pos, :][wheregood]
        aplev = event.aplev[pos, :][wheregood]
        jdtime = event.bjdutc[pos, :][wheregood]
        background = event.background[pos, :][wheregood]
        # COMPUTE X AND Y PIXEL LOCATION RELATIVE TO ...
        if event.npos > 1:
            # CENTER OF EACH PIXEL
            y = (event.y[pos, :] - np.round(event.y[pos, :]))[wheregood]
            x = (event.x[pos, :] - np.round(event.x[pos, :]))[wheregood]
        else:
            # CENTER OF MEDIAN PIXEL
            y = (event.y[pos, :] - np.round(np.median(event.y)))[wheregood]
            x = (event.x[pos, :] - np.round(np.median(event.x)))[wheregood]
        # SORT aplev BY x, y AND radial POSITIONS
        rad = np.sqrt(x**2 + y**2)
        xx = np.sort(x)
        yy = np.sort(y)
        # NOTE(review): widths are always taken from position 0 here,
        # regardless of `pos` — confirm this is intentional.
        sxx = np.sort(event.sx[0])
        syy = np.sort(event.sy[0])
        rr = np.sort(rad)
        xaplev = aplev[np.argsort(x)]
        yaplev = aplev[np.argsort(y)]
        raplev = aplev[np.argsort(rad)]
        # BIN RESULTS FOR PLOTTING POSITION SENSITIVITY EFFECT
        nobj = aplev.size
        nbins = int(120 / event.npos)
        binxx = np.zeros(nbins)
        binyy = np.zeros(nbins)
        binsxx = np.zeros(nbins)
        binsyy = np.zeros(nbins)
        binrr = np.zeros(nbins)
        binxaplev = np.zeros(nbins)
        binyaplev = np.zeros(nbins)
        binraplev = np.zeros(nbins)
        binxapstd = np.zeros(nbins)
        binyapstd = np.zeros(nbins)
        binrapstd = np.zeros(nbins)
        binphase = np.zeros(nbins)
        binaplev = np.zeros(nbins)
        binapstd = np.zeros(nbins)
        for i in range(nbins):
            start = int(1. * i * nobj / nbins)
            end = int(1. * (i + 1) * nobj / nbins)
            binxx[i] = np.mean(xx[start:end])
            binyy[i] = np.mean(yy[start:end])
            binsxx[i] = np.mean(sxx[start:end])
            binsyy[i] = np.mean(syy[start:end])
            binrr[i] = np.mean(rr[start:end])
            binxaplev[i] = np.median(xaplev[start:end])
            binyaplev[i] = np.median(yaplev[start:end])
            binraplev[i] = np.median(raplev[start:end])
            binxapstd[i] = np.std(xaplev[start:end]) / np.sqrt(end - start)
            binyapstd[i] = np.std(yaplev[start:end]) / np.sqrt(end - start)
            binrapstd[i] = np.std(raplev[start:end]) / np.sqrt(end - start)
            binphase[i] = np.mean(phase[start:end])
            binaplev[i] = np.median(aplev[start:end])
            binapstd[i] = np.std(aplev[start:end]) / np.sqrt(end - start)
        # PLOT 1: flux
        plt.figure(501)
        plt.errorbar(binphase, binaplev, binapstd, fmt=fmt1[pos],
                     linewidth=1, label=('pos %i' % (pos)))
        plt.title(event.planetname + ' Phase vs. Binned Flux')
        plt.xlabel('Orbital Phase')
        plt.ylabel('Flux')
        plt.legend(loc='best')
        # PLOT 2: position-flux
        plt.figure(502)
        plt.subplot(2, 1, 1)
        plt.title(event.planetname + ' Position vs. Binned Flux')
        plt.errorbar(binyy, binyaplev, binyapstd, fmt=fmt1[pos],
                     label=('pos %i y' % (pos)))
        plt.ylabel('Flux')
        plt.legend(loc='best')
        plt.subplot(2, 1, 2)
        plt.errorbar(binxx, binxaplev, binxapstd, fmt=fmt1[pos],
                     label=('pos %i x' % (pos)))
        plt.xlabel('Pixel Postion')
        plt.ylabel('Flux')
        plt.legend(loc='best')
        #PLOT 3: position-phase
        plt.figure(503)
        plt.plot(phase, x, 'b,')
        plt.plot(phase, y, 'r,')
        plt.title(event.planetname + ' Phase vs. Position')
        plt.xlabel('Orbital Phase')
        plt.ylabel('Pixel Position')
        plt.legend('xy')
        #PLOT 4: flux-radial distance
        plt.figure(504)
        plt.errorbar(binrr, binraplev, binrapstd, fmt=fmt1[pos],
                     label=('pos %i' % (pos)))
        plt.title(event.planetname + ' Radial Distance vs. Flux')
        plt.xlabel('Distance From Center of Pixel')
        plt.ylabel('Flux')
        plt.legend(loc='best')
        # ::::::::::: Background setting :::::::::::::::::
        if np.size(background) != 0:
            # number of points per bin:
            npoints = 42
            nbins = int(np.size(background) / npoints)
            medianbg = np.zeros(nbins)
            bphase = np.zeros(nbins)  # background bin phase
            bintime = np.zeros(nbins)  # background bin JD time
            for i in range(nbins):
                start = int(1.0 * i * npoints)
                end = int(1.0 * (i + 1) * npoints)
                medianbg[i] = np.median(background[start:end])
                bphase[i] = np.mean(phase[start:end])
                bintime[i] = np.mean(jdtime[start:end])
            # PLOT 5: background-phase
            day = int(np.floor(np.amin(jdtime)))
            timeunits1 = jdtime - day
            timeunits2 = bintime - day
            xlabel = 'JD - ' + str(day)
            if event.ecltype == 's':
                timeunits1 = phase
                timeunits2 = bphase
                xlabel = 'Phase'
            plt.figure(505)
            plt.plot(timeunits1, background, color='0.45',
                     linestyle='None', marker=',')
            if np.size(background) > 10000:
                plt.plot(timeunits2, medianbg, fmt2[pos],
                         label='median bins')
            plt.title(event.planetname + ' Background level')
            plt.xlabel(xlabel)
            plt.ylabel('Flux')
        # PLOT 6: width-flux
        plt.figure(506)
        plt.subplot(2, 1, 1)
        plt.title(event.planetname + ' Gaussian Width vs. Binned Flux')
        plt.errorbar(binsyy, binyaplev, binyapstd, fmt=fmt1[pos],
                     label=('width %i y' % (pos)))
        plt.ylabel('Flux')
        plt.legend(loc='best')
        plt.subplot(2, 1, 2)
        plt.errorbar(binsxx, binxaplev, binxapstd, fmt=fmt1[pos],
                     label=('width %i x' % (pos)))
        plt.xlabel('Gaussian Width')
        plt.ylabel('Flux')
        plt.legend(loc='best')
    figname1 = str(event.eventname) + "-fig501.png"
    figname2 = str(event.eventname) + "-fig502.png"
    figname3 = str(event.eventname) + "-fig503.png"
    figname4 = str(event.eventname) + "-fig504.png"
    figname5 = str(event.eventname) + "-fig505.png"
    figname6 = str(event.eventname) + "-fig506.png"
    plt.figure(501)
    plt.savefig(figname1)
    plt.figure(502)
    plt.savefig(figname2)
    plt.figure(503)
    plt.savefig(figname3)
    plt.figure(504)
    plt.savefig(figname4)
    plt.figure(505)
    # NOTE(review): timeunits1/background come from the last loop
    # iteration; this single point only exists to carry the legend label.
    plt.plot(timeunits1[0], background[0], color='0.45', linestyle='None',
             marker=',', label='all points')
    plt.legend(loc='best')
    plt.savefig(figname5)
    plt.figure(506)
    plt.savefig(figname6)
    # Saving
    me.saveevent(event, event.eventname + "_p5c")
    cwd = os.getcwd() + "/"
    # Print outputs, end-time, and close log.
    log.writelog("Output files:")
    log.writelog("Data:")
    log.writelog(" " + cwd + event.eventname + "_p5c.dat")
    log.writelog("Log:")
    log.writelog(" " + cwd + logname)
    log.writelog("Figures:")
    log.writelog(" " + cwd + figname1)
    log.writelog(" " + cwd + figname2)
    log.writelog(" " + cwd + figname3)
    log.writelog(" " + cwd + figname4)
    log.writelog(" " + cwd + figname5)
    log.writelog(" " + cwd + figname6)
    log.writeclose('\nEnd Checks: ' + time.ctime())
    return event
def checks1(eventname, cwd, period=None, ephtime=None): owd = os.getcwd() os.chdir(cwd) # Load the Event event = me.loadevent(eventname) # Create a log oldlogname = event.logname logname = event.eventname + "_p5.log" log = le.Logedit(logname, oldlogname) log.writelog('\nStart Checks: ' + time.ctime()) # If p5 run after p3: we are using results from PSFfit: if not hasattr(event, "phottype"): event.phottype = "psffit" try: os.mkdir("psffit/") except: pass os.chdir("psffit/") # Move frame parameters to fit Kevin's syntax: # event.fp.param --> event.param event.filenames = event.fp.filename event.x = event.fp.x event.y = event.fp.y event.time = event.fp.time event.pos = event.fp.pos event.frmvis = event.fp.frmvis event.filename = event.eventname event.aplev = event.fp.aplev event.background = event.fp.skylev event.good = event.fp.good if event.phottype == "aper": event.aperr = event.fp.aperr log.writelog('Photometry method is APERTURE') elif event.phottype == "var": event.aperr = event.fp.aperr log.writelog('Photometry method is VARIABLE APERTURE') elif event.phottype == "ell": event.aperr = event.fp.aperr log.writelog('Photometry method is ELLIPTICAL APERTURE') elif event.phottype == "psffit": # FINDME: do something with aperr event.aperr = .0025 * np.mean(event.aplev) * np.ones( np.shape(event.aplev)) log.writelog('Photometry method is PSF FITTING') elif event.phottype == "optimal": event.aperr = event.fp.aperr log.writelog('Photometry method is OPTIMAL') # UPDATE period AND ephtime if period is not None: event.period = period[0] event.perioderr = period[1] if ephtime is not None: event.ephtime = ephtime[0] event.ephtimeerr = ephtime[1] log.writelog("\nCurrent event = " + event.eventname) log.writelog("Kurucz file = " + event.kuruczfile) log.writelog("Filter file = " + event.filtfile) # Light-time correction to BJD: # Julian observation date #event.juldat = event.jdjf80 + event.fp.time / 86400.0 event.juldat = event.fp.juldat = event.j2kjd + event.fp.time / 86400.0 
if not event.ishorvec: log.writeclose('\nHorizon file not found!') return print("Calculating BJD correction...") event.fp.bjdcor = np.zeros(event.fp.juldat.shape) # Sometimes bad files are just missing files, in which case they have # times of 0, which causes problem in the following interpolation. So # we must mask out these files. We don't use the event.fp.good mask # because we may want to know the bjd of bad images nonzero = np.where(event.fp.time != 0.0) event.fp.bjdcor[nonzero] = stc.suntimecorr(event.ra, event.dec, event.fp.juldat[nonzero], event.horvecfile) # Get bjd times: event.bjdcor = event.fp.bjdcor #event.bjddat = event.fp.juldat + event.fp.bjdcor / 86400.0 event.bjdutc = event.fp.juldat + event.fp.bjdcor / 86400.0 # utc bjd date event.bjdtdb = np.empty(event.bjdutc.shape) for i in range(event.bjdtdb.shape[0]): event.bjdtdb[i] = utc_tt.utc_tdb(event.bjdutc[i], event.topdir + '/' + event.leapdir) # terrestial bjd date # ccampo 3/18/2011: check which units phase should be in try: if event.tep.ttrans.unit == "BJDTDB": event.timestd = "tdb" event.fp.phase = tp.time2phase(event.bjdtdb, event.ephtime, event.period, event.ecltype) else: event.timestd = "utc" event.fp.phase = tp.time2phase(event.bjdutc, event.ephtime, event.period, event.ecltype) except: event.timestd = "utc" event.fp.phase = tp.time2phase(event.bjdutc, event.ephtime, event.period, event.ecltype) # assign phase variable event.phase = event.fp.phase # ccampo 3/18/2011: moved this above # Eclipse phase, BJD #event.fp.phase = tp.time2phase(event.fp.juldat + event.fp.bjdcor / 86400.0, # event.ephtime, event.period, event.ecltype) # verify leapsecond correction hfile = event.filenames[0, 0] try: image, event.header = fits.getdata(hfile, header=True) dt = ((event.bjdtdb - event.bjdutc) * 86400.0)[0, 0] dt2 = event.header['ET_OBS'] - event.header['UTCS_OBS'] log.writelog('Leap second correction : ' + str(dt) + ' = ' + str(dt2)) except: log.writelog('Could not verify leap-second correction.') 
log.writelog('Min and Max light-time correction: ' + np.str(np.amin(event.fp.bjdcor)) + ', ' + np.str(np.amax(event.fp.bjdcor)) + ' seconds') # Verify light-time correction try: image, event.header = fits.getdata(hfile, header=True) try: log.writelog('BJD Light-time correction: ' + str(event.bjdcor[0, 0]) + ' = ' + str((event.header['BMJD_OBS'] - event.header['MJD_OBS']) * 86400)) except: log.writelog('HJD Light-time correction: ' + str(event.bjdcor[0, 0]) + ' = ' + str((event.header['HMJD_OBS'] - event.header['MJD_OBS']) * 86400)) except: log.writelog('Could not verify light-time correction.') # Number of good frames should be > 95% log.writelog("Good Frames = %7.3f" % (np.mean(event.good) * 100) + " %") log.writelog('\nCentering: X mean X stddev Y mean Y stddev') for pos in range(event.npos): log.writelog( 'position %2d:' % pos + ' %10.5f' % np.mean(event.x[pos, np.where(event.good[pos])]) + ' %9.5f' % np.std(event.x[pos, np.where(event.good[pos])]) + ' %10.5f' % np.mean(event.y[pos, np.where(event.good[pos])]) + ' %9.5f' % np.std(event.y[pos, np.where(event.good[pos])])) # COMPUTE RMS POSITION CONSISTENCY event.xprecision = np.sqrt(np.mean(np.ediff1d(event.x)**2)) event.yprecision = np.sqrt(np.mean(np.ediff1d(event.y)**2)) log.writelog('RMS of x precision = ' + str(np.round(event.xprecision, 4)) + ' pixels.') log.writelog('RMS of y precision = ' + str(np.round(event.yprecision, 4)) + ' pixels.') if event.phottype == "aper": log.writelog('\nCenter & photometry half-width/aperture sizes = ' + str(event.ctrim) + ', ' + str(event.photap) + ' pixels.') log.writelog('Period = ' + str(event.period) + ' +/- ' + str(event.perioderr) + ' days') log.writelog('Ephemeris = ' + str(event.ephtime) + ' +/- ' + str(event.ephtimeerr) + ' JD') # Compute elliptical area if gaussian centering if event.method == 'fgc' or event.method == 'rfgc': event.fp.ellarea = np.pi * (3 * event.fp.xsig) * (3 * event.fp.ysig) fmt1 = [ 'bo', 'go', 'yo', 'ro', 'ko', 'co', 'mo', 'bs', 'gs', 'ys', 
'rs', 'ks', 'cs', 'ms' ] fmt2 = ['b,', 'g,', 'y,', 'r,'] fmt3 = ['b.', 'g.', 'y.', 'r.'] plt.figure(501) plt.clf() plt.figure(502, figsize=(8, 12)) plt.clf() plt.figure(503) plt.clf() plt.figure(504) plt.clf() plt.figure(505) plt.clf() for pos in range(event.npos): wheregood = np.where(event.good[pos, :]) # CHOOSE ONLY GOOD FRAMES FOR PLOTTING phase = event.phase[pos, :][wheregood] aplev = event.aplev[pos, :][wheregood] jdtime = event.bjdutc[pos, :][wheregood] background = event.background[pos, :][wheregood] noisepix = event.fp.noisepix[pos, :][wheregood] if event.method == "fgc" or event.method == "rfgc": ellarea = event.fp.ellarea[pos, :][wheregood] rot = event.fp.rot[pos, :][wheregood] # COMPUTE X AND Y PIXEL LOCATION RELATIVE TO ... if event.npos > 1: # CENTER OF EACH PIXEL y = (event.y[pos, :] - np.round(event.y[pos, :]))[wheregood] x = (event.x[pos, :] - np.round(event.x[pos, :]))[wheregood] else: # CENTER OF MEDIAN PIXEL y = (event.y[pos, :] - np.round(np.median(event.y)))[wheregood] x = (event.x[pos, :] - np.round(np.median(event.x)))[wheregood] # SORT aplev BY x, y AND radial POSITIONS rad = np.sqrt(x**2 + y**2) xx = np.sort(x) yy = np.sort(y) rr = np.sort(rad) xaplev = aplev[np.argsort(x)] yaplev = aplev[np.argsort(y)] raplev = aplev[np.argsort(rad)] # BIN RESULTS FOR PLOTTING POSITION SENSITIVITY EFFECT nobj = aplev.size nbins = 120 // event.npos binxx = np.zeros(nbins) binyy = np.zeros(nbins) binrr = np.zeros(nbins) binxaplev = np.zeros(nbins) binyaplev = np.zeros(nbins) binraplev = np.zeros(nbins) binxapstd = np.zeros(nbins) binyapstd = np.zeros(nbins) binrapstd = np.zeros(nbins) binphase = np.zeros(nbins) binaplev = np.zeros(nbins) binapstd = np.zeros(nbins) binnpix = np.zeros(nbins) for i in range(nbins): start = int(1. * i * nobj / nbins) end = int(1. 
* (i + 1) * nobj / nbins) binxx[i] = np.mean(xx[start:end]) binyy[i] = np.mean(yy[start:end]) binrr[i] = np.mean(rr[start:end]) binxaplev[i] = np.median(xaplev[start:end]) binyaplev[i] = np.median(yaplev[start:end]) binraplev[i] = np.median(raplev[start:end]) binxapstd[i] = np.std(xaplev[start:end]) / np.sqrt(end - start) binyapstd[i] = np.std(yaplev[start:end]) / np.sqrt(end - start) binrapstd[i] = np.std(raplev[start:end]) / np.sqrt(end - start) binphase[i] = np.mean(phase[start:end]) binaplev[i] = np.median(aplev[start:end]) binapstd[i] = np.std(aplev[start:end]) / np.sqrt(end - start) binnpix[i] = np.mean(noisepix[start:end]) # PLOT 1: flux plt.figure(501) plt.errorbar(binphase, binaplev, binapstd, fmt=fmt1[pos], linewidth=1, label=('pos %i' % (pos))) plt.title(event.planetname + ' Phase vs. Binned Flux') plt.xlabel('Orbital Phase') plt.ylabel('Flux') plt.legend(loc='best') # PLOT 2: position-flux plt.figure(502) plt.subplot(2, 1, 1) plt.title(event.planetname + ' Position vs. Binned Flux') plt.errorbar(binyy, binyaplev, binyapstd, fmt=fmt1[pos], label=('pos %i y' % (pos))) plt.ylabel('Flux') plt.legend(loc='best') plt.subplot(2, 1, 2) plt.errorbar(binxx, binxaplev, binxapstd, fmt=fmt1[pos], label=('pos %i x' % (pos))) plt.xlabel('Pixel Postion') plt.ylabel('Flux') plt.legend(loc='best') #PLOT 3: position-phase plt.figure(503) plt.plot(phase, x, 'b,') plt.plot(phase, y, 'r,') plt.title(event.planetname + ' Phase vs. Position') plt.xlabel('Orbital Phase') plt.ylabel('Pixel Position') plt.legend('xy') #PLOT 4: flux-radial distance plt.figure(504) plt.errorbar(binrr, binraplev, binrapstd, fmt=fmt1[pos], label=('pos %i' % (pos))) plt.title(event.planetname + ' Radial Distance vs. 
Flux') plt.xlabel('Distance From Center of Pixel') plt.ylabel('Flux') plt.legend(loc='best') # ::::::::::: Background setting ::::::::::::::::: if np.size(background) != 0: # number of points per bin: npoints = 42 nbins = int(np.size(background) // npoints) medianbg = np.zeros(nbins) bphase = np.zeros(nbins) # background bin phase bintime = np.zeros(nbins) # background bin JD time for i in range(nbins): start = int(1.0 * i * npoints) end = int(1.0 * (i + 1) * npoints) medianbg[i] = np.median(background[start:end]) bphase[i] = np.mean(phase[start:end]) bintime[i] = np.mean(jdtime[start:end]) # PLOT 5: background-phase day = int(np.floor(np.amin(jdtime))) timeunits1 = jdtime - day timeunits2 = bintime - day xlabel = 'JD - ' + str(day) if event.ecltype == 's': timeunits1 = phase timeunits2 = bphase xlabel = 'Phase' plt.figure(505) plt.plot(timeunits1, background, color='0.45', linestyle='None', marker=',') if np.size(background) > 10000: plt.plot(timeunits2, medianbg, fmt2[pos], label='median bins') plt.title(event.planetname + ' Background level') plt.xlabel(xlabel) plt.ylabel('Flux') plt.plot(timeunits1[0], background[0], color='0.45', linestyle='None', marker=',', label='all points') plt.legend(loc='best') else: print("WARNING: background has zero size.") #PLOT 7: Noise Pixels Binned plt.figure(507) plt.scatter(binphase, binnpix) plt.xlabel("Orbital Phase") plt.ylabel("Noise Pixels") plt.title(event.planetname + " Binned Noise Pixels") #PLOT 8: Noise Pixel Variance plt.figure(508) npixvar = bd.subarnvar(noisepix, event) subarnbinphase = bd.subarnbin(phase, event) plt.scatter(subarnbinphase, npixvar, s=1) plt.xlabel("Orbital Phase") plt.ylabel("Noise Pixel Variance") plt.title(event.planetname + " Noise Pixels Variance") #PLOT 9 and 10: Elliptical Area and Variance if event.method == 'fgc' or event.method == 'rfgc': plt.figure(509) plt.scatter(phase, ellarea, s=0.1) plt.xlabel("Orbital Phase") plt.ylabel("Elliptical Area") plt.title(event.planetname + " Gaussian 
Centering Elliptical Area") plt.figure(510) ellareavar = bd.subarnvar(ellarea, event) plt.scatter(subarnbinphase, ellareavar, s=1) plt.xlabel("Orbital Phase") plt.ylabel("Elliptical Area Variance") plt.title(event.planetname + " Elliptical Area Variance") if event.method == 'rfgc': plt.figure(511) plt.scatter(phase, rot % (np.pi / 2) * 180 / np.pi, s=1) plt.xlabel("Orbital Phase") plt.ylabel("Rotation (deg)") plt.title(event.planetname + " Gaussian Centering Rotation") #PLOT 6: Preflash if event.havepreflash: plt.figure(506) plt.errorbar((event.prefp.time[0] - event.prefp.time[0, 0]) / 60., event.prefp.aplev[0], yerr=event.prefp.aperr[0], fmt="o") plt.xlabel("Time since start of preflash (minutes)") plt.ylabel("Flux") plt.title(event.planetname + " Preflash") figname1 = str(event.eventname) + "-fig501.png" figname2 = str(event.eventname) + "-fig502.png" figname3 = str(event.eventname) + "-fig503.png" figname4 = str(event.eventname) + "-fig504.png" figname5 = str(event.eventname) + "-fig505.png" figname6 = str(event.eventname) + "-fig506.png" figname7 = str(event.eventname) + "-fig507.png" figname8 = str(event.eventname) + "-fig508.png" figname9 = str(event.eventname) + "-fig509.png" figname10 = str(event.eventname) + "-fig510.png" figname11 = str(event.eventname) + "-fig511.png" plt.figure(501) plt.savefig(figname1) plt.figure(502) plt.savefig(figname2) plt.figure(503) plt.savefig(figname3) plt.figure(504) plt.savefig(figname4) plt.figure(505) plt.savefig(figname5) plt.figure(506) if event.havepreflash: plt.savefig(figname6) plt.figure(507) plt.savefig(figname7) plt.figure(508) plt.savefig(figname8) if event.method == 'fgc' or event.method == 'rfgc': plt.figure(509) plt.savefig(figname9) plt.figure(510) plt.savefig(figname10) if event.method == 'rfgc': plt.figure(511) plt.savefig(figname11) # Saving me.saveevent(event, event.eventname + "_p5c") cwd += "/" # Print outputs, end-time, and close log. 
log.writelog("Output files:") log.writelog("Data:") log.writelog(" " + cwd + event.eventname + "_p5c.dat") log.writelog("Log:") log.writelog(" " + cwd + logname) log.writelog("Figures:") log.writelog(" " + cwd + figname1) log.writelog(" " + cwd + figname2) log.writelog(" " + cwd + figname3) log.writelog(" " + cwd + figname4) log.writelog(" " + cwd + figname5) if event.havepreflash: log.writelog(" " + cwd + figname6) log.writelog(" " + cwd + figname7) log.writelog(" " + cwd + figname8) if event.method == 'fgc' or event.method == 'rfgc': log.writelog(" " + cwd + figname9) log.writelog(" " + cwd + figname10) if event.method == 'rfgc': log.writelog(" " + cwd + figname11) log.writeclose('\nEnd Checks: ' + time.ctime()) os.chdir(owd) return event
def centering(event, pcf, centerdir):
    """
    Compute the center position of the target in every frame (POET p3).

    Reads the centering parameters from the control file into the event,
    optionally centers a supplied PSF, centers the mean image(s), then
    centers every frame in parallel with multiprocessing.  Results are
    stored in event.fp and the event is saved as '<eventname>_ctr'.

    Parameters
    ----------
    event : Event instance
        The event to center; updated in place.
    pcf : control-file object
        Centering section of the POET control file.
    centerdir : str
        Directory where the centering outputs are written.
    """
    tini = time.time()

    # Create centering log:
    logname = event.logname
    log = le.Logedit(centerdir + "/" + logname, logname)
    log.writelog("\nStart " + centerdir + " centering: " + time.ctime())

    os.chdir(centerdir)

    # Keep a copy of center.pcf in centerdir:
    pcf.make_file("center.pcf")

    # Parse the attributes from the control file to the event:
    attrib = vars(pcf)
    for key in attrib.keys():
        setattr(event, key, attrib.get(key).get())

    # Check least-asym parameters work (a trim radius of 0 means no trim).
    # FIX: original used 'is not 0' (identity comparison); use '!=' for a
    # value comparison.
    if event.method in ['lac', 'lag']:
        if event.ctrim < (event.cradius + event.csize) and event.ctrim != 0:
            event.ctrim = event.cradius + event.csize + 1
            log.writelog('Trim radius is too small, changed to: %i' % event.ctrim)
        if event.psfctrim < (event.psfcrad + event.psfcsize) and \
                event.psfctrim != 0:
            event.psfctrim = event.psfcrad + event.psfcsize + 1
            log.writelog('PSF Trim radius is too small, changed to: %i'
                         % event.psfctrim)

    # Centering bad pixel mask (1 = good, 0 = user-masked):
    centermask = np.ones((event.ny, event.nx))
    if event.ymask is not None:
        ymask = np.asarray(event.ymask, int)
        xmask = np.asarray(event.xmask, int)
        for i in np.arange(len(ymask)):
            centermask[ymask[i], xmask[i]] = 0

    # PSF:
    # Re-evaluate if a PSF has been redefined:
    if event.newpsf is not None:
        event.ispsf = os.path.isfile(event.newpsf)
        if event.ispsf:
            event.psffile = event.newpsf
            log.writelog('The PSF file has been redefined!')
            log.writelog("PSF: " + event.psffile)

    # PSF Centering:
    if event.ispsf:
        event.psfim = pf.getdata(event.psffile)
        # Guess of the center of the PSF (center of psfim):
        psfctrguess = np.asarray(np.shape(event.psfim)) / 2
        if event.nopsfctr:
            # Do not find center of PSF:
            event.psfctr = psfctrguess
        else:
            # Always use 'fgc' on the PSF, for testing:
            event.psfctr, extra = cd.centerdriver("fgc", event.psfim,
                                                  psfctrguess,
                                                  event.psfctrim,
                                                  event.psfcrad,
                                                  event.psfcsize)  # FINDME
            log.writelog('PSF center found.')
            print(event.psfctr)  # FINDME
    else:
        event.psfim = None
        event.psfctr = None
        log.writelog('No PSF supplied.')

    # Find center of the mean image(s), one per telescope pointing:
    event.targpos = np.zeros((2, event.npos))
    for pos in np.arange(event.npos):
        meanim = event.meanim[:, :, pos]
        guess = event.srcest[:, pos]
        targpos, extra = cd.centerdriver(event.method, meanim, guess,
                                         event.ctrim, event.cradius,
                                         event.csize, fitbg=event.fitbg,
                                         psf=event.psfim,
                                         psfctr=event.psfctr,
                                         expand=event.expand)
        event.targpos[:, pos] = targpos
    log.writelog("Center position(s) of the mean Image(s):\n"
                 + str(np.transpose(event.targpos)))

    # Multiprocess setup; shared memory allows only 1D arrays:
    event.maxnimpos = int(event.maxnimpos)
    event.npos = int(event.npos)
    x       = Array("d", np.zeros(event.npos * event.maxnimpos))
    y       = Array("d", np.zeros(event.npos * event.maxnimpos))
    sx      = Array("d", np.zeros(event.npos * event.maxnimpos))
    sy      = Array("d", np.zeros(event.npos * event.maxnimpos))
    flux    = Array("d", np.zeros(event.npos * event.maxnimpos))
    sky     = Array("d", np.zeros(event.npos * event.maxnimpos))
    goodfit = Array("d", np.zeros(event.npos * event.maxnimpos))

    # Size of chunk of data each core will process.
    # FIX: '/' yields a float in Python 3, which made the start/end frame
    # indices floats; use integer division like the photometry routines.
    chunksize = event.maxnimpos // event.ccores + 1
    print("Number of cores: " + str(event.ccores))

    # Start multiprocess:
    processes = []
    for nc in np.arange(event.ccores):
        start = nc * chunksize         # Starting index to process
        end   = (nc + 1) * chunksize   # Ending index to process
        proc = Process(target=do_center,
                       args=(start, end, event, centermask, log,
                             x, y, sx, sy, flux, sky, goodfit))
        processes.append(proc)
        proc.start()
    # Make sure all processes finish their work:
    for nc in np.arange(event.ccores):
        processes[nc].join()

    # Put the results in the event; reshape to (npos, maxnimpos):
    event.fp.x  = np.asarray(x ).reshape(event.npos, event.maxnimpos)
    event.fp.y  = np.asarray(y ).reshape(event.npos, event.maxnimpos)
    event.fp.sx = np.asarray(sx).reshape(event.npos, event.maxnimpos)
    event.fp.sy = np.asarray(sy).reshape(event.npos, event.maxnimpos)
    # If PSF fit, also keep the fitted flux, sky, and goodness of fit:
    if event.method in ["ipf", "bpf"]:
        event.fp.flux    = np.asarray(flux   ).reshape(event.npos,
                                                       event.maxnimpos)
        event.fp.psfsky  = np.asarray(sky    ).reshape(event.npos,
                                                       event.maxnimpos)
        event.fp.goodfit = np.asarray(goodfit).reshape(event.npos,
                                                       event.maxnimpos)

    # Pixel R position: distance to the nearest pixel center:
    event.fp.r = np.sqrt((event.fp.x % 1.0 - 0.5)**2.0
                         + (event.fp.y % 1.0 - 0.5)**2.0)
    log.writelog("End frames centering.")

    # Save:
    print("\nSaving")
    if event.denoised:
        me.saveevent(event, event.eventname + "_ctr",
                     save=['dendata', 'data', 'uncd', 'mask'])
    else:
        me.saveevent(event, event.eventname + "_ctr",
                     save=['data', 'uncd', 'mask'])

    # Print time elapsed and close log:
    cwd = os.getcwd() + "/"
    log.writelog("Output files (" + event.centerdir + "):")
    log.writelog("Data:")
    log.writelog(" " + cwd + event.eventname + "_ctr.dat")
    log.writelog(" " + cwd + event.eventname + "_ctr.h5")
    log.writelog("Log:")
    log.writelog(" " + cwd + event.logname)
    dt = t.hms_time(time.time() - tini)
    log.writeclose("\nEnd Centering. Time (h:m:s): %s" % dt
                   + " (" + event.centerdir + ")")
    print("------------- ------------\n")

    # Chain to the photometry step if requested:
    if hasattr(event, 'runp4') and event.runp4 == True:
        os.chdir(event.eventdir)
        os.system("poet.py p4 %s/" % event.centerdir)
def badpix(eventname, cwd):
    """
    Bad-pixel masking (POET p2): load the event, optionally convert
    surface brightness to flux, build the bad-pixel masks (including
    preflash/post-calibration data if present), and save the event as
    '<eventname>_bpm'.

    Parameters
    ----------
    eventname : str
        Name of the event to load.
    cwd : str
        Directory where the event files live; the previous working
        directory is restored on exit.

    Modification History:
    ---------------------
    2010-??-?? patricio   Initial Python implementation
    2014-08-13 garland    switched the pyfits package to astropy.io.fits
                          [email protected]
    2017-06-20 zacchaeus  Fixed None comparisons
                          [email protected]
    """
    owd = os.getcwd()
    os.chdir(cwd)
    tini = time.time()

    # Load the event:
    event = me.loadevent(eventname)
    # Load the data:
    me.updateevent(event, eventname, event.loadnext)

    # Create a new log starting from the old one:
    oldlogname = event.logname
    logname = event.eventname + ".log"
    log = le.Logedit(logname, oldlogname)
    event.logname = logname
    log.writelog('\nMARK: ' + time.ctime() + ': Starting p2badpix.')

    # ccampo 3/18/2011: do this in p5
    # Julian observation date
    #event.fp.juldat = event.jdjf80 + event.fp.time / 86400.0

    # ::::::::::::::::::::::: UNCERTAINTIES ::::::::::::::::::::::::::::
    # IRAC subarray data come with bogus uncertainties that are not
    # linearly related to photon noise.  We scale them later, using the
    # reduced chi squared from the model fit.

    # ::::::::::::::::::::::: FLUX CONVERSION ::::::::::::::::::::::::::
    # Do we want flux (uJy/pix) or surface brightness (MJy/sr) units?
    # If doing photometry, convert to flux.  Since we care about
    # relative numbers, it doesn't really matter.

    # Convert from surface brightness (MJy/sr) to flux units (uJy/pix):
    if event.fluxunits:
        log.writelog('Converting surface brightness to flux')
        event.data, event.uncd = btf.poet_bright2flux(event.data,
                                                      event.uncd,
                                                      event.posscl)
        if event.havepreflash:
            event.predata, event.preuncd = btf.poet_bright2flux(
                event.predata, event.preuncd, event.posscl)
        if event.havepostcal:
            event.postdata, event.postuncd = btf.poet_bright2flux(
                event.postdata, event.postuncd, event.posscl)
    else:
        log.writelog('Did not convert bright to flux.')

    # Mean background estimate, from zodi model:
    event.estbg = (np.mean(event.fp.zodi[np.where(event.fp.exist)])
                   + np.mean(event.fp.ism[np.where(event.fp.exist)])
                   + np.mean(event.fp.cib[np.where(event.fp.exist)]))
    if event.fluxunits:
        event.estbg *= (event.srperas * 1e12
                        * np.mean(event.posscl[0, :])
                        * np.mean(event.posscl[1, :]))

    # Bad pixel masking:
    log.writelog('Find and fix bad pixels')

    # Get permanent bad pixel mask:
    if not event.ispmask[0]:
        log.writelog('\nPermanent Bad pixel mask not found!')
    else:
        # FIX: use a context manager so the FITS file is closed (the
        # original handle was never closed); copy the data out so it
        # stays valid after the file closes.
        with fits.open(event.pmaskfile[0]) as hdu:
            if hdu[0].header['bitpix'] == -32:  # if data type is float
                hdu[0].scale(type='int16')      # cast it down to int16
            event.pmask = np.array(hdu[0].data)

    # IRS FIX:
    # IRS data contains the blue peak subarray while its pmask contains
    # the whole array (Hard coding):
    if event.photchan == 5:
        event.pmask = event.pmask[3:59, 86:127]

    # Do NOT define sigma, we have a different scheme for finding baddies
    # adds Spitzer rejects: fp.nsstrej & our rejects: fp.nsigrej
    event.mask = pbm.poet_badmask(event.data, event.uncd,
                                  event.pmask, event.inst.pcrit,
                                  event.bdmskd, event.inst.dcrit,
                                  event.fp, nimpos=event.nimpos)

    # User rejected pixels:
    if event.userrej is not None:
        for i in range(np.shape(event.userrej)[0]):
            event.mask[:, event.userrej[i, 0], event.userrej[i, 1], :] = 0
        event.fp.userrej = np.sum(np.sum(1 - event.mask, axis=1), axis=1)
        event.fp.userrej = np.transpose(event.fp.userrej) - event.fp.nsstrej
    else:
        event.fp.userrej = np.zeros((event.npos, event.maxnimpos))

    # define sigma here.
    # adds median sky: fp.medsky
    event.meanim = pcb.poet_chunkbad(event.data, event.uncd,
                                     event.mask, event.nimpos,
                                     event.sigma, event.szchunk,
                                     event.fp, event.nscyc)
    log.writelog('Masks combined')

    # Repeat procedure for preflash data:
    if event.havepreflash:
        event.premask = pbm.poet_badmask(event.predata, event.preuncd,
                                         event.pmask, event.inst.pcrit,
                                         event.prebdmskd, event.inst.dcrit,
                                         event.prefp,
                                         nimpos=event.prenimpos)
        if event.userrej is not None:
            for i in range(np.shape(event.userrej)[0]):
                event.premask[:, event.userrej[i, 0],
                              event.userrej[i, 1], :] = 0
            event.prefp.userrej = np.sum(np.sum(1 - event.premask, axis=1),
                                         axis=1)
            event.prefp.userrej = (np.transpose(event.prefp.userrej)
                                   - event.prefp.nsstrej)
        else:
            event.prefp.userrej = np.zeros((event.npos, event.premaxnimpos))
        event.premeanim = pcb.poet_chunkbad(event.predata, event.preuncd,
                                            event.premask, event.prenimpos,
                                            event.sigma, event.szchunk,
                                            event.prefp, event.nscyc)

    # Repeat procedure for post-calibration data:
    if event.havepostcal:
        event.postmask = pbm.poet_badmask(event.postdata, event.postuncd,
                                          event.pmask, event.inst.pcrit,
                                          event.postbdmskd,
                                          event.inst.dcrit,
                                          event.postfp,
                                          nimpos=event.postnimpos)
        if event.userrej is not None:
            for i in range(np.shape(event.userrej)[0]):
                event.postmask[:, event.userrej[i, 0],
                               event.userrej[i, 1], :] = 0
            event.postfp.userrej = np.sum(np.sum(1 - event.postmask, axis=1),
                                          axis=1)
            event.postfp.userrej = np.transpose(event.postfp.userrej) - \
                event.postfp.nsstrej
        else:
            event.postfp.userrej = np.zeros((event.npos,
                                             event.postmaxnimpos))
        event.postmeanim = pcb.poet_chunkbad(event.postdata, event.postuncd,
                                             event.postmask,
                                             event.postnimpos,
                                             event.sigma, event.szchunk,
                                             event.postfp, event.nscyc)
        for pos in range(event.npos):
            # FIX: astropy removed the 'clobber' keyword; use 'overwrite'.
            # NOTE(review): the same filename is rewritten on every pos
            # iteration, so only the last position's image survives —
            # confirm intent.
            fits.writeto(event.eventname + "_medpostcal.fits",
                         event.postmeanim[:, :, pos], overwrite=True)
        # Delete post calibration data:
        event.havepostcal = False
        del event.postdata
        del event.postmask
        del event.postuncd
        del event.postbdmskd

    # Save the data:
    if event.instrument == 'mips':
        todel = ['bdmskd', 'brmskd']  # what to delete
    else:
        todel = ['bdmskd']
    me.saveevent(event, event.eventname + "_bpm",
                 save=['data', 'uncd', 'mask'], delete=todel)

    # Print time elapsed and close log:
    log.writelog("Output files:")
    log.writelog("Data:")
    log.writelog(" " + cwd + '/' + event.eventname + "_bpm.dat")
    log.writelog(" " + cwd + '/' + event.eventname + "_bpm.h5")
    log.writelog("Log:")
    log.writelog(" " + cwd + '/' + logname)
    dt = t.hms_time(time.time() - tini)
    log.writeclose('\nBad pixel masking time (h:m:s): %s ' % dt)
    os.chdir(owd)

    # Chain to the centering step if requested:
    if event.runp3:
        #poet.p(3)
        os.system("python3 poet.py p3")
def _run_shared_aphot(event, log, mute, npos, maxnimpos):
    """
    Run do_aphot over all frames in parallel and store the results.

    Allocates shared-memory result arrays, splits the frames among
    event.ncores worker processes, reshapes the results into event.fp,
    computes the raw (sky-included) photometry, and writes the per-frame
    values to the log.  Factored out of photometry(), where this block
    was triplicated verbatim for the 'aper', 'var', and 'ell' types.
    NOTE: any event attribute the workers read (e.g. event.aparr) must
    be set before this call, since event is pickled at Process start.
    """
    # Shared memory arrays allow only 1D arrays:
    aplev     = Array("d", np.zeros(npos * maxnimpos))  # aperture flux
    aperr     = Array("d", np.zeros(npos * maxnimpos))  # aperture error
    nappix    = Array("d", np.zeros(npos * maxnimpos))  # number of aperture pixels
    skylev    = Array("d", np.zeros(npos * maxnimpos))  # sky level
    skyerr    = Array("d", np.zeros(npos * maxnimpos))  # sky error
    nskypix   = Array("d", np.zeros(npos * maxnimpos))  # number of sky pixels
    nskyideal = Array("d", np.zeros(npos * maxnimpos))  # ideal number of sky pixels
    status    = Array("d", np.zeros(npos * maxnimpos))  # apphot return status
    good      = Array("d", np.zeros(npos * maxnimpos))  # good flag

    # Size of chunk of data each core will process:
    chunksize = maxnimpos // event.ncores + 1
    print("Number of cores: " + str(event.ncores))

    # Spawn one worker per core:
    processes = []
    for nc in range(event.ncores):
        start = nc * chunksize         # Starting index to process
        end   = (nc + 1) * chunksize   # Ending index to process
        proc = Process(target=do_aphot,
                       args=(start, end, event, log, mute, aplev, aperr,
                             nappix, skylev, skyerr, nskypix, nskyideal,
                             status, good, 0))
        processes.append(proc)
        proc.start()
    # Make sure all processes finish their work:
    for nc in range(event.ncores):
        processes[nc].join()

    # Put the results in the event (reshape to (npos, maxnimpos)):
    event.fp.aplev     = np.asarray(aplev).reshape(npos, maxnimpos)
    event.fp.aperr     = np.asarray(aperr).reshape(npos, maxnimpos)
    event.fp.nappix    = np.asarray(nappix).reshape(npos, maxnimpos)
    event.fp.skylev    = np.asarray(skylev).reshape(npos, maxnimpos)
    event.fp.skyerr    = np.asarray(skyerr).reshape(npos, maxnimpos)
    event.fp.nskypix   = np.asarray(nskypix).reshape(npos, maxnimpos)
    event.fp.nskyideal = np.asarray(nskyideal).reshape(npos, maxnimpos)
    event.fp.status    = np.asarray(status).reshape(npos, maxnimpos)
    event.fp.good      = np.asarray(good).reshape(npos, maxnimpos)

    # Raw photometry (no sky subtraction):
    event.fp.apraw = (event.fp.aplev + (event.fp.skylev * event.fp.nappix))

    # Print results into the log if it wasn't done before:
    for pos in range(npos):
        for i in range(event.nimpos[pos]):
            log.writelog(
                '\nframe =%7d ' % i + 'pos =%5d ' % pos
                + 'y =%7.3f ' % event.fp.y[pos, i]
                + 'x =%7.3f' % event.fp.x[pos, i] + '\n'
                + 'aplev =%11.3f ' % event.fp.aplev[pos, i]
                + 'aperr =%9.3f ' % event.fp.aperr[pos, i]
                + 'nappix =%6.2f' % event.fp.nappix[pos, i] + '\n'
                + 'skylev=%11.3f ' % event.fp.skylev[pos, i]
                + 'skyerr=%9.3f ' % event.fp.skyerr[pos, i]
                + 'nskypix=%6.2f ' % event.fp.nskypix[pos, i]
                + 'nskyideal=%6.2f' % event.fp.nskyideal[pos, i] + '\n'
                + 'status=%7d ' % event.fp.status[pos, i]
                + 'good =%5d' % event.fp.good[pos, i], mute=True)


def photometry(event, pcf, photdir, mute, owd):
    """
    POET p4: perform photometry on the centered frames.

    Dispatches on event.phottype: 'aper' (fixed aperture), 'var'
    (noise-pixel-scaled aperture), 'ell' (elliptical), 'psffit'
    (reuse PSF-fit fluxes), or 'optimal'.  Results go into event.fp and
    the event is saved as '<eventname>_pht'.

    Parameters
    ----------
    event : Event instance
        The event to photometer; updated in place.
    pcf : control-file object
        Photometry section of the POET control file.
    photdir : str
        Output directory for this photometry run.
    mute : bool
        Suppress per-frame console output in the workers.
    owd : str
        Directory restored on exit.
    """
    tini = time.time()

    # Create photometry log:
    logname = event.logname
    log = le.Logedit(photdir + "/" + logname, logname)
    log.writelog("\nStart " + photdir + " photometry: " + time.ctime())

    parentdir = os.getcwd() + "/"
    os.chdir(photdir)

    # Parse the attributes from the control file to the event:
    attrib = vars(pcf)
    for key in attrib.keys():
        setattr(event, key, attrib.get(key))

    maxnimpos, npos = event.maxnimpos, event.npos
    # Allocating frame parameters:
    event.fp.aplev     = np.zeros((npos, maxnimpos))  # aperture flux
    event.fp.aperr     = np.zeros((npos, maxnimpos))  # aperture error
    event.fp.nappix    = np.zeros((npos, maxnimpos))  # number of aperture pixels
    event.fp.skylev    = np.zeros((npos, maxnimpos))  # sky level
    event.fp.skyerr    = np.zeros((npos, maxnimpos))  # sky error
    event.fp.nskypix   = np.zeros((npos, maxnimpos))  # number of sky pixels
    event.fp.nskyideal = np.zeros((npos, maxnimpos))  # ideal number of sky pixels
    event.fp.status    = np.zeros((npos, maxnimpos))  # apphot return status
    event.fp.good      = np.zeros((npos, maxnimpos))  # good flag

    # For interpolated aperture photometry, we need to "interpolate" the
    # mask, which requires float values.  Convert before processes are
    # spawned or memory usage balloons:
    if event.mask.dtype != float:
        event.mask = event.mask.astype(float)

    if event.phottype == "aper":
        # Fixed aperture radius for every frame:
        event.aparr = np.ones(npos * maxnimpos) * event.photap + event.offset
        _run_shared_aphot(event, log, mute, npos, maxnimpos)

    elif event.phottype == "var":
        # Variable aperture radius, scaled by the noise-pixel width:
        event.aparr = event.fp.noisepix[0]**.5 * event.photap + event.offset
        _run_shared_aphot(event, log, mute, npos, maxnimpos)

    elif event.phottype == "ell":
        # Elliptical aperture (widths come from the Gaussian-fit params):
        _run_shared_aphot(event, log, mute, npos, maxnimpos)

    elif event.phottype == "psffit":
        # Reuse fluxes and sky levels from the PSF-fit centering:
        event.fp.aplev  = event.fp.flux
        event.fp.skylev = event.fp.psfsky
        event.fp.good   = np.zeros((event.npos, event.maxnimpos))
        for pos in range(event.npos):
            event.fp.good[pos, 0:event.nimpos[pos]] = 1

    elif event.phottype == "optimal":
        # Utils for profile construction:
        pshape = np.array([2 * event.otrim + 1, 2 * event.otrim + 1])
        subpsf = np.zeros(np.asarray(pshape, int) * event.expand)
        x = np.indices(pshape)
        clock = t.Timer(np.sum(event.nimpos),
                        progress=np.array([0.05, 0.1, 0.25, 0.5, 0.75, 1.1]))
        for pos in range(npos):
            for i in range(event.nimpos[pos]):
                # Integer part of center of subimage:
                cen = np.rint([event.fp.y[pos, i], event.fp.x[pos, i]])
                # Center in the trimmed image:
                loc = (event.otrim, event.otrim)
                # Do the trim:
                img, msk, err = ie.trimimage(event.data[i, :, :, pos],
                                             *cen, *loc,
                                             mask=event.mask[i, :, :, pos],
                                             uncd=event.uncd[i, :, :, pos])
                # Center of star in the subimage:
                ctr = (event.fp.y[pos, i] - cen[0] + event.otrim,
                       event.fp.x[pos, i] - cen[1] + event.otrim)
                # Make profile; index of the position in the
                # supersampled PSF:
                pix = pf.pos2index(ctr, event.expand)
                profile, pctr = pf.make_psf_binning(
                    event.psfim, pshape, event.expand,
                    [pix[0], pix[1], 1.0, 0.0], event.psfctr, subpsf)
                # Subtract the sky level:
                img -= event.fp.psfsky[pos, i]
                # Optimal photometry calculation:
                immean, uncert, good = op.optphot(img, profile,
                                                  var=err**2.0, mask=msk)
                event.fp.aplev[pos, i]  = immean
                event.fp.aperr[pos, i]  = uncert
                event.fp.skylev[pos, i] = event.fp.psfsky[pos, i]
                event.fp.good[pos, i]   = good
                # Report progress:
                clock.check(np.sum(event.nimpos[0:pos]) + i,
                            name=event.centerdir)

    # START PREFLASH EDIT :::::::::::::::::::::::::::::::::::::::::::::
    # Do aperture photometry on the preflash data (single process):
    if event.havepreflash:
        print("\nStart preflash photometry:")
        premaxnimpos = event.premaxnimpos
        preaplev    = Array("d", np.zeros(npos * premaxnimpos))
        preaperr    = Array("d", np.zeros(npos * premaxnimpos))
        prenappix   = Array("d", np.zeros(npos * premaxnimpos))
        preskylev   = Array("d", np.zeros(npos * premaxnimpos))
        preskyerr   = Array("d", np.zeros(npos * premaxnimpos))
        preskynpix  = Array("d", np.zeros(npos * premaxnimpos))
        preskyideal = Array("d", np.zeros(npos * premaxnimpos))
        prestatus   = Array("d", np.zeros(npos * premaxnimpos))
        pregood     = Array("d", np.zeros(npos * premaxnimpos))

        # Start process:
        mute = False
        proc = Process(target=do_aphot,
                       args=(0, event.prenimpos[0], event, log, mute,
                             preaplev, preaperr, prenappix, preskylev,
                             preskyerr, preskynpix, preskyideal,
                             prestatus, pregood, 1))
        proc.start()
        proc.join()

        # Put the results in the event:
        event.prefp.aplev  = np.asarray(preaplev).reshape(npos, premaxnimpos)
        event.prefp.aperr  = np.asarray(preaperr).reshape(npos, premaxnimpos)
        event.prefp.nappix = np.asarray(prenappix).reshape(npos, premaxnimpos)
        event.prefp.status = np.asarray(prestatus).reshape(npos, premaxnimpos)
        event.prefp.skylev = np.asarray(preskylev).reshape(npos, premaxnimpos)
        event.prefp.good   = np.asarray(pregood).reshape(npos, premaxnimpos)

        # Raw photometry (no sky subtraction).
        # NOTE(review): this overwrites prefp.aplev in place, whereas the
        # main branches store the equivalent quantity in fp.apraw —
        # confirm this is intentional.
        event.prefp.aplev = (event.prefp.aplev
                             + (event.prefp.skylev * event.prefp.nappix))
    # END PREFLASH EDIT :::::::::::::::::::::::::::::::::::::::::::::::

    if event.method in ["bpf"]:
        event.ispsf = False

    # PSF aperture correction (fixed aperture):
    if event.ispsf and event.phottype == "aper":
        log.writelog('Calculating PSF aperture:')
        event.psfim = event.psfim.astype(np.float64)
        imerr = np.ones(np.shape(event.psfim))
        imask = np.ones(np.shape(event.psfim))
        skyfrac = 0.1
        event.aperfrac, ape, event.psfnappix, event.psfskylev, sle, \
            event.psfnskypix, event.psfnskyideal, event.psfstatus \
            = ap.apphot_c(event.psfim, imerr, imask,
                          event.psfctr[0], event.psfctr[1],
                          event.photap * event.psfexpand,
                          event.skyin * event.psfexpand,
                          event.skyout * event.psfexpand,
                          skyfrac, event.apscale, event.skymed)
        # Add the sky contribution back, then normalize the photometry:
        event.aperfrac += event.psfskylev * event.psfnappix
        event.fp.aplev /= event.aperfrac
        event.fp.aperr /= event.aperfrac
        log.writelog('Aperture contains %f of PSF.' % event.aperfrac)

    # PSF aperture correction (variable aperture, using the mean radius):
    if event.ispsf and event.phottype == "var":
        log.writelog('Calculating PSF aperture:')
        event.psfim = event.psfim.astype(np.float64)
        imerr = np.ones(np.shape(event.psfim))
        imask = np.ones(np.shape(event.psfim))
        skyfrac = 0.1
        avgap = np.mean(event.aparr)
        event.aperfrac, ape, event.psfnappix, event.psfskylev, sle, \
            event.psfnskypix, event.psfnskyideal, event.psfstatus \
            = ap.apphot_c(event.psfim, imerr, imask,
                          event.psfctr[0], event.psfctr[1],
                          avgap * event.psfexpand,
                          event.skyin * event.psfexpand,
                          event.skyout * event.psfexpand,
                          skyfrac, event.apscale, event.skymed)
        event.aperfrac += event.psfskylev * event.psfnappix
        event.fp.aplev /= event.aperfrac
        event.fp.aperr /= event.aperfrac
        log.writelog('Aperture contains %f of PSF.' % event.aperfrac)

    # PSF aperture correction (elliptical, using mean widths/rotation):
    if event.ispsf and event.phottype == "ell":
        log.writelog('Calculating PSF aperture:')
        event.psfim = event.psfim.astype(np.float64)
        imerr = np.ones(np.shape(event.psfim))
        imask = np.ones(np.shape(event.psfim))
        skyfrac = 0.1
        avgxwid = np.mean(event.fp.xsig * event.photap)
        avgywid = np.mean(event.fp.ysig * event.photap)
        avgrot = np.mean(event.fp.rot)
        event.aperfrac, ape, event.psfnappix, event.psfskylev, sle, \
            event.psfnskypix, event.psfnskyideal, event.psfstatus \
            = ap.elphot_c(event.psfim, imerr, imask,
                          event.psfctr[0], event.psfctr[1],
                          avgxwid * event.psfexpand,
                          avgywid * event.psfexpand, avgrot,
                          event.skyin * event.psfexpand,
                          event.skyout * event.psfexpand,
                          skyfrac, event.apscale, event.skymed)
        event.aperfrac += event.psfskylev * event.psfnappix
        event.fp.aplev /= event.aperfrac
        event.fp.aperr /= event.aperfrac
        log.writelog('Aperture contains %f of PSF.' % event.aperfrac)

    # Sadly we must do photometry for every aperture used; possibly use
    # a range and interpolate?  Might be a future speed-up option.
    # NOTE: a per-frame PSF aperture correction for the 'var'/'ell'
    # types used to live here (commented out) — it just removed the
    # corrections made by variable/elliptical photometry, so it has
    # been dropped.

    # Save:
    print("\nSaving ...")
    # Denoised data:
    if event.denphot:
        killdata = 'dendata'
    else:
        killdata = 'data'
    me.saveevent(event, event.eventname + "_pht",
                 delete=[killdata, 'uncd', 'mask'])

    # Print time elapsed and close log:
    cwd = os.getcwd() + "/"
    log.writelog("Output files (" + event.photdir + "):")
    log.writelog("Data:")
    log.writelog(" " + cwd + event.eventname + "_pht.dat")
    log.writelog("Log:")
    log.writelog(" " + cwd + logname)
    dt = t.hms_time(time.time() - tini)
    log.writeclose("\nEnd Photometry. Time (h:m:s): %s " % dt
                   + " (" + photdir + ")")
    print("-------------- ------------\n")
    os.chdir(owd)

    # Chain to the next pipeline step if requested:
    if event.runp5:
        os.system("python3 poet.py p5 %s/%s" % (event.centerdir,
                                                event.photdir))
def badpix(eventname, control=None):
    """
    Find and flag bad pixels in a POET event, then save the result.

    Loads the event saved under `eventname`, optionally converts surface
    brightness to flux, estimates the mean background from the zodi/ISM/CIB
    models, builds the bad-pixel mask (permanent mask, Spitzer rejects,
    sigma rejects, user rejects), and saves the event as
    "<eventname>_bpm" (.dat and .h5).

    Parameters
    ----------
    eventname : str
        Name of the saved event to load (as written by the init step).
    control : object, optional
        Unused in this routine; kept for interface compatibility.
    """
    tini = time.time()

    # Load the event and then its data arrays:
    event = me.loadevent(eventname)
    me.updateevent(event, eventname, event.loadnext)

    # Create a new log, carrying over the contents of the old one:
    oldlogname = event.logname
    logname = event.eventname + ".log"
    log = le.Logedit(logname, oldlogname)
    event.logname = logname
    log.writelog('\nMARK: ' + time.ctime() + ': Starting poet_2badpix.')

    # ccampo 3/18/2011: do this in p5
    # Julian observation date
    #event.fp.juldat = event.jdjf80 + event.fp.time / 86400.0

    # UNCERTAINTIES:
    # IRAC subarray data come with bogus uncertainties that are not
    # linearly related to photon noise.  They are scaled later, using
    # the reduced chi squared from the model fit.

    # FLUX CONVERSION:
    # Do we want flux (uJy/pix) or surface brightness (MJy/sr) units?
    # If doing photometry, convert to flux.  Since we care about
    # relative numbers, it doesn't really matter.
    if event.fluxunits:
        log.writelog('Converting surface brightness to flux')
        event.data, event.uncd = btf.poet_bright2flux(event.data,
                                                      event.uncd,
                                                      event.posscl)
        if event.havecalaor:
            # Convert the pre- and post-calibration AOR data as well:
            event.predata, event.preuncd = btf.poet_bright2flux(
                event.predata, event.preuncd, event.posscl)
            event.postdata, event.postuncd = btf.poet_bright2flux(
                event.postdata, event.postuncd, event.posscl)
    else:
        log.writelog('Did not convert bright to flux.')

    # Mean background estimate from the zodi, ISM, and CIB models
    # (hoisted the np.where so it is computed once, not three times):
    exist = np.where(event.fp.exist)
    event.estbg = (np.mean(event.fp.zodi[exist]) +
                   np.mean(event.fp.ism[exist]) +
                   np.mean(event.fp.cib[exist]))
    if event.fluxunits:
        # Scale the background estimate to flux units too:
        event.estbg *= (event.srperas * 1e12 *
                        np.mean(event.posscl[0, :]) *
                        np.mean(event.posscl[1, :]))

    # Bad pixel masking:
    log.writelog('Find and fix bad pixels')

    # Get the permanent bad pixel mask:
    if not event.ispmask[0]:
        log.writelog('\nPermanent Bad pixel mask not found!')
    else:
        hdu = pf.open(str(event.pmaskfile[0].decode('utf-8')))
        if hdu[0].header['bitpix'] == -32:  # if data type is float
            hdu[0].scale(type='int16')      # cast it down to int16
        event.pmask = hdu[0].data
        # IRS FIX:
        # IRS data contains the blue peak subarray while its pmask
        # contains the whole array (hard coding):
        if event.photchan == 5:
            event.pmask = event.pmask[3:59, 86:127]

    # Do NOT define sigma, we have a different scheme for finding baddies
    # adds Spitzer rejects: fp.nsstrej & our rejects: fp.nsigrej
    event.mask = pbm.poet_badmask(event.data, event.uncd,
                                  event.pmask, event.inst.pcrit,
                                  event.bdmskd, event.inst.dcrit,
                                  event.fp, nimpos=event.nimpos)

    # User-rejected pixels:
    if event.userrej is not None:  # FIX: was "!= None" (PEP 8 singleton test)
        for i in range(np.shape(event.userrej)[0]):
            event.mask[:, event.userrej[i, 0], event.userrej[i, 1], :] = 0
        event.fp.userrej = np.sum(np.sum(1 - event.mask, axis=1), axis=1)
        event.fp.userrej = np.transpose(event.fp.userrej) - event.fp.nsstrej
    else:
        event.fp.userrej = np.zeros((int(event.npos), int(event.maxnimpos)),
                                    dtype=int)

    # define sigma here.
    # adds median sky: fp.medsky
    event.meanim = pcb.poet_chunkbad(event.data, event.uncd,
                                     event.mask, event.nimpos,
                                     event.sigma, event.szchunk,
                                     event.fp, event.nscyc)
    log.writelog('Masks combined')

    if event.havecalaor:
        # Repeat the masking/chunk-cleaning for the calibration AORs:
        event.premask = pbm.poet_badmask(event.predata, event.preuncd,
                                         event.pmask, event.inst.pcrit,
                                         event.prebdmskd, event.inst.dcrit,
                                         event.prefp,
                                         nimpos=event.calnimpos)
        event.premeanim = pcb.poet_chunkbad(event.predata, event.preuncd,
                                            event.premask, event.calnimpos,
                                            event.sigma, event.szchunk,
                                            event.prefp, event.nscyc)
        event.postmask = pbm.poet_badmask(event.postdata, event.postuncd,
                                          event.pmask, event.inst.pcrit,
                                          event.postbdmskd, event.inst.dcrit,
                                          event.postfp,
                                          nimpos=event.calnimpos)
        event.postmeanim = pcb.poet_chunkbad(event.postdata, event.postuncd,
                                             event.postmask, event.calnimpos,
                                             event.sigma, event.szchunk,
                                             event.postfp, event.nscyc)

    # Save the data:
    if event.instrument == 'mips':
        todel = ['bdmskd', 'brmskd']  # what to delete
    else:
        todel = ['bdmskd']
    me.saveevent(event, event.eventname + "_bpm",
                 save=['data', 'uncd', 'mask'], delete=todel)

    # Print time elapsed and close log:
    cwd = os.getcwd() + "/"
    log.writelog("Output files:")
    log.writelog("Data:")
    log.writelog(" " + cwd + event.eventname + "_bpm.dat")
    log.writelog(" " + cwd + event.eventname + "_bpm.h5")
    log.writelog("Log:")
    log.writelog(" " + cwd + logname)
    dt = t.hms_time(time.time() - tini)
    log.writeclose('\nBad pixel masking time (h:m:s): %s ' % dt)
def denoise(pcf, denoisedir):
    """
    Wavelet-denoise the event data, one position at a time, in parallel.

    Copies the control file into `denoisedir`, transfers pcf attributes
    onto the event, denoises each pixel lightcurve with the thresholding
    function named by `event.threshold` using a multiprocessing pool,
    plots before/after diagnostics, and saves the event as
    "<eventname>_den".

    Parameters
    ----------
    pcf : control-file object
        Parsed denoise control file; its attributes are copied onto the
        event (except 'srcest').
    denoisedir : str
        Directory where denoising output and the log are written.

    NOTE(review): `event` is neither a parameter nor defined locally —
    this routine relies on a module-level `event`; confirm it is set by
    the caller before this runs.  Also note os.chdir(denoisedir) is
    never undone here — TODO confirm the caller restores the cwd.
    """
    tini = time.time()

    # Create denoising log:
    logname = event.logname
    log = le.Logedit(denoisedir + "/" + logname, logname)
    log.writelog("\nStart " + denoisedir + " denoising: " + time.ctime())
    os.chdir(denoisedir)

    # copy denoise.pcf in denoisedir
    pcf.make_file("denoise.pcf")

    # Parse the attributes from the control file to the event:
    for key, value in vars(pcf).items():
        if key != 'srcest':
            setattr(event, key, value.get())

    # FIX: resolve the thresholding function by name instead of building
    # source code strings for exec() — event.threshold names a
    # module-level function (e.g. bayesshrink); assumes it is defined in
    # this module's globals — TODO confirm no dotted names are used.
    thresh_func = globals()[event.threshold]

    for pos in range(event.npos):
        # Plot histogram of noisy wavelet coefficients:
        ylim = histwc(event, event.wavelet, event.numlvls + 1, pos,
                      log=log, denoised=False)
        # Plot first 'length' frames of noisy lightcurve at pixel srcest:
        plotlc(event, pos, length=200, denoised=False)

        log.writelog("Denoising will occur on the lowest " +
                     str(event.numlvls) + " levels at position " +
                     str(pos) + ".")

        # Determine the time resolution of the highest denoised level:
        timeres = 2**(event.numlvls) * event.framtime
        log.writelog("Time resolution for position " + str(pos) +
                     ", level " + str(event.numlvls) + " is " +
                     str(timeres) + " seconds.")

        # Assess presence of NaNs and Infs in masked data:
        print("Checking for NaNs and Infs.")
        data = (event.data[:, :, :, pos])[np.where(event.mask[:, :, :, pos])]
        if (np.sum(np.isnan(data)) + np.sum(np.isinf(data))) > 0:
            log.writelog(
                "***WARNING: Found NaNs and/or Infs in masked data at "
                "position " + str(pos) + ".")
        del data  # release before spawning the pool

        # Denoise every pixel lightcurve in parallel; writedata collects
        # each result via the callback:
        pool = mp.Pool(event.ncpu)
        for i in range(event.nx):
            for j in range(event.ny):
                pool.apply_async(
                    thresh_func,
                    ((event.data[:, j, i, pos])
                     [np.where(event.mask[:, j, i, pos])],
                     event.wavelet, event.numlvls, [j, i, pos]),
                    callback=writedata)
        pool.close()
        pool.join()

        # Plot histogram of denoised wavelet coefficients:
        histwc(event, event.wavelet, event.numlvls + 1, pos, log=log,
               denoised=True, ylim=ylim)
        # Plot first 'length' frames of denoised lightcurve at pixel srcest:
        plotlc(event, pos, length=200, denoised=True)

    # Save
    print("\nFinished Denoising. Saving.")
    me.saveevent(event, event.eventname + "_den",
                 save=['data', 'uncd', 'mask'])

    # Print time elapsed and close log:
    cwd = os.getcwd() + "/"
    log.writelog("Output files (" + event.denoisedir + "):")
    log.writelog("Data:")
    log.writelog(" " + cwd + event.eventname + "_den.dat")
    log.writelog(" " + cwd + event.eventname + "_den.h5")
    log.writelog("Log:")
    log.writelog(" " + cwd + event.logname)
    dt = t.hms_time(time.time() - tini)
    log.writeclose("\nEnd Denoising. Time (h:m:s): %s" % dt +
                   " (" + event.denoisedir + ")")
    print("------------- ------------\n")
    return