def estimateSnr(time, flux, flags, period_days, epoch_bkjd, \
        duration_hrs, depth_frac, nDurForClip=2):
    """Estimate the SNR of a transit.

    SNR is defined as (transit depth) / (rms scatter). Transit depth
    is an input parameter; the snr is calculated with the Marshall method.

    Inputs:
    -------------
    time, flux, flags
        (np 1d arrays) Arrays of time, flux and flag values. All flag
        values > 0 are treated as though they indicate bad data.

    period_days, epoch_bkjd, duration_hrs, depth_frac
        (floats) Parameters of transit

    Optional Inputs:
    ----------------
    nDurForClip
        Points within nDurForClip*duration_hrs around each transit
        are excluded from the estimate of noise.
    """
    dur_days = duration_hrs / 24.

    idx = kplrfits.markTransitCadences(time, period_days, epoch_bkjd, \
        dur_days, nDurForClip, flags=flags)

    if np.all(idx):
        msg = "All cadences seem to be in or near transit: "
        msg += "Period %.1f Duration %.2f hrs" % (period_days, duration_hrs)
        raise ValueError(msg)

    idx |= flags > 0                         #Remove data flagged as bad
    idx |= ~np.isfinite(time)                #Or otherwise NaN
    idx |= ~np.isfinite(flux)                #Or otherwise NaN
    idx |= outliers.indexOfOutliers(flux)    #Remove outliers

    #No good cadences for some reason.
    if np.all(idx):
        raise ValueError("No good cadences found for noise estimate. "
                         "Check transit duration")
    assert np.all(np.isfinite(flux[~idx]))

    expTime_days = np.median(np.diff(time[~idx]))
    duration_cadences = dur_days / expTime_days

    #Duration must be at least 4 cadences or sgCdpp will crash
    duration_cadences = max(duration_cadences, 4)
    rms = noise.computeSgCdpp_ppm(flux[~idx], duration_cadences) * 1e-6

    idx = kplrfits.markTransitCadences(time, period_days, epoch_bkjd, \
        dur_days, 1, flags=flags)
    nCadenceInTransit = np.sum(idx)

    return depth_frac / rms * np.sqrt(nCadenceInTransit)
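
# A minimal usage sketch for estimateSnr, assuming the kplrfits, noise and
# outliers modules referenced above are importable (their package paths are
# not shown here). The light curve and ephemeris below are made-up values for
# illustration only; real inputs come from the detrended pipeline products.
import numpy as np

time = np.arange(0, 80, 1 / 48.)            # ~80 days of 30-minute cadences
flux = 1e-4 * np.random.randn(len(time))    # white noise, fractional flux units
flags = np.zeros(len(time), dtype=int)      # no cadences flagged as bad

snr = estimateSnr(time, flux, flags, period_days=10.0, epoch_bkjd=5.0,
                  duration_hrs=3.0, depth_frac=5e-4)
print("Estimated SNR: %.1f" % snr)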
def plotWrapper(clip):
    """Wrapper function for difference image centroid diagnostic plots

    Call this function from the exporter.

    Inputs:
    -----------
    clip
        A clipboard object. Should have the following keys: serve,
        detrend, diffImg, rollPhase, trapFit

    Returns:
    -----------
    Two figure handles. The zeroth figure handle shows the flux and
    rollPhase plot, the first shows the centroid offset plot

    Outputs:
    ------------
    Two figures are produced.
    """
    time = clip['serve.time']
    qFlags = clip['serve.flags']
    flux = clip['detrend.flux_frac']
    flags = clip['detrend.flags']
    period_days = clip['trapFit.period_days']
    epoch_bkjd = clip['trapFit.epoch_bkjd']
    duration_hrs = clip['trapFit.duration_hrs']
    epic = clip['value']
    cube = clip['serve.cube']

    inTransitIndices = kplrfits.markTransitCadences(time, period_days, epoch_bkjd,
                                                    duration_hrs / 24., flags=flags)

    if clip['config.detrendType'] != "tess" and \
            clip['config.detrendType'] != "eleanor":
        rollPhase = clip['rollPhase.rollPhase']
        centroids = clip['diffImg.centroid_timeseries']
        goodCentroidIndices = \
            centroids[centroids[:, 1] > 1, 0].asarray().astype(int)

        fig1 = mp.figure(1)
        mp.clf()
        multiPanelPlotDiffImgCentroidsDiagnostic(time, flux, flags, rollPhase,
            inTransitIndices, goodCentroidIndices, qFlags)

    fig2 = mp.figure(2)
    mp.clf()
    try:
        titleStr = PLOT_CENTROID_OFFSETS_VBK(clip)
        # titleStr = "EPIC: %i %s" % (epic, titleStr)
    except ValueError as e:
        titleStr = "Error: %s" % (e)
def plotWrapper(clip):
    """Wrapper function for difference image centroid diagnostic plots

    Call this function from the exporter.

    Inputs:
    -----------
    clip
        A clipboard object. Should have the following keys: serve,
        detrend, diffImg, rollPhase, trapFit

    Returns:
    -----------
    Two figure handles. The zeroth figure handle shows the flux and
    rollPhase plot, the first shows the centroid offset plot

    Outputs:
    ------------
    Two figures are produced.
    """
    time = clip['serve.time']
    qFlags = clip['serve.flags']
    flux = clip['detrend.flux_frac']
    flags = clip['detrend.flags']
    centroids = clip['diffImg.centroid_timeseries']
    rollPhase = clip['rollPhase.rollPhase']
    period_days = clip['trapFit.period_days']
    epoch_bkjd = clip['trapFit.epoch_bkjd']
    duration_hrs = clip['trapFit.duration_hrs']
    epic = clip['value']

    # tce = clip['eventList'][0]
    # period = tce['trapFit.period_days']
    # epoch = tce['trapFit.epoch_bkjd']
    # duration_hrs = tce['trapFit.duration_hrs']

    inTransitIndices = kplrfits.markTransitCadences(time, period_days, epoch_bkjd, \
        duration_hrs / 24., flags=flags)
    goodCentroidIndices = centroids[centroids[:, 1] > 1, 0].asarray().astype(int)

    f1 = mp.figure(1)
    mp.clf()
    multiPanelPlotDiffImgCentroidsDiagnostic(time, flux, flags, rollPhase, \
        inTransitIndices, goodCentroidIndices, qFlags)

    f2 = mp.figure(2)
    mp.clf()
    try:
        titleStr = plotCentroidOffsets(centroids)
        titleStr = "EPIC: %i %s" % (epic, titleStr)
    except ValueError as e:
        titleStr = "Error: %s" % (e)
    mp.axis([-1, 1, -1, 1])
def getData(fn):
    clip = dpc.loadClipboard(fn)
    clip = pl.serveTask(clip)

    out = [clip['value']]
    for k in "period epoch depth duration_hrs".split():
        key1 = "bls.%s" % (k)
        out.extend([clip[key1]])

    #Recompute SNR
    time = clip['serve.time']
    flux = clip['detrend.flux_frac']
    flag = clip['detrend.flags']

    per = clip['bls.period']
    epc = clip['bls.epoch']
    depth_frac = clip['bls.depth']
    dur_days = clip['bls.duration_hrs'] / 24.

    #Try measuring SNR assuming there is a transit and a secondary
    #we want to cut out.
    try:
        idx = kplrfits.markTransitCadences(time, per / 2., epc, \
            dur_days, flags=flag)
        idx = idx | flag
        snr = estSnrForK2(flux[~idx], depth_frac, dur_days)
    except ValueError:
        #If the above results in no data points, try just excising
        #the primary
        try:
            idx = kplrfits.markTransitCadences(time, per, epc, \
                dur_days, flags=flag)
            idx = idx | flag
            snr = estSnrForK2(flux[~idx], depth_frac, dur_days)
        except ValueError:
            #Give up
            snr = -1
    out.append(snr)

    print(out[0], out[-1])
    return out
def searchForEvent(clip):
    subClip = clip.shallowCopy()
    originalKeyList = subClip.keys()

    taskList = clip['config.searchTaskList']

    #Set the flags attribute of the new subclip.
    #Problem with this code is that it is closely tied to the behaviour
    #of multiEventSearchTask
    try:
        tmp = clip.eventList[-1]
        flags = tmp['flags']
    except (IndexError, KeyError):
        flags = clip['detrend.flags']
    subClip['flags'] = flags

    #Check that all the tasks are properly defined
    for t in taskList:
        f = eval(t)

    #Now run them.
    for t in taskList:
        f = eval(t)
        subClip = f(subClip)

#    #@TODO List of tasks to run should be config param
#    subClip = placeholderBls(subClip)
#    subClip = trapezoidFitTask(subClip)
#    subClip = modshiftTask(subClip)
#    subClip = measureDiffImgCentroidsTask(subClip)
#    subClip = dispositionTask(subClip)

    newKeys = list(set(subClip.keys()) - set(originalKeyList))
    out = clipboard.Clipboard(__meta__=subClip['__meta__'])
    for k in newKeys:
        out[k] = subClip[k]

    #Mark all locations for this event as data not to be used.
    time = subClip['serve.time']
    period_days = subClip['trapFit.period_days']
    epoch_bkjd = subClip['trapFit.epoch_bkjd']
    duration_days = subClip['trapFit.duration_hrs'] / 24.
#    assert(np.all(np.isfinite(time[~flags])))
#    assert(np.any(flags))
    idx = kplrfits.markTransitCadences(time, period_days, epoch_bkjd, \
        duration_days, numberOfDurations=2, flags=flags)
    out['flags'] = flags | idx
    return out
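
# searchForEvent's final step ORs the in-transit cadences of the event it just
# found into the flags array, so the next pass of a multi-event search skips
# them. A minimal sketch of that accumulation with toy boolean arrays (the
# arrays below are illustrative, not pipeline data).
import numpy as np

flags = np.array([False, False, True, False, False, False])   # bad cadences
idx = np.array([False, True, False, False, True, False])      # event just found
flags = flags | idx    # cadences to ignore in the next search pass
assert np.array_equal(flags, [False, True, True, False, True, False])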
def computeSweetMetrics(time, flux, period, epoch, duration):
    """period, epoch and duration all in same units (e.g days)"""
    assert len(time) == len(flux)

    out = []
    idx = markTransitCadences(time, period, epoch, duration)
    for per in [period / 2., period, 2 * period]:
        phase = np.fmod(time - epoch + per, per)
        amp, ampUnc = SweetFitOotFlux(phase[~idx], flux[~idx])
        out.append([amp, ampUnc, amp / ampUnc])

    return np.array(out)
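
# computeSweetMetrics reports amp, ampUnc and amp/ampUnc at half, one, and
# twice the period. SweetFitOotFlux itself is not shown in this file; purely
# as an illustration, a sinusoid amplitude and a rough uncertainty can be
# estimated from out-of-transit flux with a linear least-squares fit like the
# sketch below. This is an assumption about what such a fit involves, not the
# pipeline's implementation.
import numpy as np

def fitSineAmplitude(phase, flux, period):
    """Fit flux = a*sin + b*cos + c and return (amplitude, rough uncertainty)."""
    A = np.vstack([np.sin(2 * np.pi * phase / period),
                   np.cos(2 * np.pi * phase / period),
                   np.ones_like(phase)]).T
    coeffs, resid, rank, sv = np.linalg.lstsq(A, flux, rcond=None)
    amp = np.hypot(coeffs[0], coeffs[1])

    #Approximate 1-sigma uncertainty from the rms of the fit residuals
    scatter = np.std(flux - A.dot(coeffs))
    ampUnc = scatter * np.sqrt(2.0 / len(flux))
    return amp, ampUnc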
def skyLinePlot(clipList):
    """A plot of which cadences contribute to the most transits

    Based on a similar plot created by Jessie Christiansen for the
    SOC pipeline.

    Inputs:
    -----------
    clipList
        (list) List of filenames of clips to process
    """
    epic, vals = gather.gatherFunction(clipList, getPeriodEpochDuration)

    clip = dpc.loadClipboard(clipList[0])
    clip = pl.serveTask(clip)
    time = clip['serve.time']
    flags = clip['detrend.flags']

    period = np.array([x[0] for x in vals])
    epoch = np.array([x[1] for x in vals])
    duration_days = np.array([x[2] for x in vals]) / 24.
    isCand = np.array([x[3] for x in vals])

    skyLine = time * 0
    candSkyLine = time * 0
    for i in range(len(period)):
        idx = kplrfits.markTransitCadences(time, period[i], epoch[i], \
            duration_days[i], flags=flags)
        skyLine[idx] += 1
        if isCand[i]:
            candSkyLine[idx] += 1

    mp.clf()
    mp.step(time[~flags], skyLine[~flags], 'b-', lw=2, \
        label="All targets")
    mp.step(time[~flags], candSkyLine[~flags], 'r-', lw=2, \
        label="Candidates")
    mp.xlabel("Time (BKJD)")
    mp.ylabel("Number of Transits on Cadence")
    return mp.gcf()
def getIngressEgressCadences(time, period_days, epoch_btjd, duration_days):
    """Get a list of transit start and end times in units of cadence number

    Inputs
    ----------

    Returns
    ----------
    A 2d numpy array. The zeroth column is the cadence number of each
    transit start, the first column is the cadence number of each
    transit end.
    """
    assert np.all(np.isfinite(time))

    idx = kplrfits.markTransitCadences(time, period_days, epoch_btjd,
                                       duration_days)
    transits = np.array(plateau(idx, .5))
    return transits
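
# plateau() is not shown in this file. As a point of reference, the start and
# end cadences of each contiguous run of in-transit points can be recovered
# from the boolean mask alone; the helper below is a sketch of that idea,
# under the assumption that this is roughly what plateau(idx, .5) returns.
import numpy as np

def contiguousRuns(mask):
    """Return an Nx2 array of [start, end) indices for runs of True in mask."""
    padded = np.concatenate(([0], mask.astype(int), [0]))
    changes = np.diff(padded)
    starts = np.where(changes == 1)[0]
    ends = np.where(changes == -1)[0]
    return np.column_stack([starts, ends])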
def fblsTask(clip):
    time_days = clip['extract.time']
    flux_norm = clip['detrend.flux_frac']
    flags = clip['detrend.flags']
    minPeriod = clip['config.blsMinPeriod']
    maxPeriod = clip['config.blsMaxPeriod']

#    durations = np.array([2, 4, 6, 8, 10, 12]) / 24.
    durations = np.array([4, 6, 8, 10, 12]) / 24.
    idx = flags == 0
    blsObj = fbls.BlsSearch(time_days[idx], flux_norm[idx], \
        [minPeriod, maxPeriod], durations)

    period, epoch, depth, duration = blsObj.getEvent()
    spectrum = blsObj.compute1dBls()

    duration_cadences = int(np.round(duration * 48))  #48 cadences/day; correct for K2
    rms = noise.computeSgCdpp_ppm(flux_norm[idx], duration_cadences) * 1e-6

    idx = kplrfits.markTransitCadences(time_days, period, epoch, \
        duration, flags=flags)
    snr = (depth / rms) * np.sqrt(np.sum(idx))

    out = dict()
    out['period'] = period
    out['epoch'] = epoch
    out['duration_hrs'] = duration * 24
    out['depth'] = depth
    out['snr'] = snr
    out['bls_search_periods'] = spectrum[:, 0]
    out['convolved_bls'] = spectrum[:, 1]
    #out['obj'] = blsObj
    # out['bls'] = bls  #bls array is extremely big
    clip['bls'] = out

    #Enforce contract
    clip['bls.period']
    clip['bls.epoch']
    clip['bls.duration_hrs']
    return clip
def measureDiffOffset(period_days, epoch_bkjd, duration_hrs, \
        time, prfObj, ccdMod, ccdOut, cube, bbox, rollPhase, flags, qFlags):
    """Measure the centroid shift between the in-transit and difference
    image for every in-transit cadence

    Inputs:
    -----------
    period_days, epoch_bkjd, duration_hrs
        (floats) Properties of transit

    time_bkjd
        Array of times per cadence for the given campaign

    prfObj
        An object of the class prf.KeplerPrf()

    ccdMod, ccdOut
        (int) CCD module and output of image. Needed to create the
        correct PRF model

    cube
        (3d np array) A data cube created from a TPF file. See
        fileio.tpf.getTargetPixelArrayFromFits()

    bbox
        [c1, c2, r1, r2]. Defines the range of columns (c1..c2) and
        rows (r1..r2) covered by the image. An exception is raised if
        img.shape != ((c2-c1), (r2-r1))

    rollPhase
        (1d np array) An array of roll phases for each row of cube.
        len(rollPhase) == len(cube). Units of this array don't matter,
        so long as cadences with similar roll angles have similar
        values of rollPhase. Roll phases for bad cadences should be
        set to a bad value.

    flags
        (1d array) Flag values indicating bad cadences. Currently a
        non-zero value of flags indicates a bad cadence.

    qFlags
        (1d array) SAP Quality flags from lightcurve files

    Returns:
    -------------
    An array with 5 columns, and as many rows as there are in-transit
    cadences. The columns are

    0: Relative cadence number
    1: In transit centroid column
    2: In transit centroid row
    3: Diff img centroid column
    4: Diff img centroid row

    If there is a statistically significant difference between the
    in-transit and difference image centroids then the transit is most
    likely not on the target.
    """
    duration_days = duration_hrs / 24.
    log = []
#    idx = getIndicesInTransit(period_days, epoch_bkjd, duration_hrs, time)
    idx = kplrfits.markTransitCadences(time, period_days, epoch_bkjd, \
        duration_days, flags=flags)
    wh = np.where(idx)[0]
    out = -1 * np.ones((len(wh), 5))
    diagnostics = list(range(len(wh)))
    for i, w in enumerate(wh):
        out[i, 0] = w
        try:
            out[i, 1:], dDict = measureInTransitAndDiffCentroidForOneImg(\
                prfObj, ccdMod, ccdOut, cube, w, bbox, rollPhase, qFlags, \
                hdr=None, plot=False)
            diagnostics[i] = dDict
        except ValueError as e:
            log.append("Img %i: %s" % (w, e))
            pass
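
# The array returned by measureDiffOffset has one row per in-transit cadence:
# [cadence, itrCol, itrRow, diffCol, diffRow], with failed rows left at -1.
# A short sketch of how the column/row offsets could be summarised from that
# output; the helper name and the use of a plain mean are illustrative
# choices, not the pipeline's disposition logic.
import numpy as np

def summariseCentroidOffsets(centroids):
    """Return the mean (dCol, dRow) between in-transit and diff centroids."""
    good = centroids[:, 1] > 0    # rows where the centroid fit succeeded
    dCol = centroids[good, 1] - centroids[good, 3]
    dRow = centroids[good, 2] - centroids[good, 4]
    return np.mean(dCol), np.mean(dRow)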
def psfCentroids_vbk(clip):
    itrCol, itrRow, itr_cov = [], [], []
    ootCol, ootRow, oot_cov = [], [], []
    diffCol, diffRow, diff_cov = [], [], []

    time = clip['serve.time']
    flux = clip['detrend.flux_frac']
    flags = clip['detrend.flags']
    period_days = clip['trapFit.period_days']
    epoch_bkjd = clip['trapFit.epoch_bkjd']
    duration_hrs = clip['trapFit.duration_hrs']
    epic = clip['value']
    cube = clip['serve.cube']
    hdr_ = clip['serve.tpfHeader']

    if clip['config.detrendType'] == "tess":
        col_zero_, row_zero_ = int(hdr_['1CRV4P']), int(hdr_['2CRV4P'])
        epic_Col, epic_Row = col_zero_ + int(hdr_['1CRPX4']), \
            row_zero_ + int(hdr_['2CRPX4'])
    if clip['config.detrendType'] == "eleanor":
        col_zero_, row_zero_ = int(hdr_['CRPIX1']), int(hdr_['CRPIX2'])
        epic_Col, epic_Row = col_zero_ + int(hdr_['TPF_H']), \
            row_zero_ + int(hdr_['TPF_W'])

    inTransitIndices = kplrfits.markTransitCadences(time, period_days, epoch_bkjd,
                                                    duration_hrs / 24., flags=flags)

    oot_cadence_ = np.where((inTransitIndices == False) & (flags == False))
    oot_cadence = np.asarray(oot_cadence_).flatten()
    itr_cadence_ = np.where(inTransitIndices == True)
    itr_cadence = np.asarray(itr_cadence_).flatten()

    #
    # GET IN-TRANSIT, BEFORE TRANSIT, AND AFTER TRANSIT CADENCES ONLY
    #
    transit_number_ = 1
    transits_ = [itr_cadence[0]]

    tmp_idx_, = np.where(oot_cadence < transits_[0])
    no_transits_ = oot_cadence[tmp_idx_[-1]]

    for ii in range(1, len(itr_cadence)):
        if itr_cadence[ii] - itr_cadence[ii - 1] > 10:
            transit_number_ += 1
            transits_ = np.hstack((transits_, itr_cadence[ii - 1], itr_cadence[ii]))

    transits_ = np.hstack((transits_, itr_cadence[-1]))

    cube_back_ = cube
    if clip['config.detrendType'] == "tess":
        cut_ = 3
        cube = cube[:, epic_Col - col_zero_ - cut_:epic_Col - col_zero_ + cut_,
                    epic_Row - row_zero_ - cut_:epic_Row - row_zero_ + cut_]
    if clip['config.detrendType'] == "eleanor":
        cut_ = 5
        cube = cube[:, epic_Col - col_zero_ - cut_:epic_Col - col_zero_ + cut_,
                    epic_Row - row_zero_ - cut_:epic_Row - row_zero_ + cut_]

    for ii in range(transit_number_):
        number_of_cadences_in_transit_ = transits_[2 * ii + 1] - transits_[2 * ii]
        idx_in_transit_ = np.linspace(transits_[2 * ii], transits_[2 * ii + 1],
                                      int(number_of_cadences_in_transit_ + 1))
        idx_in_transit = [int(aa) for aa in idx_in_transit_]

        idx_before_, = np.where(oot_cadence < transits_[2 * ii])
        idx_before = oot_cadence[idx_before_[-1 - number_of_cadences_in_transit_:]]
        idx_after_, = np.where(oot_cadence > transits_[2 * ii + 1])
        idx_after = oot_cadence[idx_after_[0:number_of_cadences_in_transit_ + 1]]

        itr_mean_img_by_transit_ = np.nanmean(cube[idx_in_transit, :, :], axis=0)
        before_tr_mean_img_by_transit_ = np.nanmean(cube[idx_before, :, :], axis=0)
        after_tr_mean_img_by_transit_ = np.nanmean(cube[idx_after, :, :], axis=0)
        oot_mean_img_by_transit_ = 0.5 * (before_tr_mean_img_by_transit_ +
                                          after_tr_mean_img_by_transit_)
        diff_mean_img_by_transit_ = oot_mean_img_by_transit_ - itr_mean_img_by_transit_

        itrCol_by_transit_, itrRow_by_transit_, itr_cov_by_transit_ = \
            intertial_axis(itr_mean_img_by_transit_)
        ootCol_by_transit_, ootRow_by_transit_, oot_cov_by_transit_ = \
            intertial_axis(oot_mean_img_by_transit_)
        diffCol_by_transit_, diffRow_by_transit_, diff_cov_by_transit_ = \
            intertial_axis(diff_mean_img_by_transit_)

        itrCol, itrRow = np.hstack((itrCol, itrCol_by_transit_)), \
            np.hstack((itrRow, itrRow_by_transit_))
        ootCol, ootRow = np.hstack((ootCol, ootCol_by_transit_)), \
            np.hstack((ootRow, ootRow_by_transit_))
        diffCol, diffRow = np.hstack((diffCol, diffCol_by_transit_)), \
            np.hstack((diffRow, diffRow_by_transit_))

    return cut_ + itrCol, cut_ + itrRow, cut_ + ootCol, cut_ + ootRow, \
        cut_ + diffCol, cut_ + diffRow
def generateImages(clip):
    time = clip['serve.time']
    flux = clip['detrend.flux_frac']
    flags = clip['detrend.flags']
    period_days = clip['trapFit.period_days']
    epoch_bkjd = clip['trapFit.epoch_bkjd']
    duration_hrs = clip['trapFit.duration_hrs']
    epic = clip['value']
    cube = clip['serve.cube']
    hdr_ = clip['serve.tpfHeader']

    inTransitIndices = kplrfits.markTransitCadences(time, period_days, epoch_bkjd,
                                                    duration_hrs / 24., flags=flags)

    oot_cadence_ = np.where((inTransitIndices == False) & (flags == False))
    oot_cadence = np.asarray(oot_cadence_).flatten()
    itr_cadence_ = np.where(inTransitIndices == True)
    itr_cadence = np.asarray(itr_cadence_).flatten()

    #
    # GET IN-TRANSIT, BEFORE TRANSIT, AND AFTER TRANSIT CADENCES ONLY
    #
    transit_number_ = 1
    transits_ = [itr_cadence[0]]

    tmp_idx_, = np.where(oot_cadence < transits_[0])
    no_transits_ = oot_cadence[tmp_idx_[-1]]

    for ii in range(1, len(itr_cadence)):
        if itr_cadence[ii] - itr_cadence[ii - 1] > 10:
            transit_number_ += 1
            transits_ = np.hstack((transits_, itr_cadence[ii - 1], itr_cadence[ii]))

    transits_ = np.hstack((transits_, itr_cadence[-1]))

    ss_ = cube.shape
    itr_mean_cube_ = np.zeros((transit_number_, ss_[1], ss_[2]))
    oot_mean_cube_ = np.zeros((transit_number_, ss_[1], ss_[2]))
    diff_mean_cube_ = np.zeros((transit_number_, ss_[1], ss_[2]))

    for ii in range(transit_number_):
        number_of_cadences_in_transit_ = transits_[2 * ii + 1] - transits_[2 * ii]
        idx_in_transit_ = np.linspace(transits_[2 * ii], transits_[2 * ii + 1],
                                      int(number_of_cadences_in_transit_ + 1))
        idx_in_transit = [int(aa) for aa in idx_in_transit_]

        idx_before_, = np.where(oot_cadence < transits_[2 * ii])
        idx_before = oot_cadence[idx_before_[-1 - number_of_cadences_in_transit_:]]
        idx_after_, = np.where(oot_cadence > transits_[2 * ii + 1])
        idx_after = oot_cadence[idx_after_[0:number_of_cadences_in_transit_ + 1]]

        itr_mean_img_by_transit_ = np.nanmean(cube[idx_in_transit, :, :], axis=0)
        before_tr_mean_img_by_transit_ = np.nanmean(cube[idx_before, :, :], axis=0)
        after_tr_mean_img_by_transit_ = np.nanmean(cube[idx_after, :, :], axis=0)
        oot_mean_img_by_transit_ = 0.5 * (before_tr_mean_img_by_transit_ +
                                          after_tr_mean_img_by_transit_)
        diff_mean_img_by_transit_ = oot_mean_img_by_transit_ - itr_mean_img_by_transit_

        itr_mean_cube_[ii, :, :] = itr_mean_img_by_transit_
        oot_mean_cube_[ii, :, :] = oot_mean_img_by_transit_
        diff_mean_cube_[ii, :, :] = diff_mean_img_by_transit_

    itr_mean_img_ = np.nanmean(itr_mean_cube_, axis=0)
    oot_mean_img_ = np.nanmean(oot_mean_cube_, axis=0)
    diff_mean_img_ = oot_mean_img_ - itr_mean_img_

    return itr_mean_img_, oot_mean_img_, diff_mean_img_, itr_mean_cube_, \
        oot_mean_cube_, diff_mean_cube_, transit_number_
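
# generateImages builds, for each transit, an out-of-transit image as the
# average of the mean images just before and just after the transit, and a
# difference image as (out-of-transit - in-transit). A tiny numeric sketch of
# that arithmetic on a made-up 1x1 "image" (the numbers are illustrative).
import numpy as np

before_img = np.array([[100.0]])   # mean flux before transit
after_img = np.array([[102.0]])    # mean flux after transit
itr_img = np.array([[96.0]])       # mean flux in transit

oot_img = 0.5 * (before_img + after_img)   # 101.0
diff_img = oot_img - itr_img               # 5.0: flux lost during transit
assert np.allclose(diff_img, 5.0)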
def plot_DiffImg_and_Centroids(clip):
    #cube, centroids, goodCentroidIndices, rollPhase, quality):
    time = clip['serve.time']
    qFlags = clip['serve.flags']
    flux = clip['detrend.flux_frac']
    flags = clip['detrend.flags']
    centroids = clip['diffImg.centroid_timeseries']
    rollPhase = clip['rollPhase.rollPhase']
    period_days = clip['trapFit.period_days']
    epoch_bkjd = clip['trapFit.epoch_bkjd']
    duration_hrs = clip['trapFit.duration_hrs']
    epic = clip['value']
    cube = clip['serve.cube']
    hdr_ = clip['serve.tpfHeader']

    col_zero_, row_zero_ = int(hdr_['1CRV4P']), int(hdr_['2CRV4P'])
    epic_Col, epic_Row = col_zero_ + int(hdr_['1CRPX4']), \
        row_zero_ + int(hdr_['2CRPX4'])

    idx = centroids[:, 1] > 0
    cin = centroids[idx, 0]
    ootCol, ootRow = centroids[idx, 1], centroids[idx, 2]
    diffCol, diffRow = centroids[idx, 3], centroids[idx, 4]

    inTransitIndices = kplrfits.markTransitCadences(time, period_days, epoch_bkjd,
                                                    duration_hrs / 24., flags=flags)
    goodCentroidIndices = centroids[centroids[:, 1] > 1, 0].asarray().astype(int)

    oot_cadence_, itr_cadence_ = np.where(inTransitIndices == False), \
        np.where(inTransitIndices == True)
    oot_mean_img_, itr_mean_img_ = np.nanmean(cube[oot_cadence_], axis=0), \
        np.nanmean(cube[itr_cadence_], axis=0)
    diff_mean_img_ = oot_mean_img_ - itr_mean_img_

    ss_ = oot_mean_img_.shape
    extent_ = [col_zero_, col_zero_ + ss_[1], row_zero_, row_zero_ + ss_[0]]
    disp = lambda x: mp.imshow(x, cmap=mp.get_cmap('binary', 512),
                               origin="bottom", interpolation="nearest",
                               extent=extent_)

    n_panels_ = 5 * 4
    skip_ = 3 * 15
    for ii in range(n_panels_ - 1):
        print(ii)
        mp.subplot(5, 4, ii + 1)
        diff_, oot_, diag = diffimg.constructK2DifferenceImage(
            cube, goodCentroidIndices[skip_ + ii], rollPhase, flags)
        disp(diff_)
        mp.plot(ootCol[skip_ + ii], ootRow[skip_ + ii], 'c*', ms=10)
        mp.plot(diffCol[skip_ + ii], diffRow[skip_ + ii], 'mo', ms=6)
        mp.title(time[goodCentroidIndices[skip_ + ii]], fontsize=10)
        mp.gca().axes.get_xaxis().set_visible(False)
        mp.gca().axes.get_yaxis().set_visible(False)

    mp.tight_layout()

    mp.subplot(5, 4, n_panels_)
    disp(diff_mean_img_)
    mp.scatter(diffCol[skip_ + 0:skip_ + n_panels_],
               diffRow[skip_ + 0:skip_ + n_panels_],
               marker='o', c=cin[skip_ + 0:skip_ + n_panels_],
               s=40, linewidths=0, cmap=mp.cm.RdYlBu)
    cb = mp.colorbar()
    cb.set_label("Cadence")
def PLOT_CENTROID_OFFSETS_VBK(clip):
    from astropy import coordinates, units as u, wcs
    from astroquery.skyview import SkyView
    from astroquery.vizier import Vizier
    import astropy.units as u
    import math
    from scipy.ndimage import rotate
    from reproject import reproject_interp, reproject_exact, \
        reproject_to_healpix, reproject_from_healpix
    from astropy.wcs import WCS
#    import pywcsgrid2

    time = clip['serve.time']
    qFlags = clip['serve.flags']
    flux = clip['detrend.flux_frac']
    flags = clip['detrend.flags']
    centroids = clip['diffImg.centroid_timeseries']
#    rollPhase = clip['rollPhase.rollPhase']
    period_days = clip['trapFit.period_days']
    epoch_bkjd = clip['trapFit.epoch_bkjd']
    duration_hrs = clip['trapFit.duration_hrs']
    epic = clip['value']
    cube = clip['serve.cube']
    hdr_ = clip['serve.tpfHeader']

    col_zero_, row_zero_ = int(hdr_['1CRV4P']), int(hdr_['2CRV4P'])
    epic_Col, epic_Row = col_zero_ + int(hdr_['1CRPX4']), \
        row_zero_ + int(hdr_['2CRPX4'])

    def k2_ConvertHeaderWCS(tpf_header):
        funny_keywords = {'1CTYP4': 'CTYPE1', '2CTYP4': 'CTYPE2',
                          '1CRPX4': 'CRPIX1', '2CRPX4': 'CRPIX2',
                          '1CRVL4': 'CRVAL1', '2CRVL4': 'CRVAL2',
                          '1CUNI4': 'CUNIT1', '2CUNI4': 'CUNIT2',
                          '1CDLT4': 'CDELT1', '2CDLT4': 'CDELT2',
                          '11PC4': 'PC1_1', '12PC4': 'PC1_2',
                          '21PC4': 'PC2_1', '22PC4': 'PC2_2'}
        mywcs = {}
        for oldkey, newkey in funny_keywords.items():
            mywcs[newkey] = tpf_header[oldkey]
        return wcs.WCS(mywcs)

    mywcs_ = k2_ConvertHeaderWCS(hdr_)

    #
    #
    inTransitIndices = kplrfits.markTransitCadences(time, period_days, epoch_bkjd,
                                                    duration_hrs / 24., flags=flags)
    oot_cadence_ = np.where((inTransitIndices == False) & (flags == False))
    oot_mean_img_ = np.nanmean(cube[oot_cadence_], axis=0)
    itr_cadence_ = np.where(inTransitIndices == True)
    itr_mean_img_ = np.nanmean(cube[itr_cadence_], axis=0)
    diff_mean_img_ = oot_mean_img_ - itr_mean_img_

    ss_ = oot_mean_img_.shape
#    disp = lambda x: mp.imshow(x, cmap=mp.cm.binary, origin="bottom", interpolation="nearest")
    extent_ = [col_zero_, col_zero_ + ss_[1], row_zero_, row_zero_ + ss_[0]]
    disp = lambda x: mp.imshow(x, cmap=mp.get_cmap('binary', 512),
                               origin="bottom", interpolation="nearest",
                               extent=extent_)

    #
    # GET CENTROIDS
    idx = centroids[:, 1] > 0
    cin = centroids[idx, 0]
    ootCol = centroids[idx, 1]  # - col_zero_# - 1
    ootRow = centroids[idx, 2]  # - row_zero_# - 1
    #itr => in transit
    diffCol = centroids[idx, 3]  # - col_zero_# - 1
    diffRow = centroids[idx, 4]  # - row_zero_# - 1

    diffC = (ootCol - diffCol)  # + np.median(diffCol)
    diffR = (ootRow - diffRow)  # + np.median(diffRow)

    itrCol, itrRow = diffCol, diffRow
    #
    xmin_ = np.min(np.hstack((ootCol, diffCol)))
    xmax_ = np.max(np.hstack((ootCol, diffCol)))
    ymin_ = np.min(np.hstack((ootRow, diffRow)))
    ymax_ = np.max(np.hstack((ootRow, diffRow)))

    # START PLOTTING
    #
    ax1 = mp.subplot(221)
    disp(oot_mean_img_)
    ax1.plot(ootCol, ootRow, 'c*', ms=8)  #, mec = 'm')#, color='yellow')
    ax1.plot(np.mean(ootCol), np.mean(ootRow), 'c*', ms=14, label='AVG_OOT')  #, mec = 'm')
    ax1.plot(itrCol, itrRow, 'mo', ms=3)  #, mec = 'c')
    ax1.plot(np.mean(itrCol), np.mean(itrRow), 'mo', ms=6, label='AVG_DIFF')  #, mec = 'c')
#    covar.plotErrorEllipse(ootCol, ootRow, color='c', ms=14, marker='*', mfc='c')  #, mec = 'm')
    covar.plotErrorEllipse(itrCol, itrRow, color='m', ms=14, marker='o', mfc='m')  #, mec = 'c')
    ax1.plot(epic_Col, epic_Row, 'xy', mew=3, ms=10, label='EPIC')
    mp.xlabel(r"$\Delta$ Column (pixels)")
    mp.ylabel(r"$\Delta$ Row (pixels)")
    mp.legend(loc='best', fontsize=8)

    mp.subplot(222)
    mp.plot(np.mean(ootCol), np.mean(ootRow), 'c*', ms=20, label='OOT')
    mp.plot(np.mean(itrCol), np.mean(itrRow), 'mo', ms=20, label='DIFF')
    mp.scatter(ootCol, ootRow, marker='*', c=cin, s=64, linewidths=0,
               cmap=mp.cm.RdYlBu)
    mp.scatter(itrCol, itrRow, marker='o', c=cin, s=64, linewidths=0,
               cmap=mp.cm.RdYlBu)
    cb = mp.colorbar()
    cb.set_label("Cadence")
    covar.plotErrorEllipse(ootCol, ootRow, color='c', ms=20, marker='*', mfc='c')
    covar.plotErrorEllipse(itrCol, itrRow, color='m', ms=20, marker='o', mfc='m')
    mp.xlabel(r"$\Delta$ Column (pixels)")

    probOffset, chiSq = covar.computeProbabilityOfObservedOffset(diffC, diffR)
    titleStr = "Prob. On Target: %.1e: $\chi^2$: %.3f" % (1 - probOffset, chiSq)
    mp.legend(loc='best')
    mp.xlim(xmin_ - 0.2, xmax_ + 0.2)
    mp.ylim(ymin_ - 0.2, ymax_ + 0.2)
    mp.tight_layout()

    try:
        ax3 = mp.subplot(223, projection=mywcs_)  #mywcs_)
        ra_, dec_ = hdr_['RA_OBJ'], hdr_['DEC_OBJ']
        center_ = coordinates.SkyCoord(ra_, dec_, unit=(u.deg, u.deg),
                                       frame='icrs')
        img_survey = SkyView.get_images(position=center_, survey='2MASS-J',
                                        radius=1 * u.arcmin)
        pix_survey = img_survey[0][0].data
        hdr_survey = img_survey[0][0].header
        inverted_pix_survey = np.max(pix_survey) - pix_survey
        inverted_pix_survey = pix_survey  #inverted_pix_survey/np.max(inverted_pix_survey)

        levels_ = np.linspace(np.min(inverted_pix_survey),
                              np.percentile(inverted_pix_survey, 99), 10)
        ax3.contourf(inverted_pix_survey,
                     transform=ax3.get_transform(wcs.WCS(hdr_survey)),
                     levels=levels_, cmap=mp.get_cmap('binary', 256))
        #
        # mp.tight_layout()
    except:
        mp.subplot(223)

    mp.subplot(224)
    titleStr_ = plotCentroidOffsets(centroids)

    return titleStr