class PlotTracks(OutputRoutine):
    """A routine that plots the tracks and saves them to file; to be used with GetTracks"""

    def __init__(self, output_dir, season='2016', array='AR3', spreads=True):
        """
        :param output_dir: string - directory the track images are written to
                           (assumed to end with a path separator, see execute)
        :param season: string - observation season passed to PixelReader
        :param array: string - detector array name passed to PixelReader
        :param spreads: boolean - if True, also draw the spread of each track
        """
        OutputRoutine.__init__(self, output_dir)
        self._spreads = spreads  # whether to plot spreads
        self._pr = None
        self._season = season
        self._array = array

    def initialize(self):
        # build the pixel reader once per run
        self._pr = PixelReader(season=self._season, array=self._array)

    def execute(self):
        """Plot every track stored under "tracks" on top of the array layout
        and save the figure as <output_dir><tod_id>.png."""
        tod_id = self.get_context().get_id()
        tracks = self.get_context().get_store().get("tracks")
        # print statements parenthesized for Python 2/3 compatibility
        print('[INFO] n_tracks = %d' % len(tracks))

        plt.figure(figsize=(10, 10))
        self._pr.plot()  # plot the array
        for track in tracks:
            if self._spreads:
                # marker area scales with the squared spread (track column 2)
                plt.scatter(track[:, 0], track[:, 1], marker='o',
                            s=3000 * track[:, 2]**2, alpha=0.1)
                plt.plot(track[:, 0], track[:, 1])
            else:
                plt.scatter(track[:, 0], track[:, 1], alpha=0.7)

        # save image; plain concatenation, so _output_dir must end with '/'
        print('[INFO] Saving image ...')
        plt.savefig(self._output_dir + "%d.png" % tod_id)
        print('[INFO] Image saved: %s%d.png' % (self._output_dir, tod_id))
def execute(self):
    """Build a `timeseries` lookup closure and publish it on the data store.

    The closure returns (time, d1, d2, d3, d4) for one pixel: the two 90 GHz
    and the two 150 GHz detector timestreams, mean-subtracted over the
    (buffered) window.
    """
    # NOTE(review): season is hard-coded to '2017' while the array comes from
    # the context -- confirm this is intentional.
    self._pr = PixelReader(season='2017', array=self.get_context().get_array())
    print('[INFO] Getting timeseries...')
    tod_data = self.get_store().get(self._tod_key)  # retrieve tod_data

    def timeseries(pixel_id, s_time, e_time, buffer=10):
        """Return (time, d1, d2, d3, d4) for pixel_id over
        [s_time - buffer, e_time + buffer)."""
        start_time = s_time - buffer
        end_time = e_time + buffer

        a1, a2 = self._pr.get_f1(pixel_id)  # low-frequency detector pair
        b1, b2 = self._pr.get_f2(pixel_id)  # high-frequency detector pair
        d1, d2 = tod_data.data[a1], tod_data.data[a2]
        d3, d4 = tod_data.data[b1], tod_data.data[b2]

        # try to remove the mean from start_time to end_time.
        # NOTE(review): the in-place subtraction mutates tod_data.data;
        # repeated calls with the same window are idempotent (the window
        # mean becomes exactly zero after the first call).
        d1 -= np.mean(d1[start_time:end_time])
        d2 -= np.mean(d2[start_time:end_time])
        d3 -= np.mean(d3[start_time:end_time])
        d4 -= np.mean(d4[start_time:end_time])

        time = tod_data.ctime - tod_data.ctime[0]
        time = time[start_time:end_time]

        d_1 = d1[start_time:end_time]
        d_2 = d2[start_time:end_time]
        d_3 = d3[start_time:end_time]
        d_4 = d4[start_time:end_time]

        return time, d_1, d_2, d_3, d_4

    self.get_store().set(self._output_key, timeseries)
class TimeSeries(Routine):
    """A routine that stores a function on the data store which extracts the
    timeseries of a pixel in its 4 frequency channels."""

    def __init__(self, tod_key, output_key):
        """
        :param tod_key: string - store key of the TOD data to read
        :param output_key: string - store key under which the closure is saved
        """
        Routine.__init__(self)
        self._tod_key = tod_key
        self._pr = None
        self._output_key = output_key

    def execute(self, store):
        """Publish a `timeseries(pixel_id, s_time, e_time, buffer)` closure."""
        # Default PixelReader() is used for uncovered TODs; for covered TODs
        # construct it with season/array from the context instead.
        self._pr = PixelReader()
        print('[INFO] Getting timeseries...')
        tod_data = store.get(self._tod_key)  # retrieve tod_data

        def timeseries(pixel_id, s_time, e_time, buffer=10):
            """Return (time, d1, d2, d3, d4) for pixel_id over
            [s_time - buffer, e_time + buffer)."""
            start_time = s_time - buffer
            end_time = e_time + buffer

            a1, a2 = self._pr.get_f1(pixel_id)  # low-frequency detector pair
            b1, b2 = self._pr.get_f2(pixel_id)  # high-frequency detector pair
            d1, d2 = tod_data.data[a1], tod_data.data[a2]
            d3, d4 = tod_data.data[b1], tod_data.data[b2]

            # try to remove the mean from start_time to end_time
            # (in place -- mutates tod_data.data, idempotent per window)
            d1 -= np.mean(d1[start_time:end_time])
            d2 -= np.mean(d2[start_time:end_time])
            d3 -= np.mean(d3[start_time:end_time])
            d4 -= np.mean(d4[start_time:end_time])

            time = tod_data.ctime - tod_data.ctime[0]
            time = time[start_time:end_time]

            d_1 = d1[start_time:end_time]
            d_2 = d2[start_time:end_time]
            d_3 = d3[start_time:end_time]
            d_4 = d4[start_time:end_time]

            return time, d_1, d_2, d_3, d_4

        store.set(self._output_key, timeseries)
def execute(self, store):
    """Plot every glitch event that affects the configured detector.

    Looks up the detector's time constant, then for each peak whose affected
    pixels include self._detuid, plots the four frequency-channel series.
    """
    print('[INFO] Plotting all glitches affecting detector ...')
    taus = store.get(self._time_constants)
    # NOTE(review): if no entry matches self._detuid, `tau` stays unbound and
    # plotter() below raises NameError -- confirm a match always exists.
    for tc in taus:
        if tc['det_uid'] == self._detuid:
            tau = tc['tau']

    tod_data = store.get(self._tod_key)  # retrieve tod_data
    cuts = store.get(self._cosig_key)    # retrieve coincident-signal cuts
    array_name = self.get_array()
    peaks = cuts['peaks']
    self._pr = PixelReader()

    def cs_cuts():
        # fresh lookup of the coincident signals from the store
        cuts = store.get(self._cosig_key)
        return cuts['coincident_signals']

    timeseries = store.get(self._timeseries_key)

    def plotter(pid, tau, start_time, end_time):
        # single call instead of five identical ones (the closure
        # recomputes the same window each time)
        x, y1, y2, y3, y4 = timeseries(pid, start_time, end_time)
        plt.title('Pixel affected from ' + str(start_time) + '-' +
                  str(end_time) + ', Pixel ' + str(pid))
        plt.xlabel('TOD track:' + str(self._tag) + ' Tau:' + str(tau))
        plt.plot(x, y1, '.-', label='90 GHz')
        plt.plot(x, y2, '.-', label='90 GHz')
        plt.plot(x, y3, '.-', label='150 GHz')
        plt.plot(x, y4, '.-', label='150 GHz')
        plt.legend()
        plt.show()

    cs = cuts['coincident_signals']
    for peak in peaks:
        stime = peak[0]
        etime = peak[1]
        pixels = pixels_affected_in_event(cs, peak)
        for pixel in pixels:
            if pixel == self._detuid:
                plotter(pixel, tau, stime, etime)
def execute(self, store):
    """For each event of interest, overlay the normalized decay curves of all
    affected pixels' four frequency channels on self._ax."""
    # pixel reader for the current season/array
    reader = PixelReader(season='2017', array=self.get_array())

    # retrieve events data from the data store
    events_data = store.get(self._event_key)
    events = events_data['events']
    nsamps = events_data['nsamps']

    # retrieve tod data
    tod_data = store.get(self._tod_key)

    # loop through the events and keep only those whose id is of interest
    for event in events:
        if event['id'] not in self._list_of_events:
            continue
        self.logger.info('Found event: %s' % event['id'])

        # retrieve event information
        affected = event['pixels_affected']
        t_start = event['start']
        t_end = event['end']

        # loop over pixels
        for pid in affected:
            # get the four frequency-channel series for this pixel
            ctime, d1, d2, d3, d4 = timeseries(tod_data, pid, t_start,
                                               t_end, reader, buffer=20)
            for series in (d1, d2, d3, d4):
                # rescale into [0, 1]
                lo, hi = np.min(series), np.max(series)
                series = (series - lo) / (hi - lo)
                # keep only the decay, starting at the maximum
                peak_idx = np.argmax(series)
                tail = series[peak_idx:]
                tail_x = np.arange(0, len(tail))
                # linear interpolation onto a fine grid for a smooth curve
                f = interp1d(tail_x, tail)
                fine_x = np.linspace(0, len(tail) - 1, 100)
                self._ax.plot(fine_x, f(fine_x), 'r-', alpha=0.1)
def execute(self, store):
    """Plot, event by event, the timestreams of every pixel affected by each
    glitch; beginning/end re-biasing glitches are trimmed away first."""
    array_name = self.get_array()
    reader = PixelReader(season='2017', array=array_name)
    self.logger.info('Plotting glitches ...')

    # retrieve tod_data and the detected events
    tod_data = store.get(self._tod_key)
    events_data = store.get(self._event_key)
    events = events_data['events']
    nsamps = events_data['nsamps']

    # plot all pixels affected given an array of pixel ids
    # and a starting time and ending time
    plt.figure(figsize=(8, 8))

    def plotter(ax, pixels, start_time, end_time):
        for pid in pixels:
            ctime, d1, d2, d3, d4 = timeseries(tod_data, pid, start_time,
                                               end_time, reader)
            ax.set_title('Pixels affected from ' + str(start_time) + '-' +
                         str(end_time) + ' at 90 GHz')
            ax.set_xlabel('TOD_ID: %d TOD_NAME: %s' %
                          (self.get_id(), self.get_name()))  # CHANGE TOD TRACK NAME
            ax.plot(d1, '.-')
            ax.plot(d2, '.-')
            ax.plot(d3, '.-')
            ax.plot(d4, '.-')

    # trim the beginning and ending glitches, these are usually related to
    # re-biasing and not interesting
    TRIM = 100
    events_trim = [e for e in events
                   if (e['start'] > TRIM and e['end'] < nsamps - TRIM)]
    self.logger.info('nsamps: %d' % nsamps)

    # plot all pixels affected in an event, one by one for all events
    for event in events_trim:
        self.logger.info(event)
        fig, ax = plt.subplots()
        plotter(ax, event['pixels_affected'], event['start'], event['end'])
        fig.savefig("outputs/nSig_10/plots/%s.png" % event['id'])
        plt.close('all')
def execute(self):
    """Correlate every event's average signal against the template and keep
    the events whose four channel coefficients all reach the upper threshold."""
    print('[INFO] Checking for correlation ...')
    self._pr = PixelReader(season='2017', array=self.get_context().get_array())
    tod_data = self.get_store().get(self._tod_key)  # retrieve tod_data
    events = self.get_store().get(self._input_key)
    peaks = [event['peak'] for event in events]
    timeseries = self.get_store().get(self._timeseries_key)

    def avg_signal(pixels, start_time, end_time):
        """Return (x, y1, y2, y3, y4) for the event window.

        NOTE(review): the accumulators are re-zeroed on every loop pass, so
        effectively only the LAST pixel contributes, and each channel is then
        divided by its own sample count rather than the number of pixels --
        confirm this is the intended behavior (same pattern exists in the
        original CorrelationFilter).
        """
        for pid in pixels:
            x, y1, y2, y3, y4 = timeseries(pid, start_time, end_time)
            avg_y1, avg_y2, avg_y3, avg_y4 = (np.zeros(len(y1)),
                                              np.zeros(len(y2)),
                                              np.zeros(len(y3)),
                                              np.zeros(len(y4)))
            avg_x = x
            avg_y1 += y1
            avg_y2 += y2
            avg_y3 += y3
            avg_y4 += y4
        x = avg_x
        y1 = avg_y1 / len(avg_y1)
        y2 = avg_y2 / len(avg_y2)
        y3 = avg_y3 / len(avg_y3)
        y4 = avg_y4 / len(avg_y4)
        return x, y1, y2, y3, y4

    def correlation(x1, x2, y1, y2):
        """Best Pearson coefficient of the shorter series slid along the
        longer one (x1/x2 kept for interface parity; they are unused)."""
        ts1 = y1
        ts2 = y2
        l1 = len(ts1)
        l2 = len(ts2)
        if l1 < l2:
            n = l1
            # BUGFIX: include the final alignment i == l2 - l1, which
            # range(0, l2 - l1) used to skip
            return max([np.corrcoef(ts1, ts2[i:n + i])[0][1]
                        for i in range(0, l2 - l1 + 1)])
        elif l2 < l1:
            n = l2
            return max([np.corrcoef(ts1[i:n + i], ts2)[0][1]
                        for i in range(0, l1 - l2 + 1)])
        else:
            return np.corrcoef(ts1, ts2)[0][1]

    avg_x1, avg_y1 = self._template[0], self._template[1]

    possible_events = []
    highlylikely_events = []
    lower_threshold = 0.6
    upper_threshold = self._coeff

    for event in events:
        all_pixels = event['pixels_affected']
        avg_x2, s1, s2, s3, s4 = avg_signal(all_pixels,
                                            event['start'], event['end'])
        coeffs = [correlation(avg_x1, avg_x2, avg_y1, s)
                  for s in (s1, s2, s3, s4)]
        # all() replaces the chained '&' of booleans; semantics (including
        # NaN comparisons evaluating False) are identical
        if (all(lower_threshold <= c for c in coeffs) and
                all(c < upper_threshold for c in coeffs)):
            possible_events.append(event)
        elif all(c >= upper_threshold for c in coeffs):
            highlylikely_events.append(event)

    print('[INFO] Correlation events passed: %d / %d'
          % (len(highlylikely_events), len(peaks)))
    self.get_store().set(self._output_key, highlylikely_events)
class PlotEvents(Routine):
    """A routine that plots events: each event's affected-pixel timestreams
    plus the pixels' positions on the focal plane and in row/column space."""

    def __init__(self, event_key, tod_key):
        """
        :param event_key: string - store key of the detected events
        :param tod_key: string - store key of the TOD data
        """
        Routine.__init__(self)
        self._event_key = event_key
        self._tod_key = tod_key
        self._pr = None

    def initialize(self):
        # construct the pixel reader once before processing begins
        self._pr = PixelReader()

    def execute(self):
        print('[INFO] Plotting glitches ...')
        tod_data = self.get_store().get(self._tod_key)  # retrieve tod_data
        events = self.get_store().get(self._event_key)  # retrieve events

        def timeseries(pixel_id, s_time, e_time, buffer=10):
            """Return (time, d_1): d_1 is the first low-frequency channel of
            the pixel, mean-subtracted over the buffered window."""
            start_time = s_time - buffer
            end_time = e_time + buffer

            a1, a2 = self._pr.get_f1(pixel_id)
            b1, b2 = self._pr.get_f2(pixel_id)
            d1, d2 = tod_data.data[a1], tod_data.data[a2]
            d3, d4 = tod_data.data[b1], tod_data.data[b2]

            # try to remove the mean from start_time to end_time
            # (in place; idempotent per window)
            d1 -= np.mean(d1[start_time:end_time])
            d2 -= np.mean(d2[start_time:end_time])
            d3 -= np.mean(d3[start_time:end_time])
            d4 -= np.mean(d4[start_time:end_time])

            time = tod_data.ctime - tod_data.ctime[0]
            time = time[start_time:end_time]

            d_1 = d1[start_time:end_time]
            d_2 = d2[start_time:end_time]
            d_3 = d3[start_time:end_time]
            d_4 = d4[start_time:end_time]

            return time, d_1

        # PLOTTING FUNCTION: plot all pixels affected, given an array of
        # pixel ids and a starting and ending time
        plt.figure(figsize=(8, 8))
        gridspec.GridSpec(11, 11)

        def plotter(pixels, start_time, end_time):
            plt.subplot2grid((11, 11), (0, 0), colspan=11, rowspan=3)
            for pid in pixels:
                # single call instead of two identical ones
                x, y = timeseries(pid, start_time, end_time)
                plt.title('Pixels affected from ' + str(start_time) + '-' +
                          str(end_time) + ' at 90 GHz')
                plt.xlabel('TOD_ID: %d TOD_NAME: %s' %
                           (self.get_id(), self.get_name()))  # CHANGE TOD TRACK NAME
                plt.plot(x, y, '.-')
            # plt.show()

        # ALL EVENTS: plot all pixels affected in an event, one by one
        for event in events:
            pixels_affected = event['pixels_affected']
            start_time = event['start']
            end_time = event['end']
            plotter(pixels_affected, start_time, end_time)

            print('[INFO] Pixel Location in Row and Col Space:')
            pix_max_amps = []
            pix_max_x = []
            pix_max_y = []
            pix_location_row = []
            pix_location_col = []

            x, y = self._pr.get_x_y_array()
            plt.subplot2grid((11, 11), (4, 0), colspan=7, rowspan=7)
            plt.plot(x, y, 'r.')
            for pid in pixels_affected:
                # peak amplitude of the pixel's first channel in the window
                pixel_max_amp = np.amax(
                    timeseries(pid, start_time, end_time)[1])
                x, y = self._pr.get_x_y(pid)
                pix_max_amps.append(pixel_max_amp)
                pix_max_x.append(x)
                pix_max_y.append(y)
                a1, a2 = self._pr.get_f1(pid)
                b1, b2 = self._pr.get_f2(pid)
                # record row/col of all four detectors belonging to the pixel
                for det in (a1, a2, b1, b2):
                    pix_location_row.append(self._pr.get_row_col(det)[0])
                    pix_location_col.append(self._pr.get_row_col(det)[1])

            # opacity encodes relative peak amplitude
            max_alpha = np.amax(pix_max_amps)
            for n in np.arange(0, len(pix_max_amps)):
                plt.plot(pix_max_x[n], pix_max_y[n], 'b.',
                         alpha=0.8 * (pix_max_amps[n] / max_alpha),
                         markersize=40)

            plt.subplot2grid((11, 11), (6, 8), colspan=4, rowspan=4)
            plt.plot(pix_location_col, pix_location_row, 'b.',
                     alpha=1, markersize=10)
            plt.title('Loctaion of Affected Pixels', fontsize=10)
            plt.xticks(
                np.arange(min(pix_location_col) - 1,
                          max(pix_location_col) + 2, 1.0))
            plt.xlabel('Column', fontsize=8)
            plt.yticks(
                np.arange(min(pix_location_row) - 1,
                          max(pix_location_row) + 2, 1.0))
            plt.ylabel('Row', fontsize=8)
            plt.xticks(fontsize=5)
            plt.yticks(fontsize=5)
            plt.grid(color='k', linewidth=1)
            plt.show()
def initialize(self):
    # construct the pixel reader once before processing begins
    self._pr = PixelReader()
class FindCosigs(Routine):
    """A routine that compiles the coincident signals from cuts"""

    def __init__(self, season="2016", input_key="cuts", output_key="cosig",
                 strict=True, polarized=False):
        """
        :param season: string - observation season passed to PixelReader
        :param input_key: string
        :param output_key: string
        :param strict: boolean - Strict mode means each pixel must have 4 TES
                       detectors. Loose mode means that each frequency has at
                       least one TES detector.
        :param polarized: boolean - True means that we are looking for
                          potentially polarized signals. False means that we
                          only look for un-polarized signals.
        """
        Routine.__init__(self)
        self._input_key = input_key
        self._output_key = output_key
        self._pr = None
        self._strict = strict
        self._polarized = polarized
        self._season = season

    def initialize(self):
        if (not self._strict) and (not self._polarized):
            # give a warning
            print('[WARNING] Using loose mode for unpolarized signals may not be accurate')

    def _combine_freq(self, dets, cuts):
        """Combine the cut vectors of one frequency's detectors into one.

        :param dets: detector uids for one frequency of a pixel (len 0-2)
        :param cuts: cuts object, indexed as cuts.cuts[det_uid]
        :return: combined cut vector, or None when the pixel lacks detectors
                 at this frequency (or lacks the full pair in strict mode).
        """
        if len(dets) == 2:
            cuts_A = cuts.cuts[dets[0]]  # polarization A
            cuts_B = cuts.cuts[dets[1]]  # polarization B
            if self._polarized:
                # polarized spikes may appear at either pol
                return merge_cuts(cuts_A, cuts_B)
            # unpolarized spikes appear in both pols
            return common_cuts(cuts_A, cuts_B)
        if len(dets) == 1 and not self._strict:
            # loose mode: a single detector represents its frequency
            return cuts.cuts[dets[0]]
        return None

    def execute(self):
        # retrieve all cuts
        self._pr = PixelReader(season=self._season,
                               array=self.get_context().get_array())
        cuts_data = self.get_store().get(self._input_key)  # get saved cut data
        cuts = cuts_data['cuts']
        nsamps = cuts_data['nsamps']

        # get all pixels
        pixels = self._pr.get_pixels()

        # initialize dictionary to store coincident signals
        cosig = {}

        # loop through pixels and find cosig for each pixel; a coincident
        # signal must show up at BOTH frequencies of the pixel
        for p in pixels:
            cuts_f1 = self._combine_freq(self._pr.get_f1(p), cuts)
            cuts_f2 = self._combine_freq(self._pr.get_f2(p), cuts)
            if cuts_f1 is not None and cuts_f2 is not None:
                # store coincident signals by pixel id
                cosig[str(p)] = common_cuts(cuts_f1, cuts_f2)

        # cosig may contain empty cut vectors because we didn't enforce it,
        # filter them out now
        cosig_filtered = {}
        for pixel in cosig:
            pixel_cuts = cosig[pixel]
            if len(pixel_cuts) != 0:
                cosig_filtered[pixel] = pixel_cuts

        # save cosig for further processing
        self.get_store().set(self._output_key, cosig_filtered)
        # save the number of sampling points, not graceful
        self.get_store().set("nsamps", nsamps)
class CreateHistogram(Routine):
    """A routine that histograms the total energy (pJ) deposited in each
    coincident-signal event and plots the histogram on log-log axes."""

    def __init__(self, cosig_key, tod_key, event_key="events"):
        """
        :param cosig_key: string - store key of the coincident signals
        :param tod_key: string - store key of the TOD data
        :param event_key: string - store key of the events
        """
        Routine.__init__(self)
        self._event_key = event_key
        self._hist = None
        self._tod_key = tod_key
        self._cosig_key = cosig_key
        self._pr = None

    def initialize(self):
        self._pr = PixelReader()
        self._hist = Hist1D(0, 5, 100)  # change max

    def execute(self):
        cuts = self.get_store().get(self._cosig_key)
        peaks = cuts['peaks']
        cosig = cuts['coincident_signals']
        tod_data = self.get_store().get(self._tod_key)

        def energyseries(pixel, s_time, e_time, buffer=0):
            """Return (time, d1, d2, d3, d4): the pixel's four detector
            series, mean-subtracted over the (unbuffered) event window."""
            start_time = s_time - buffer
            end_time = e_time + buffer

            a1, a2 = self._pr.get_f1(pixel)
            b1, b2 = self._pr.get_f2(pixel)
            d1, d2 = tod_data.data[a1], tod_data.data[a2]
            d3, d4 = tod_data.data[b1], tod_data.data[b2]

            # mean-subtract in place (idempotent per window)
            d1 -= np.mean(d1[start_time:end_time])
            d2 -= np.mean(d2[start_time:end_time])
            d3 -= np.mean(d3[start_time:end_time])
            d4 -= np.mean(d4[start_time:end_time])

            time = tod_data.ctime - tod_data.ctime[0]
            time = time[start_time:end_time]

            d_1 = d1[start_time:end_time]
            d_2 = d2[start_time:end_time]
            d_3 = d3[start_time:end_time]
            d_4 = d4[start_time:end_time]
            return time, d_1, d_2, d_3, d_4

        def total_energy(pid, start_time, end_time):
            """Total energy (pJ) deposited in all four channels of `pid`
            during [start_time, end_time]."""
            # one energyseries call instead of four identical ones
            _, amp_90a, amp_90b, amp_150a, amp_150b = energyseries(
                pid, start_time, end_time, buffer=0)

            duration = end_time - start_time
            joules = []
            # BUGFIX: the original never appended the 150 GHz "b" channel's
            # pJ (Det_pJoules_150_b stayed empty), so that channel was
            # silently excluded from every event's total energy
            for amps in (amp_90a, amp_90b, amp_150a, amp_150b):
                # shift so the channel minimum is zero, sum the samples,
                # convert to pW at the 400 Hz sampling rate, then integrate
                pwatts = np.sum(amps - np.amin(amps)) * 10**(12) / (400.)
                joules.append(pwatts * duration)

            return np.sum(joules)

        event_list = []
        for event in peaks:
            pixels = pixels_affected_in_event(cosig, event)
            s_time = event[0]
            e_time = event[1]
            event_total_energy = 0
            for pixel in pixels:
                # multiply by 6.241509e6 if a conversion from pJ to eV is needed
                event_total_energy += total_energy(pixel, s_time, e_time)
            self._hist.fill(event_total_energy)
            event_list.append(event_total_energy)

        e_min = np.min(event_list)
        e_max = np.max(event_list)
        print('Min energy of event: %s pJoules. Max energy of event: %s pJoules'
              % (e_min, e_max))

    def finalize(self):
        plt.step(*self._hist.data)
        plt.ylabel('Events')
        plt.xlabel('in pJoules')
        plt.xscale('log')
        plt.yscale('log')
        plt.show()
class CorrelationFilter(Routine):
    """A base routine for correlation filter"""

    def __init__(self, cosig_key, tod_key, output_key, all_coeff_output_key,
                 coeff=0.8):
        """
        :param cosig_key: string - store key of the coincident signals
        :param tod_key: string - store key of the TOD data
        :param output_key: string - store key for events passing the filter
        :param all_coeff_output_key: string - store key for all coefficients
        :param coeff: float - upper correlation threshold ("highly likely")
        """
        Routine.__init__(self)
        self._cosig_key = cosig_key
        self._tod_key = tod_key
        self._pr = None
        self._template = None  # (x, y) template, set by subclasses
        self._output_key = output_key
        self._all_coeff_output_key = all_coeff_output_key
        self._coeff = coeff
        self._tag = None  # label set by subclasses (e.g. FRB / CR)

    def initialize(self):
        self._pr = PixelReader()

    def execute(self):
        print('[INFO] Checking for correlation ...')
        tod_data = self.get_store().get(self._tod_key)  # retrieve tod_data
        cuts = self.get_store().get(self._cosig_key)    # retrieve cuts
        peaks = cuts['peaks']

        def timeseries(pixel_id, s_time, e_time, buffer=10):
            """Return (time, d1, d2, d3, d4) over the buffered window."""
            start_time = s_time - buffer
            end_time = e_time + buffer

            a1, a2 = self._pr.get_f1(pixel_id)
            b1, b2 = self._pr.get_f2(pixel_id)
            d1, d2 = tod_data.data[a1], tod_data.data[a2]
            d3, d4 = tod_data.data[b1], tod_data.data[b2]

            # try to remove the mean from start_time to end_time
            d1 -= np.mean(d1[start_time:end_time])
            d2 -= np.mean(d2[start_time:end_time])
            d3 -= np.mean(d3[start_time:end_time])
            d4 -= np.mean(d4[start_time:end_time])

            time = tod_data.ctime - tod_data.ctime[0]
            time = time[start_time:end_time]

            d_1 = d1[start_time:end_time]
            d_2 = d2[start_time:end_time]
            d_3 = d3[start_time:end_time]
            d_4 = d4[start_time:end_time]

            return time, d_1, d_2, d_3, d_4

        def avg_signal(pixels, start_time, end_time):
            """Return (x, y1, y2, y3, y4) for the event window.

            NOTE(review): the accumulators are re-zeroed each pass, so only
            the LAST pixel contributes, and each channel is divided by its
            own sample count, not the pixel count -- confirm intent.
            """
            for pid in pixels:
                x, y1, y2, y3, y4 = timeseries(pid, start_time, end_time)
                avg_y1, avg_y2, avg_y3, avg_y4 = (np.zeros(len(y1)),
                                                  np.zeros(len(y2)),
                                                  np.zeros(len(y3)),
                                                  np.zeros(len(y4)))
                avg_x = x
                avg_y1 += y1
                avg_y2 += y2
                avg_y3 += y3
                avg_y4 += y4
            x = avg_x
            y1 = avg_y1 / len(avg_y1)
            y2 = avg_y2 / len(avg_y2)
            y3 = avg_y3 / len(avg_y3)
            y4 = avg_y4 / len(avg_y4)
            return x, y1, y2, y3, y4

        def correlation(x1, x2, y1, y2):
            """Best Pearson coefficient of the shorter series slid along the
            longer one (x1/x2 kept for interface parity; unused)."""
            ts1 = y1
            ts2 = y2
            l1 = len(ts1)
            l2 = len(ts2)
            if l1 < l2:
                n = l1
                # BUGFIX: include the final alignment i == l2 - l1, which
                # range(0, l2 - l1) used to skip
                return max([
                    np.corrcoef(ts1, ts2[i:n + i])[0][1]
                    for i in range(0, l2 - l1 + 1)
                ])
            elif l2 < l1:
                n = l2
                return max([
                    np.corrcoef(ts1[i:n + i], ts2)[0][1]
                    for i in range(0, l1 - l2 + 1)
                ])
            else:  # l1 == l2
                return np.corrcoef(ts1, ts2)[0][1]

        # Compare the average signal of every event against the template
        # (frb_template or cr_template, loaded by the subclass).
        cs = cuts['coincident_signals']
        avg_x1, avg_y1 = self._template[0], self._template[1]

        events = []
        all_coeffs = []
        lower_threshold = 0.6
        upper_threshold = self._coeff

        for peak in peaks:
            all_pixels = pixels_affected_in_event(cs, peak)
            avg_x2, s1, s2, s3, s4 = avg_signal(all_pixels, peak[0], peak[1])
            coeffs = [correlation(avg_x1, avg_x2, avg_y1, s)
                      for s in (s1, s2, s3, s4)]
            coeff1, coeff2, coeff3, coeff4 = coeffs
            all_coeffs.append(coeff1)
            # all() replaces the chained '&' of booleans; identical semantics
            if (all(lower_threshold <= c for c in coeffs) and
                    all(c < upper_threshold for c in coeffs)):
                print('[INFO] Possible %s %s Coeff = %s %s %s %s'
                      % (self._tag, peak, coeff1, coeff2, coeff3, coeff4))
            elif all(c >= upper_threshold for c in coeffs):
                print('[INFO] Highly Likely %s %s Coeff = %s %s %s %s'
                      % (self._tag, peak, coeff1, coeff2, coeff3, coeff4))
                start = peak[0]
                end = peak[1]
                duration = peak[2]
                number_of_pixels = peak[3]
                ref_index = int((start + end) / 2)  # use as reference point
                id = "%d.%d" % (self.get_id(), start)
                event = {
                    'id': id,
                    'start': start,                       # start index
                    'end': end,                           # end index
                    'duration': duration,
                    'ctime': tod_data.ctime[ref_index],   # ref time
                    'alt': tod_data.alt[ref_index],       # ref alt
                    'az': tod_data.az[ref_index],         # ref az
                    'number_of_pixels': number_of_pixels,
                    'pixels_affected': all_pixels,
                    'coefficients': [coeff1, coeff2, coeff3, coeff4],
                    'tag': self._tag
                }
                events.append(event)

        print('[INFO] Events passed: %d / %d' % (len(events), len(peaks)))
        self.get_store().set(self._output_key, events)
        self.get_store().set(self._all_coeff_output_key, all_coeffs)
class CorrelationFilter(Routine):
    """A base routine for correlation filter (rolling-correlation variant
    that length-matches each event signal against the template).

    NOTE(review): this re-declares CorrelationFilter and therefore shadows
    the earlier class of the same name in this module -- confirm which one
    is meant to be in use.
    """

    def __init__(self, cosig_key, tod_key, output_key, all_coeff_output_key,
                 coeff=0.8):
        """
        :param cosig_key: string - store key of the coincident signals
        :param tod_key: string - store key of the TOD data
        :param output_key: string - store key for events passing the filter
        :param all_coeff_output_key: string - store key for all coefficients
        :param coeff: float - upper correlation threshold ("highly likely")
        """
        Routine.__init__(self)
        self._cosig_key = cosig_key
        self._tod_key = tod_key
        self._pr = None
        self._template = None  # (x, y) template, set by subclasses
        self._output_key = output_key
        self._all_coeff_output_key = all_coeff_output_key
        self._coeff = coeff
        self._tag = None  # label set by subclasses (e.g. FRB / CR)

    def initialize(self):
        self._pr = PixelReader()

    def execute(self):
        print('[INFO] Checking for correlation ...')
        tod_data = self.get_store().get(self._tod_key)  # retrieve tod_data
        cuts = self.get_store().get(self._cosig_key)    # retrieve cuts
        peaks = cuts['peaks']

        def timeseries(pixel_id, s_time, e_time, buffer=10):
            """Return (time, d_1): the pixel's first low-frequency channel,
            mean-subtracted over the buffered window."""
            start_time = s_time - buffer
            end_time = e_time + buffer

            a1, a2 = self._pr.get_f1(pixel_id)
            b1, b2 = self._pr.get_f2(pixel_id)
            d1, d2 = tod_data.data[a1], tod_data.data[a2]
            d3, d4 = tod_data.data[b1], tod_data.data[b2]

            # try to remove the mean from start_time to end_time
            d1 -= np.mean(d1[start_time:end_time])
            d2 -= np.mean(d2[start_time:end_time])
            d3 -= np.mean(d3[start_time:end_time])
            d4 -= np.mean(d4[start_time:end_time])

            time = tod_data.ctime - tod_data.ctime[0]
            time = time[start_time:end_time]

            d_1 = d1[start_time:end_time]
            d_2 = d2[start_time:end_time]
            d_3 = d3[start_time:end_time]
            d_4 = d4[start_time:end_time]

            return time, d_1

        # TEMPLATE (FRB or CR) acts as signal 1
        avg_x1, avg_y1 = self._template[0], self._template[1]
        temp_time = len(avg_x1)

        def avg_signal(pixels, start_time, end_time):
            """Return (x1, y1, x2, y2): template and event signal, matched in
            length by re-buffering the event or zero-padding the template.

            NOTE(review): each loop pass overwrites the accumulators, so only
            the LAST pixel contributes -- confirm intent.
            """
            for pid in pixels:
                x = timeseries(pid, start_time, end_time)[0]
                y = timeseries(pid, start_time, end_time)[1]
                if len(x) < temp_time:
                    # widen the event window to match the template length
                    buff = int(temp_time - len(x)) / 2
                    x, y = timeseries(pid, start_time, end_time, buff)
                    avg_y = np.zeros(len(y))
                    avg_x = x
                    avg_y += y
                    x1 = avg_x1
                    y1 = avg_y1
                else:
                    # zero-pad the template up to the event length
                    buff = int(len(x) - temp_time) / 2
                    pad = [0] * buff
                    x, y = timeseries(pid, start_time, end_time)
                    avg_y = np.zeros(len(y))
                    avg_x = x
                    avg_y += y
                    x1, y1 = [], []
                    x1.extend(pad)
                    x1.extend(avg_x1)
                    x1.extend(pad)
                    y1.extend(pad)
                    y1.extend(avg_y1)
                    y1.extend(pad)
            x2 = avg_x
            y2 = avg_y / len(avg_y)
            return x1, y1, x2, y2

        def correlation(x1, x2, y1, y2):
            """Max absolute rolling Pearson correlation between the signals
            after normalizing both to [0, 1] and resampling onto a common
            100-point grid."""
            y1 = np.asarray(y1, dtype=float)
            y2 = np.asarray(y2, dtype=float)
            # BUGFIX: this normalization was commented out while the
            # interpolators below still referenced norm_y1/norm_y2, which
            # raised a NameError at runtime
            min_y1, max_y1 = np.min(y1), np.max(y1)
            min_y2, max_y2 = np.min(y2), np.max(y2)
            norm_y1 = (y1 - min_y1) / (max_y1 - min_y1)
            norm_y2 = (y2 - min_y2) / (max_y2 - min_y2)

            f1 = interp1d(x1, norm_y1)
            f2 = interp1d(x2, norm_y2)
            points = 100  # 2*max(len(x1), len(x2)) would double the precision
            x1new = np.linspace(min(x1), max(x1), points)
            x2new = np.linspace(min(x2), max(x2), points)
            y1new = f1(x1new)
            y2new = f2(x2new)

            py1 = pd.DataFrame(y1new)
            py2 = pd.DataFrame(y2new)
            # NOTE(review): pd.rolling_cor was removed in pandas >= 0.18;
            # newer pandas needs py1.rolling(5, center=True).corr(py2)
            cor = pd.rolling_cor(py1, py2, 5, center=True)
            coeff = np.array(cor)
            coeff = coeff[np.logical_not(np.isnan(coeff))]
            coeff = abs(coeff)
            max_coeff = max(coeff)
            return max_coeff

        # Compare all events in the track against the template.
        cs = cuts['coincident_signals']

        events = []
        all_coeffs = []
        lower_threshold = 0.6
        upper_threshold = self._coeff

        for peak in peaks:
            all_pixels = pixels_affected_in_event(cs, peak)
            # BUGFIX: the original unpacked into avg_x1/avg_y1, clobbering
            # the template variables that avg_signal closes over and thereby
            # corrupting the template for every subsequent peak
            x1_p, y1_p, avg_x2, avg_y2 = avg_signal(all_pixels,
                                                    peak[0], peak[1])
            coeff = correlation(x1_p, avg_x2, y1_p, avg_y2)
            if lower_threshold <= coeff < upper_threshold:
                print('[INFO] Possible %s %s Coeff = %s'
                      % (self._tag, peak, coeff))
                all_coeffs.append(coeff)
            elif coeff >= upper_threshold:
                all_coeffs.append(coeff)
                print('[INFO] Highly Likely %s %s Coeff = %s'
                      % (self._tag, peak, coeff))
                start = peak[0]
                end = peak[1]
                duration = peak[2]
                number_of_pixels = peak[3]
                ref_index = int((start + end) / 2)  # use as reference point
                id = "%d.%d" % (self.get_id(), start)
                event = {
                    'id': id,
                    'start': start,                       # start index
                    'end': end,                           # end index
                    'duration': duration,
                    'ctime': tod_data.ctime[ref_index],   # ref time
                    'alt': tod_data.alt[ref_index],       # ref alt
                    'az': tod_data.az[ref_index],         # ref az
                    'number_of_pixels': number_of_pixels,
                    'pixels_affected': all_pixels,
                    'coefficient': coeff,
                    'tag': self._tag
                }
                events.append(event)

        print('[INFO] Events passed: %d / %d' % (len(events), len(peaks)))
        self.get_store().set(self._output_key, events)
        self.get_store().set(self._all_coeff_output_key, all_coeffs)
class PlotGlitches(Routine):
    """A routine that plots glitches.

    Interactively plots the 4 frequency channels of every pixel affected
    by a user-selected coincident-signal event.
    """

    def __init__(self,tag, cosig_key, tod_key):
        # tag:       label for this TOD track, used in the plot x-label
        # cosig_key: store key of the coincident-signal cuts (dict with
        #            'peaks' and 'coincident_signals')
        # tod_key:   store key of the TOD data
        Routine.__init__(self)
        self._tag = tag
        self._cosig_key = cosig_key
        self._tod_key = tod_key
        self._pr = None  # PixelReader; created in initialize()

    def initialize(self):
        # default-constructed pixel layout reader (no season/array args)
        self._pr = PixelReader()

    def execute(self):
        print '[INFO] Loading Glitch Data ...'
        tod_data = self.get_store().get(self._tod_key)  # retrieve tod_data
        cuts = self.get_store().get(self._cosig_key)  # retrieve tod_data
        # print('[INFO] pixels affected: ',pixels)
        peaks = cuts['peaks']
        #print('[INFO] peaks: ', peaks)

        # NOTE(review): defined but never called in this method
        def cs_cuts():
            cuts = self.get_store().get(self._cosig_key)
            return cuts['coincident_signals']

        def timeseries(pixel_id, s_time, e_time, buffer=10):
            # Return (time, d_1, d_2, d_3, d_4): the time axis and the four
            # detector channels (2 low-freq + 2 high-freq) of one pixel over
            # [s_time - buffer, e_time + buffer] sample indices.
            start_time = s_time - buffer
            end_time = e_time + buffer

            a1, a2 = self._pr.get_f1(pixel_id)  # low-freq detector pair
            b1, b2 = self._pr.get_f2(pixel_id)  # high-freq detector pair
            d1, d2 = tod_data.data[a1], tod_data.data[a2]
            d3, d4 = tod_data.data[b1], tod_data.data[b2]

            # try to remove the mean from start_time to end_time
            # NOTE(review): -= mutates tod_data.data rows in place
            d1 -= np.mean(d1[start_time:end_time])
            d2 -= np.mean(d2[start_time:end_time])
            d3 -= np.mean(d3[start_time:end_time])
            d4 -= np.mean(d4[start_time:end_time])

            # time axis relative to the first sample
            time = tod_data.ctime - tod_data.ctime[0]
            time = time[start_time:end_time]

            d_1 = d1[start_time:end_time]
            d_2 = d2[start_time:end_time]
            d_3 = d3[start_time:end_time]
            d_4 = d4[start_time:end_time]

            """
            UNCOMMENT TO PLOT FOUR CORRESPONDING PIXELS WITH HI-LO FREQ
            plt.plot(time,d_1, '.-', label=str(a1) + ' 90 GHz')
            plt.plot(time, d_2, '.-', label=str(a2) + ' 90 GHz')
            plt.plot(time, d_3, '.-', label=str(b1) + ' 150 GHz')
            plt.plot(time, d_4, '.-', label=str(b2) + ' 150 GHz')
            plt.legend(title='Detector UID')
            plt.show()
            """
            return time, d_1, d_2, d_3, d_4

        """
        PLOTTING FUNCTION
        Plot all pixels affected given an array of pixel ids
        and a starting time and ending time
        """
        def plotter(pixels,start_time,end_time):
            # NOTE(review): timeseries() is called 5 times per pixel here;
            # each call re-applies the in-place mean subtraction
            for pid in pixels:
                x = timeseries(pid,start_time,end_time)[0]
                y1 = timeseries(pid,start_time,end_time)[1]
                y2 = timeseries(pid,start_time,end_time)[2]
                y3 = timeseries(pid,start_time,end_time)[3]
                y4 = timeseries(pid,start_time,end_time)[4]
                plt.title('Pixel affected from ' +str(start_time)+ '-' + str(end_time)+ ', Pixel ' + str(pid))
                plt.xlabel('TOD track:' + str(self._tag)) # CHANGE TOD TRACK NAME
                plt.plot(x,y1,'.-',label='90 GHz')
                plt.plot(x,y2,'.-',label='90 GHz')
                plt.plot(x,y3,'.-',label='150 GHz')
                plt.plot(x,y4,'.-',label='150 GHz')
                plt.legend()
                plt.show()

        """
        ALL EVENTS
        From peaks, find cs, then use cs to find all pixels affected
        then plot all pixels affected in all events in peak one by one
        """
        cs = cuts['coincident_signals']
        """
        for event in peaks:
            all_pixels = pixels_affected_in_event(cs,event)
            plotter(all_pixels, event[0], event[1])
        """

        """
        SPECIFIC EVENT
        To plot specific event, copy event from peaks below
        """
        # interactive loop: the user pastes an event list
        # [start, end, duration, n_pixels] copied from the printed peaks
        e = raw_input('Please copy the event list to plot 4 freq channels:')
        event = json.loads(e)
        stime = event[0]
        etime = event[1]
        pixels = pixels_affected_in_event(cs, event)
        print 'Pixels Affected:', pixels
        plotter(pixels, stime, etime)
        self._pr.plot(pixels)  # highlight affected pixels on the array layout
        plt.show()

        y_n = ' '
        while y_n != 'n':
            y_n = raw_input ("Would you like to plot another event? Enter y/n...")
            if y_n == 'y':
                e= raw_input('Please copy the event list to plot 4 freq channels:')
                event = json.loads(e)
                stime = event[0]
                etime = event[1]
                pixels = pixels_affected_in_event(cs, event)
                print '[INFO] Plotting Glitch...'
                plotter(pixels, stime, etime)
                self._pr.plot(pixels)
                plt.show()
            else:
                print 'No plot will be displayed!'
        # NOTE(review): dangling triple-quote below is unbalanced within this
        # chunk; presumably it opens a commented-out region -- confirm against
        # the full file before reformatting
        """
def initialize(self): self._pr = PixelReader(season=self._season, array=self._array)
def execute(self): print '[INFO] Loading Glitch Data ...' tod_data = self.get_store().get(self._tod_key) # retrieve tod_data array_name = self.get_array() events = self.get_store().get(self._input_key) peaks = [event['peak'] for event in events] for i in range(len(peaks)): print ('[INFO] Filtered peak: ', i,peaks[i]) self._pr = PixelReader(season= '2017', array=self.get_context().get_array()) plot = raw_input("Do you want to plot an event? Enter y/n: ") if plot == "y": tod_data = self.get_store().get(self._tod_key) # retrieve tod_data events = self.get_store().get(self._input_key) # retrieve tod_data peaks = [event['peak'] for event in events] timeseries = self.get_store().get(self._timeseries_key) """ PLOTTING FUNCTION Plot all pixels affected given an array of pixel ids and a starting time and ending time """ def plotter(pixels,start_time,end_time): for pid in pixels: x = timeseries(pid,start_time,end_time)[0] y1 = timeseries(pid,start_time,end_time)[1] y2 = timeseries(pid,start_time,end_time)[2] y3 = timeseries(pid,start_time,end_time)[3] y4 = timeseries(pid,start_time,end_time)[4] plt.title('Pixel affected from ' +str(start_time)+ '-' + str(end_time)+ ', Pixel ' + str(pid)) plt.xlabel('TOD track:' + str(self._tag)) plt.plot(x,y1,'.-',label='90 GHz') plt.plot(x,y2,'.-',label='90 GHz') plt.plot(x,y3,'.-',label='150 GHz') plt.plot(x,y4,'.-',label='150 GHz') plt.legend() plt.show() """ SPECIFIC EVENT To plot specific event, this interface will ask you to supply the event list, make sure you manually convert the last string to a float or integer """ e = raw_input('Please copy the event index to plot 4 freq channels:') event = events[int(e)] stime = event['start'] etime = event['end'] pixels = event['pixels_affected'] plotter(pixels, stime, etime) self._pr.plot(pixels) plt.show() y_n = ' ' while y_n != 'n': y_n = raw_input ("Would you like to plot another event? 
Enter y/n...") if y_n == 'y': e= raw_input('Please copy the event index to plot 4 freq channels:') event = events[int(e)] stime = event['start'] etime = event['end'] pixels = event['pixels_affected'] print '[INFO] Plotting Glitch...' plotter(pixels, stime, etime) self._pr.plot(pixels) plt.show() else: print 'No plot will be displayed!'
    def initialize(self):
        # Default-constructed pixel layout reader (no season/array args).
        self._pr = PixelReader()
        # 1-D histogram with range [0, 5] and 100 bins, per the constructor
        # arguments -- presumably accumulating correlation coefficients;
        # confirm Hist1D's (min, max, nbins) semantics.
        self._hist = Hist1D(0, 5, 100) #change max
def execute(self): # retrieve all cuts self._pr = PixelReader(season=self._season, array=self.get_context().get_array()) cuts_data = self.get_store().get(self._input_key) # get saved cut data cuts = cuts_data['cuts'] nsamps = cuts_data['nsamps'] # get all pixels pixels = self._pr.get_pixels() # initialize dictionary to store coincident signals cosig = {} # loop through pixels and find cosig for each pixel for p in pixels: dets_f1 = self._pr.get_f1(p) dets_f2 = self._pr.get_f2(p) if self._strict: # strict mode, 4 TES have to be present if len(dets_f1) == 2 and len(dets_f2) == 2: cuts_f1_A = cuts.cuts[ dets_f1[0]] # low freq, polarization A cuts_f1_B = cuts.cuts[ dets_f1[1]] # low freq, polarization B cuts_f2_A = cuts.cuts[ dets_f2[0]] # high freq, polarization A cuts_f2_B = cuts.cuts[ dets_f2[1]] # high freq, polarization B if self._polarized: # if looking for polarized, glitch may occur in either polarization cuts_f1 = merge_cuts( cuts_f1_A, cuts_f1_B ) # polarized spikes may appear at either pol cuts_f2 = merge_cuts(cuts_f2_A, cuts_f2_B) else: # if looking for unpolarized, glitch must occur in both polarizations cuts_f1 = common_cuts( cuts_f1_A, cuts_f1_B ) # unpolarized spikes appear in both pols cuts_f2 = common_cuts(cuts_f2_A, cuts_f2_B) cosig[str(p)] = common_cuts( cuts_f1, cuts_f2) # store coincident signals by pixel id else: # loose mode, at least one TES has to be present each freq if len(dets_f1) == 2 and len(dets_f2) == 2: cuts_f1_A = cuts.cuts[ dets_f1[0]] # low freq, polarization A cuts_f1_B = cuts.cuts[ dets_f1[1]] # low freq, polarization B cuts_f2_A = cuts.cuts[ dets_f2[0]] # high freq, polarization A cuts_f2_B = cuts.cuts[ dets_f2[1]] # high freq, polarization B if self._polarized: # if looking for polarized, glitch may occur in either polarization cuts_f1 = merge_cuts( cuts_f1_A, cuts_f1_B ) # polarized spikes may appear at either pol cuts_f2 = merge_cuts(cuts_f2_A, cuts_f2_B) else: # if looking for unpolarized, glitch must occur in both 
polarizations cuts_f1 = common_cuts( cuts_f1_A, cuts_f1_B ) # unpolarized spikes appear in both pols cuts_f2 = common_cuts(cuts_f2_A, cuts_f2_B) cosig[str(p)] = common_cuts( cuts_f1, cuts_f2) # store coincident signals by pixel id elif len(dets_f1) == 1 and len(dets_f2) == 2: cuts_f1 = cuts.cuts[dets_f1[0]] # low freq, polarization A cuts_f2_A = cuts.cuts[ dets_f2[0]] # high freq, polarization A cuts_f2_B = cuts.cuts[ dets_f2[1]] # high freq, polarization B if self._polarized: # if looking for polarized, glitch may occur in either polarization cuts_f2 = merge_cuts(cuts_f2_A, cuts_f2_B) else: # if looking for unpolarized, glitch must occur in both polarizations cuts_f2 = common_cuts(cuts_f2_A, cuts_f2_B) cosig[str(p)] = common_cuts( cuts_f1, cuts_f2) # store coincident signals by pixel id elif len(dets_f1) == 2 and len(dets_f2) == 1: cuts_f1_A = cuts.cuts[ dets_f1[0]] # low freq, polarization A cuts_f1_B = cuts.cuts[ dets_f1[1]] # low freq, polarization B cuts_f2 = cuts.cuts[ dets_f2[0]] # high freq, polarization A if self._polarized: # if looking for polarized, glitch may occur in either polarization cuts_f1 = merge_cuts(cuts_f1_A, cuts_f1_B) else: # if looking for unpolarized, glitch must occur in both polarizations cuts_f1 = common_cuts(cuts_f1_A, cuts_f1_B) cosig[str(p)] = common_cuts( cuts_f1, cuts_f2) # store coincident signals by pixel id elif len(dets_f1) == 1 and len(dets_f2) == 1: cuts_f1 = cuts.cuts[dets_f1[0]] # low freq, polarization A cuts_f2 = cuts.cuts[ dets_f2[0]] # high freq, polarization A cosig[str(p)] = common_cuts( cuts_f1, cuts_f2) # store coincident signals by pixel id # cosig may contain empty cut vectors because we didn't enforce it, filter them out now cosig_filtered = {} for pixel in cosig: cuts = cosig[pixel] if len(cuts) != 0: cosig_filtered[pixel] = cuts # save cosig for further processing self.get_store().set( self._output_key, cosig_filtered) # save the coincident signals under the output_key self.get_store().set( "nsamps", nsamps) # 
save the number of sampling points, not graceful
def execute(self, store): print '[INFO] Loading Glitch Data ...' tod_data = store.get(self._tod_key) # retrieve tod_data cuts = store.get(self._cosig_key) # retrieve tod_data array_name = self.get_array() peaks = cuts['peaks'] #print('[INFO] All glitches, unfiltered...') #print('[INFO] peaks: ', peaks) #self._pr = PixelReader(season= '2017', array=self.get_context().get_array()) #for covered self._pr = PixelReader() #for uncovered #self._pr = PixelReader(season='2017',array = str(array_name)) #self._pr = PixelReader(season='2017', array=self.get_context().get_array()) plot = raw_input("Do you want to plot an event? Enter y/n: ") if plot == "y": tod_data = store.get(self._tod_key) # retrieve tod_data cuts = store.get(self._cosig_key) # retrieve tod_data peaks = cuts['peaks'] def cs_cuts(): cuts = store.get(self._cosig_key) return cuts['coincident_signals'] timeseries = store.get(self._timeseries_key) """ PLOTTING FUNCTION Plot all pixels affected given an array of pixel ids and a starting time and ending time """ def plotter(pixels, start_time, end_time): for pid in pixels: x = timeseries(pid, start_time, end_time)[0] y1 = timeseries(pid, start_time, end_time)[1] y2 = timeseries(pid, start_time, end_time)[2] y3 = timeseries(pid, start_time, end_time)[3] y4 = timeseries(pid, start_time, end_time)[4] plt.title('Pixel affected from ' + str(start_time) + '-' + str(end_time) + ', Pixel ' + str(pid)) plt.xlabel('TOD track:' + str(self._tag)) plt.plot(x, y1, '.-', label='90 GHz') plt.plot(x, y2, '.-', label='90 GHz') plt.plot(x, y3, '.-', label='150 GHz') plt.plot(x, y4, '.-', label='150 GHz') plt.legend() plt.show() """ SPECIFIC EVENT To plot specific event, this interface will ask you to supply the event list, make sure you manually convert the last string to a float or integer """ cs = cuts['coincident_signals'] e = raw_input( 'Please copy the event list to plot 4 freq channels:') event = json.loads(e) stime = event[0] etime = event[1] pixels = 
pixels_affected_in_event(cs, event) plotter(pixels, stime, etime) self._pr.plot(pixels) plt.show() y_n = ' ' while y_n != 'n': y_n = raw_input( "Would you like to plot another event? Enter y/n...") if y_n == 'y': e = raw_input( 'Please copy the event list to plot 4 freq channels:') event = json.loads(e) stime = event[0] etime = event[1] pixels = pixels_affected_in_event(cs, event) print '[INFO] Plotting Glitch...' plotter(pixels, stime, etime) self._pr.plot(pixels) plt.show() else: print 'No plot will be displayed!'