def Process(self):
    """Scan the state channel one second at a time and publish data-quality segments."""
    itfStatus = FrameIChannel(self.file, self.state_chan, 1., self.gps)
    Info = SV()
    timeslice = 0.
    start = self.gps
    last = False
    fails = 0
    iter_fails = 0
    while start < self.lastGPS:
        try:
            itfStatus.GetData(Info)
        except:
            if iter_fails == 0:
                # online: the next frame may simply not have been written yet
                logging.warning("GPS time: %s. Waiting for new acquired data" % start)
                time.sleep(1000)
                iter_fails += 1
                timeslice += 1
            else:
                # the data never arrived: count a failure and skip one second ahead
                timeslice = 0
                logging.error("DAQ PROBLEM @GPS %s" % start)
                fails += 1
                start += 1.0
                itfStatus = FrameIChannel(self.file, self.state_chan, 1., start)
        else:
            start = Info.GetX(0)
            iter_fails = 0
            if Info.GetY(0, 0) == 0:
                logging.error("MISSING DATA @GPS %s" % start)
                fails += 1
            if Info.GetY(0, 0) in self.flag:
                # state vector among the accepted flags: extend the current segment
                timeslice += 1.0
            else:
                if (timeslice >= self.minSlice):
                    gpsEnd = start
                    gpsStart = gpsEnd - timeslice
                    logging.info("New segment created: Start %s End %s Duration %s"
                                 % (gpsStart, gpsEnd, timeslice))
                    self.update_observers([[gpsStart, gpsEnd]], last)
                    logging.error("Total Fails: %s" % fails)
                timeslice = 0.
            if start == self.lastGPS:
                # close the last (possibly shorter) segment and stop
                last = True
                gpsEnd = start
                gpsStart = gpsEnd - timeslice
                logging.info("Segment creation completed")
                self.update_observers([[gpsStart, gpsEnd]], last)
                self.unregister_all()
                break
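
# The segment bookkeeping in Process() above can be illustrated on a synthetic state-vector
# stream: one sample per second, "timeslice" grows while the value is among the accepted
# flags, and [gpsEnd - timeslice, gpsEnd] is emitted once the run ends and is at least
# minSlice seconds long. This is a self-contained toy for illustration only, not part of
# the pipeline code.
def find_segments(gps0, values, flags, minSlice):
    segments = []
    timeslice = 0.0
    for i, v in enumerate(values):
        gps = gps0 + i  # one state-vector sample per second, as in Process()
        if v in flags:
            timeslice += 1.0
        else:
            if timeslice >= minSlice:
                segments.append([gps - timeslice, gps])
            timeslice = 0.0
    if timeslice >= minSlice:
        # close a run that is still open at the end of the stream
        segments.append([gps0 + len(values) - timeslice, gps0 + len(values)])
    return segments

# Example: a 6 s good-data run inside a 10 s stream
# find_segments(1200000000, [0, 1, 1, 1, 1, 1, 1, 0, 0, 0], {1}, 4)
# -> [[1200000001.0, 1200000007]]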
def Process(self):
    itfStatus = FrameIChannel(self.file, self.state_chan, 1., self.gps)
    Info = SV()
    timeslice = 0.
    start = self.gps
    while start <= self.lastGPS:
        try:
            itfStatus.GetData(Info)
            # logging.info("GPStime: %s" % Info.GetX(0))
            if Info.GetY(0, 0) == 1:
                timeslice += 1.0
            else:
                if (timeslice >= self.minSlice):
                    gpsEnd = Info.GetX(0)
                    gpsStart = gpsEnd - timeslice
                    logging.info("New science segment created: Start %s End %s Duration %s"
                                 % (gpsStart, gpsEnd, timeslice))
                    self.update_observers([[gpsStart, gpsEnd]])
                    timeslice = 0.
                else:
                    continue
            if (timeslice >= self.maxSlice):
                gpsEnd = Info.GetX(0)
                gpsStart = gpsEnd - timeslice
                logging.info("New science segment created: Start %s End %s Duration %s"
                             % (gpsStart, gpsEnd, timeslice))
                self.update_observers([[gpsStart, gpsEnd]])
                timeslice = 0.
            else:
                continue
        except:
            logging.info("waiting for new acquired data")
            logging.info("GPStime before sleep: %s" % Info.GetX(0))
            tstart = Info.GetX(0)
            itfStatus = FrameIChannel(self.file, self.state_chan, 1., tstart - 1)
            time.sleep(1000)
            logging.info("GPStime after sleep: %s" % Info.GetX(0))
            continue
        start = Info.GetX(0)
def Process(self):
    itfStatus = FrameIChannel(self.file, self.state_chan, 1., self.gps)
    Info = SV()
    timeslice = 0.
    start = self.gps
    last = False
    while start < self.lastGPS:
        try:
            itfStatus.GetData(Info)
        except:
            logging.warning("GPS time: %s. Waiting for new acquired data" % start)
            time.sleep(1000)
        else:
            # translate the raw state-vector value into a good/bad data status
            stateVector = int(Info.GetY(0, 0))
            status = defineStateVector(stateVector, self.flag)
            if start == self.lastGPS:
                # close the last segment and notify the observers that we are done
                last = True
                gpsEnd = start
                gpsStart = gpsEnd - timeslice
                logging.info("Segment creation completed")
                self.update_observers([[gpsStart, gpsEnd]], last)
                self.unregister_all()
                break
            if status:
                start = Info.GetX(0)
                timeslice += 1.0
            else:
                if (timeslice >= self.minSlice):
                    gpsEnd = start + 1.0
                    gpsStart = gpsEnd - timeslice
                    logging.info("New segment created: Start %s End %s Duration %s"
                                 % (gpsStart, gpsEnd, timeslice))
                    self.update_observers([[gpsStart, gpsEnd]], last)
                    timeslice = 0.
                else:
                    timeslice = 0.
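
# The variant above relies on defineStateVector(), which is not shown in this section.
# A minimal sketch of the assumed behaviour follows (the actual helper in the project may
# be more elaborate, e.g. checking individual state-vector bits): return True when the
# integer state-vector value is one of the accepted flags, mirroring the
# "Info.GetY(0, 0) in self.flag" test used in the first variant.
def defineStateVector(stateVector, flags):
    """Hypothetical helper: True if the state-vector value marks good data."""
    return stateVector in flags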
def segmentProcess(self, segment, wavThresh=WaveletThreshold.dohonojohnston):
    gpsStart = segment[0]
    gpsEnd = segment[1]
    logging.info("Analyzing segment: %s-%s for channel %s downsampled at %dHz"
                 % (gpsStart, gpsEnd, self.par.channel, self.par.resampling))
    start_time = time.time()
    ID = str(self.par.run) + '_' + str(self.par.channel) + '_' + str(int(gpsStart))
    dir_chunk = self.par.outdir + ID + '/'
    self.par.LSLfile = dir_chunk + "LSLcoeff-AR%s-fs%s-%s.txt" % (
        self.par.ARorder, self.par.resampling, self.par.channel)
    # create the output dir
    if not os.path.exists(dir_chunk):
        os.makedirs(dir_chunk)
    if not os.path.isfile(dir_chunk + 'ProcessEnded.check'):
        if os.path.isfile(self.par.LSLfile):
            logging.info('Load LSL parameter')
            LSL = LSLLearning(self.par.ARorder, self.par.sigma, self.Elambda)
            LSL.Load(self.par.LSLfile)
        else:
            logging.info('Start LSL parameter estimation')
            streamL = FrameIChannel(self.par.file, self.par.channel, self.learn, gpsStart)
            data = SV()
            data_ds = SV()
            self.par.Noutdata = int(5 * self.par.len * self.par.resampling)
            dsL = downsamplig(self.par)
            self.par.sigma = 0.0
            while self.par.sigma == 0.0:
                streamL.GetData(data)
                dsL.Process(data, data_ds)
                # rough sigma estimate: sample variance of the downsampled data
                # scaled by the resampling frequency
                y = np.empty(self.par.Noutdata)
                for j in range(self.par.Noutdata):
                    y[j] = data_ds.GetY(0, j)
                self.par.sigma = np.std(y) * np.std(y) * self.par.resampling
            logging.info('Rough Estimated sigma= %s' % self.par.sigma)
            LSL = LSLLearning(self.par.ARorder, self.par.sigma, self.Elambda)
            dataw = SV()
            ######################
            streamL.GetData(data)
            dsL.Process(data, data_ds)
            LSL(data_ds, dataw)
            LSL.Save(self.par.LSLfile)
            del data, data_ds, dataw, dsL, streamL
            self.par.sigma = np.sqrt(LSL.GetSigma())  # sigma for the noise
        logging.info('LSL Estimated sigma= %s' % self.par.sigma)
        ## update the parameters to be saved in the local json file
        self.par.ID = ID
        self.par.dir = dir_chunk
        self.par.gps = gpsStart
        self.par.gpsStart = gpsStart
        self.par.gpsEnd = gpsEnd
        ######################
        # parameters for the sequence of data and the resampling
        self.par.Noutdata = int(self.par.len * self.par.resampling)
        ds = downsamplig(self.par)
        # gpsstart = gpsStart - self.par.preWhite * self.par.len
        streaming = FrameIChannel(self.par.file, self.par.channel, self.par.len, gpsStart)
        data = SV()
        data_ds = SV()
        dataw = SV()
        lsl = LSLfilter(LSL, self.Alambda, self.par.Noutdata, False)
        ###---preheating---###
        # reading data, downsampling and whitening
        streaming.GetData(data)
        ds.Process(data, data_ds)
        for i in range(self.par.preWhite):
            streaming.GetData(data)
            ds.Process(data, data_ds)
            lsl(data_ds, dataw)
        ### WDF process
        WDF = wdf(self.par, wavThresh)
        # WDF = wdf(self.par)
        ## register observers to the WDF process
        # fullPrint: 0 saves only meta parameters, 1 adds wavelet coefficients,
        # 2 adds the waveform estimation
        savetrigger = SingleEventPrintTriggers(self.par, self.fullPrint)
        parameterestimation = ParameterEstimation(self.par)
        parameterestimation.register(savetrigger)
        WDF.register(parameterestimation)
        filejson = 'parametersUsed.json'
        self.par.dump(self.par.dir + filejson)
        ### Start detection loop
        logging.info("Starting detection loop")
        while data.GetStart() < gpsEnd:
            streaming.GetData(data)
            ds.Process(data, data_ds)
            lsl(data_ds, dataw)
            WDF.SetData(dataw)
            WDF.Process()
        elapsed_time = time.time() - start_time
        timeslice = gpsEnd - gpsStart
        logging.info('analyzed %s seconds in %s seconds' % (timeslice, elapsed_time))
        fileEnd = self.par.dir + "ProcessEnded.check"
        open(fileEnd, 'a').close()
    else:
        logging.info('Segment already processed')
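
# Hypothetical wiring of the two stages in this section: Process() publishes
# [gpsStart, gpsEnd] pairs through update_observers(), and an observer on the analysis
# side runs segmentProcess() on each pair. The adapter below is only a sketch; the
# project's observer base class and callback name (here "update") are assumptions.
class SegmentObserver(object):
    def __init__(self, analysis):
        self.analysis = analysis  # object exposing segmentProcess(segment)

    def update(self, segments, last=False):
        # segments is a list of [gpsStart, gpsEnd] pairs emitted by Process()
        for segment in segments:
            self.analysis.segmentProcess(segment)
        if last:
            logging.info("Last segment received, analysis completed")

# Usage (names are illustrative):
#   finder = SegmentFinder(...)      # provides Process() / update_observers()
#   finder.register(SegmentObserver(analysis))
#   finder.Process()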