Ejemplo n.º 1
0
    def Process(self):
        """Scan the state channel from ``self.gps`` to ``self.lastGPS`` and
        publish ``[gpsStart, gpsEnd]`` segments to the registered observers.

        One sample per second is read; consecutive samples whose value is in
        ``self.flag`` accumulate into ``timeslice``.  A segment is emitted
        when the good-data run ends (if at least ``self.minSlice`` seconds
        long) or as soon as it reaches ``self.maxSlice``.  The first read
        failure is treated as "online, data not yet written" (wait and
        retry); a second consecutive failure is counted as a DAQ problem and
        the channel is reopened one second later.
        """
        itfStatus = FrameIChannel(self.file, self.state_chan, 1., self.gps)
        Info = SV()
        timeslice = 0.
        start = self.gps
        # NOTE(review): `last` is never set to True, so observers always get
        # last=False, even on the final segment -- confirm this is intended.
        last = False
        fails = 0
        iter_fails = 0
        while start <= self.lastGPS:
            try:
                itfStatus.GetData(Info)
            except Exception:  # BUG FIX: narrowed from a bare `except:`
                if iter_fails == 0:  # online
                    logging.warning(
                        "GPS time: %s. Waiting for new acquired data" % start)
                    # NOTE(review): 1000 s wait -- confirm the unit is wanted.
                    time.sleep(1000)
                    iter_fails += 1
                    timeslice += 1
                else:
                    timeslice = 0
                    logging.error("DAQ PROBLEM @GPS %s" % start)
                    fails += 1
                    start += 1.0
                    itfStatus = FrameIChannel(self.file, self.state_chan, 1.,
                                              start)
            else:
                start = Info.GetX(0)
                iter_fails = 0
                # if Info.GetY(0, 0) == 0:
                #    logging.error("MISSING DATA @GPS %s" % start)
                #    fails += 1
                if Info.GetY(0, 0) in self.flag:
                    timeslice += 1.0
                else:
                    # good-data run ended: flush it if long enough
                    if (timeslice >= self.minSlice):
                        gpsEnd = start
                        gpsStart = gpsEnd - timeslice
                        logging.info(
                            "New segment created: Start %s End %s Duration %s"
                            % (gpsStart, gpsEnd, timeslice))
                        self.update_observers([[gpsStart, gpsEnd]], last)
                        logging.error("Total Fails: %s" % fails)
                    timeslice = 0.

                # cap a still-running segment at maxSlice seconds
                if (timeslice >= self.maxSlice):
                    gpsEnd = start
                    gpsStart = gpsEnd - timeslice
                    logging.info(
                        "New science segment created: Start %s End %s Duration %s"
                        % (gpsStart, gpsEnd, timeslice))
                    self.update_observers([[gpsStart, gpsEnd]], last)
                    timeslice = 0.

                if start == self.lastGPS:
                    logging.info("Segment creation completed")
                    self.unregister_all()
                    break
Ejemplo n.º 2
0
 def test_something(self):
     """Whitening parameter estimation on Gaussian noise yields sigma > 0."""
     n_est = 100000
     samp_freq = 0.01
     # build the learning time series from white Gaussian noise
     learn = SV(0.0, 1.0 / samp_freq, n_est)
     mean, std_dev = 0, 0.1  # mean and standard deviation
     noise = np.random.normal(mean, std_dev, n_est)
     for idx, value in enumerate(noise):
         learn.FillPoint(0, idx, float(value))
     par = Parameters()
     par.ARorder = 100
     whiten = Whitening(par.ARorder)
     whiten.ParametersEstimate(learn)
     sigma = whiten.GetSigma()
     self.assertTrue(sigma > 0)
Ejemplo n.º 3
0
    def __init__(self, start, sampling, N):
        """Build an SV series descriptor.

        :param start: start time in seconds; coerced to float
        :param sampling: sampling frequency in Hz; coerced to float
        :param N: number of samples; coerced to int
        """
        try:
            self.start = float(start)
        except ValueError:
            logging.info("starting time not defined")
        try:
            self.sampling = float(sampling)
        except ValueError:
            logging.info("sampling not defined")
        try:
            # BUG FIX: a plain assignment (`self.N = N`) can never raise
            # ValueError, which made this handler dead code; coerce to int so
            # a malformed N is actually caught.
            self.N = int(N)
        except ValueError:
            logging.info("length not defined")
        # NOTE(review): if any conversion above failed, the corresponding
        # attribute is left unset and this call raises AttributeError.
        self.SV = SV(self.start, 1.0 / self.sampling, self.N)
Ejemplo n.º 4
0
 def Process(self):
     """Scan the state channel and publish science segments to observers.

     Reads one sample per second; consecutive samples equal to 1 accumulate
     into ``timeslice`` and are flushed as ``[gpsStart, gpsEnd]`` segments
     when the run ends (if >= ``self.minSlice``) or when it reaches
     ``self.maxSlice``.  On a read failure the channel is reopened one
     second earlier and the loop waits for new data.
     """
     itfStatus = FrameIChannel(self.file, self.state_chan, 1., self.gps)
     Info = SV()
     timeslice = 0.
     start = self.gps
     while start <= self.lastGPS:
         try:
             itfStatus.GetData(Info)
         except Exception:  # BUG FIX: narrowed from a bare `except:`
             logging.info("waiting for new acquired data")
             logging.info("GPStime before sleep: %s" % Info.GetX(0))
             tstart = Info.GetX(0)
             itfStatus = FrameIChannel(self.file, self.state_chan, 1., tstart - 1)
             time.sleep(1000)
             logging.info("GPStime after sleep: %s" % Info.GetX(0))
             continue
         # BUG FIX: the original ended the loop body with a bare `continue`
         # that made this update unreachable, so `start` never advanced and
         # the loop could not terminate; the stray `else: continue` branches
         # that skipped it are gone as well.
         start = Info.GetX(0)
         # logging.info("GPStime: %s" % Info.GetX(0))
         if Info.GetY(0, 0) == 1:
             timeslice += 1.0
         else:
             # good-data run ended: flush it if long enough
             if timeslice >= self.minSlice:
                 gpsEnd = Info.GetX(0)
                 gpsStart = gpsEnd - timeslice
                 logging.info(
                     "New science segment created: Start %s End %s Duration %s" % (
                         gpsStart, gpsEnd, timeslice))
                 self.update_observers([[gpsStart, gpsEnd]])
                 timeslice = 0.
         # cap a still-running segment at maxSlice seconds
         if timeslice >= self.maxSlice:
             gpsEnd = Info.GetX(0)
             gpsStart = gpsEnd - timeslice
             logging.info(
                 "New science segment created: Start %s End %s Duration %s" % (gpsStart, gpsEnd, timeslice))
             self.update_observers([[gpsStart, gpsEnd]])
             timeslice = 0.
Ejemplo n.º 5
0
 def Process(self):
     """Build data-quality segments from the state-vector channel.

     Reads one sample per second; seconds whose decoded state satisfies
     ``defineStateVector(..., self.flag)`` accumulate into ``timeslice``.
     When a good-data run ends, a ``[gpsStart, gpsEnd]`` segment of at least
     ``self.minSlice`` seconds is sent to the observers.  At ``lastGPS`` a
     final segment is flushed with last=True and observers are unregistered.
     """
     itfStatus = FrameIChannel(self.file, self.state_chan, 1., self.gps)
     Info = SV()
     timeslice = 0.
     start = self.gps
     last = False
     # BUG FIX: the loop used `start < self.lastGPS`, so the completion
     # branch below (start == self.lastGPS) was unreachable and observers
     # were never unregistered; `<=` makes the final iteration reachable.
     while start <= self.lastGPS:
         try:
             itfStatus.GetData(Info)
         except Exception:  # BUG FIX: narrowed from a bare `except:`
             logging.warning("GPS time: %s. Waiting for new acquired data" %
                             start)
             time.sleep(1000)
         else:
             stateVector = int(Info.GetY(0, 0))
             status = defineStateVector(stateVector, self.flag)
             if start == self.lastGPS:
                 last = True
                 gpsEnd = start
                 gpsStart = gpsEnd - timeslice
                 logging.info("Segment creation completed")
                 self.update_observers([[gpsStart, gpsEnd]], last)
                 self.unregister_all()
                 break
             if status:
                 # NOTE(review): `start` only advances on good seconds --
                 # a long bad stretch leaves it stale; confirm intended.
                 start = Info.GetX(0)
                 timeslice += 1.0
             else:
                 if timeslice >= self.minSlice:
                     gpsEnd = start + 1.0
                     gpsStart = gpsEnd - timeslice
                     logging.info(
                         "New segment created: Start %s End %s Duration %s"
                         % (gpsStart, gpsEnd, timeslice))
                     self.update_observers([[gpsStart, gpsEnd]], last)
                 # both original branches reset the slice; folded together
                 timeslice = 0.
Ejemplo n.º 6
0
    def segmentProcess(self,
                       segment,
                       wavThresh=WaveletThreshold.dohonojohnston):
        """Analyze one ``[gpsStart, gpsEnd]`` segment with the LSL-whitened
        WDF pipeline, writing triggers into a per-segment directory.

        Skips the segment entirely if a ``ProcessEnded.check`` marker already
        exists in the output directory, and creates that marker on
        completion.

        :param segment: two-element sequence ``[gpsStart, gpsEnd]``
        :param wavThresh: wavelet thresholding policy handed to ``wdf``
        """
        gpsStart = segment[0]
        gpsEnd = segment[1]
        logging.info(
            "Analyzing segment: %s-%s for channel %s downslampled at %dHz" %
            (gpsStart, gpsEnd, self.par.channel, self.par.resampling))
        start_time = time.time()
        ID = str(self.par.run) + '_' + str(self.par.channel) + '_' + str(
            int(gpsStart))
        dir_chunk = self.par.outdir + ID + '/'
        self.par.LSLfile = dir_chunk + "LSLcoeff-AR%s-fs%s-%s.txt" % (
            self.par.ARorder, self.par.resampling, self.par.channel)
        # create the output dir
        if not os.path.exists(dir_chunk):
            os.makedirs(dir_chunk)
        if not os.path.isfile(dir_chunk + 'ProcessEnded.check'):
            if os.path.isfile(self.par.LSLfile):
                logging.info('Load LSL parameter')
                LSL = LSLLearning(self.par.ARorder, self.par.sigma,
                                  self.Elambda)
                LSL.Load(self.par.LSLfile)
            else:
                logging.info('Start LSL parameter estimation')
                streamL = FrameIChannel(self.par.file, self.par.channel,
                                        self.learn, gpsStart)
                data = SV()
                data_ds = SV()
                self.par.Noutdata = int(5 * self.par.len * self.par.resampling)
                dsL = downsamplig(self.par)
                self.par.sigma = 0.0
                # keep reading/downsampling chunks until a non-zero rough
                # variance estimate is obtained
                while self.par.sigma == 0.0:
                    streamL.GetData(data)
                    dsL.Process(data, data_ds)
                    # estimate rough sigma
                    y = np.empty(self.par.Noutdata)
                    for j in range(self.par.Noutdata):
                        y[j] = data_ds.GetY(0, j)
                    self.par.sigma = np.std(y) * np.std(
                        y) * self.par.resampling

                logging.info('Rough Estimated sigma= %s' % self.par.sigma)
                LSL = LSLLearning(self.par.ARorder, self.par.sigma,
                                  self.Elambda)
                dataw = SV()
                ######################
                # one learning pass trains the LSL filter, then save it
                streamL.GetData(data)
                dsL.Process(data, data_ds)
                LSL(data_ds, dataw)
                LSL.Save(self.par.LSLfile)
                del data, data_ds, dataw, dsL, streamL
            self.par.sigma = np.sqrt(LSL.GetSigma())
            # sigma for the noise
            logging.info('LSL Estimated sigma= %s' % self.par.sigma)

            ## update the self.parameters to be saved in local json file
            self.par.ID = ID
            self.par.dir = dir_chunk
            self.par.gps = gpsStart
            self.par.gpsStart = gpsStart
            self.par.gpsEnd = gpsEnd

            ######################
            # self.parameter for sequence of data and the resampling
            self.par.Noutdata = int(self.par.len * self.par.resampling)
            ds = downsamplig(self.par)
            # gpsstart = gpsStart - self.par.preWhite * self.par.len
            streaming = FrameIChannel(self.par.file, self.par.channel,
                                      self.par.len, gpsStart)
            data = SV()
            data_ds = SV()
            dataw = SV()
            lsl = LSLfilter(LSL, self.Alambda, self.par.Noutdata, False)
            ###---preheating---###
            # reading data, downsampling and whitening
            streaming.GetData(data)
            ds.Process(data, data_ds)
            for i in range(self.par.preWhite):
                streaming.GetData(data)
                ds.Process(data, data_ds)
                lsl(data_ds, dataw)

            ### WDF process
            WDF = wdf(self.par, wavThresh)
            # WDF=wdf(self.par)
            ## register obesevers to WDF process
            # put 0 to save only metaself.parameters, 1 for wavelet coefficients and 2 for waveform estimation
            savetrigger = SingleEventPrintTriggers(self.par, self.fullPrint)
            parameterestimation = ParameterEstimation(self.par)
            parameterestimation.register(savetrigger)
            WDF.register(parameterestimation)
            filejson = 'parametersUsed.json'
            self.par.dump(self.par.dir + filejson)
            ###Start detection loop
            logging.info("Starting detection loop")
            while data.GetStart() < gpsEnd:
                streaming.GetData(data)
                ds.Process(data, data_ds)
                lsl(data_ds, dataw)
                WDF.SetData(dataw)
                WDF.Process()

            elapsed_time = time.time() - start_time
            timeslice = gpsEnd - gpsStart
            logging.info('analyzed %s seconds in %s seconds' %
                         (timeslice, elapsed_time))
            # marker file: lets a rerun skip this segment
            fileEnd = self.par.dir + "ProcessEnded.check"
            open(fileEnd, 'a').close()
        else:
            logging.info('Segment already processed')
Ejemplo n.º 7
0
print(Config.sections())

filename = Config.get("FileData", 'filename')
# NOTE(review): str.strip removes any of the characters '.', '/', 'd', 'a',
# 't', 'w', 'v' from BOTH ends of the string -- it does NOT remove a
# '../datawav' prefix as this line appears to intend.  Kept as-is so the
# generated parameter-file names do not change; confirm the intent.
fn = filename.strip('../datawav')
print(fn)
#data fraction for AR estimations in seconds
DataFraction = Config.getfloat("Whitening", 'DataFraction')
##read data
sampFreq, snd = wavfile.read(filename)
print(sampFreq, snd.dtype, snd.shape)

n_est = int(DataFraction * sampFreq)
print(n_est)

##Do whitening parameters estimation
learn = SV(0.0, 1.0 / sampFreq, n_est)

for i in range(n_est):
    learn.FillPoint(0, i, float(snd[i]))

# NOTE(review): config key 'ARoder' looks like a typo for 'ARorder', but it
# must match the key actually present in the config file -- verify before
# renaming it.
ARorder = Config.getint("Whitening", 'ARoder')
ADE = pytsa.tsa.ArBurgEstimator(ARorder)
LV = pytsa.tsa.LatticeView(ARorder)
LF = pytsa.tsa.LatticeFilter(LV)
LVfile = "./LVparam-%s.txt" % fn
ARfile = "./ARparam-%s.txt" % fn
estimation = Config.getboolean('Whitening', "estimation")
# getboolean already returns a bool; comparing `== True` was redundant
if estimation:
    print('Start whitening parameters estimation')
    ADE(learn)
    ADE.GetLatticeView(LV)
Ejemplo n.º 8
0
    def segmentProcess(self,
                       segment,
                       wavThresh=WaveletThreshold.dohonojohnston):
        """Analyze one ``[gpsStart, gpsEnd]`` segment with the AR-whitened
        WDF pipeline, writing triggers into a per-segment directory.

        Skips the segment entirely if a ``ProcessEnded.check`` marker already
        exists in the output directory, and creates that marker on
        completion.

        :param segment: two-element sequence ``[gpsStart, gpsEnd]``
        :param wavThresh: wavelet thresholding policy handed to ``wdf``
        """
        gpsStart = segment[0]
        gpsEnd = segment[1]
        logging.info(
            "Analyzing segment: %s-%s for channel %s downslampled at %dHz" %
            (gpsStart, gpsEnd, self.par.channel, self.par.resampling))
        start_time = time.time()
        ID = str(self.par.run) + '_' + str(self.par.channel) + '_' + str(
            int(gpsStart))
        dir_chunk = self.par.outdir + ID + '/'
        # create the output dir
        if not os.path.exists(dir_chunk):
            os.makedirs(dir_chunk)
        if not os.path.isfile(dir_chunk + 'ProcessEnded.check'):
            # self.parameter for whitening and its estimation self.parameters
            whiten = Whitening(self.par.ARorder)
            self.par.ARfile = dir_chunk + "ARcoeff-AR%s-fs%s-%s.txt" % (
                self.par.ARorder, self.par.resampling, self.par.channel)
            self.par.LVfile = dir_chunk + "LVcoeff-AR%s-fs%s-%s.txt" % (
                self.par.ARorder, self.par.resampling, self.par.channel)

            if os.path.isfile(self.par.ARfile) and os.path.isfile(
                    self.par.LVfile):
                logging.info('Load AR parameters')
                whiten.ParametersLoad(self.par.ARfile, self.par.LVfile)
            else:
                logging.info('Start AR parameter estimation')
                ######## read data for AR estimation###############
                # self.parameter for sequence of data.
                # Add a 100.0 seconds delay to not include too much after lock noise in the estimation
                # NOTE(review): for short segments gpsE = gpsEnd - learnlen
                # may fall before gpsStart -- confirm this is acceptable.
                if (gpsEnd - gpsStart >= self.learnlen + 100.0):
                    gpsE = gpsStart + 100.0
                else:
                    gpsE = gpsEnd - self.learnlen
                strLearn = FrameIChannel(self.par.file, self.par.channel,
                                         self.learnlen, gpsE)
                Learn = SV()
                Learn_DS = SV()
                self.par.Noutdata = int(self.par.learn * self.par.resampling)
                ds = downsamplig(self.par)
                strLearn.GetData(Learn)
                ds.Process(Learn, Learn_DS)
                whiten.ParametersEstimate(Learn_DS)
                whiten.ParametersSave(self.par.ARfile, self.par.LVfile)
                del Learn, ds, strLearn, Learn_DS
            # sigma for the noise
            self.par.sigma = whiten.GetSigma()
            logging.info('Estimated sigma= %s' % self.par.sigma)
            ## update the self.parameters to be saved in local json file
            self.par.ID = ID
            self.par.dir = dir_chunk
            self.par.gps = gpsStart
            self.par.gpsStart = gpsStart
            self.par.gpsEnd = gpsEnd

            ######################
            # self.parameter for sequence of data and the resampling
            self.par.Noutdata = int(self.par.len * self.par.resampling)
            ds = downsamplig(self.par)
            # gpsstart = gpsStart - self.par.preWhite * self.par.len
            streaming = FrameIChannel(self.par.file, self.par.channel,
                                      self.par.len, gpsStart)
            data = SV()
            data_ds = SV()
            dataw = SV()
            ###---preheating---###
            # reading data, downsampling and whitening
            for i in range(self.par.preWhite):
                streaming.GetData(data)
                ds.Process(data, data_ds)
                whiten.Process(data_ds, dataw)
            ### WDF process
            WDF = wdf(self.par, wavThresh)
            # WDF=wdf(self.par)
            ## register obesevers to WDF process
            # put 0 to save only metaself.parameters, 1 for wavelet coefficients and 2 for waveform estimation
            savetrigger = SingleEventPrintTriggers(self.par, self.fullPrint)
            parameterestimation = ParameterEstimation(self.par)
            parameterestimation.register(savetrigger)
            WDF.register(parameterestimation)
            filejson = 'parametersUsed.json'
            self.par.dump(self.par.dir + filejson)
            ###Start detection loop
            logging.info("Starting detection loop")
            while data.GetStart() < gpsEnd:
                streaming.GetData(data)
                ds.Process(data, data_ds)
                whiten.Process(data_ds, dataw)
                WDF.SetData(dataw)
                WDF.Process()

            elapsed_time = time.time() - start_time
            timeslice = gpsEnd - gpsStart
            logging.info('analyzed %s seconds in %s seconds' %
                         (timeslice, elapsed_time))
            # marker file: lets a rerun skip this segment
            fileEnd = self.par.dir + "ProcessEnded.check"
            open(fileEnd, 'a').close()
        else:
            logging.info('Segment already processed')
Ejemplo n.º 9
0
print(Config.sections())

filename = Config.get("FileData", 'filename')
# NOTE(review): str.strip removes any of the characters '.', '/', 'd', 'a',
# 't', 'w', 'v' from BOTH ends of the string -- it does NOT remove a
# '../datawav' prefix as this line appears to intend.  Kept as-is so the
# generated parameter-file names do not change; confirm the intent.
fn = filename.strip('../datawav')
print(fn)
#data fraction for AR estimations in seconds
DataFraction = Config.getfloat("Whitening", 'DataFraction')
##read data
sampFreq, snd = wavfile.read(filename)
print(sampFreq, snd.dtype, snd.shape)

n_est = int(DataFraction * sampFreq)
print(n_est)

##Do whitening parameters estimation
learn = SV(0.0, 1.0 / sampFreq, n_est)

for i in range(n_est):
    learn.FillPoint(0, i, float(snd[i]))

# NOTE(review): config key 'ARoder' looks like a typo for 'ARorder', but it
# must match the key actually present in the config file -- verify before
# renaming it.
ARorder = Config.getint("Whitening", 'ARoder')
ADE = pytsa.tsa.ArBurgEstimator(ARorder)
LV = pytsa.tsa.LatticeView(ARorder)
LF = pytsa.tsa.LatticeFilter(LV)
LVfile = "./LVparam-%s.txt" % fn
ARfile = "./ARparam-%s.txt" % fn
estimation = Config.getboolean('Whitening', "estimation")
# getboolean already returns a bool; comparing `== True` was redundant
if estimation:
    print('Start whitening parameters estimation')
    ADE(learn)
    ADE.GetLatticeView(LV)
Ejemplo n.º 10
0
def main():
    """Whiten the configured channel, run the WDF detection loop, cluster the
    resulting triggers and classify them with a pre-trained PCA+GMM model.

    Settings are read from ``fileParameters.json``; the parameters actually
    used are dumped to ``fileParametersUsed.json`` on exit.
    """
    logging.basicConfig(level=logging.INFO)
    logging.info("read Parameters")
    par = Parameters()
    par.load("fileParameters.json")
    # logging.info(par.__dict__)
    # Parameter for sequence of data
    gpsE = float(par.gpsStart) + 10.0
    Learn = SV()
    Learn_DS = SV()
    # logging.info(par.sampling, par.resampling)

    # parameter for whitening and its estimation parameters
    whiten = Whitening(par.ARorder)
    ###Name of the files where the whitening parameters are saved
    LVfile = "./ARparameters/LVstate_%s_%s_%s.txt" % (par.ARorder, par.channel,
                                                      int(par.gps))
    ARfile = "./ARparameters/ARstate_%s_%s_%s.txt" % (par.ARorder, par.channel,
                                                      int(par.gps))
    # NOTE(review): compares against the *string* "True" -- assumes the JSON
    # stores `estimation` as a string rather than a boolean; confirm.
    if par.estimation == "True":
        logging.info('Start AR parameter estimation')
        ######## read data for AR estimation###############
        learnlen = 2.0 * float(par.learn)
        strLearn = FrameIChannel(par.file, par.channel, learnlen, gpsE)
        par.Noutdata = int(par.learn * par.resampling)
        ds = downsamplig(par)
        strLearn.GetData(Learn)
        ds.Process(Learn, Learn_DS)
        whiten.ParametersEstimate(Learn_DS)
        whiten.ParametersSave(ARfile, LVfile)
    else:
        logging.info('Load AR parameter')
        whiten.ParametersLoad(ARfile, LVfile)

    ######################
    # Parameter for sequence of data
    # read data
    par.Noutdata = int(par.len * par.resampling)
    ds = downsamplig(par)
    gpsstart = par.gpsStart - par.len
    streaming = FrameIChannel(par.file, par.channel, par.lenStart, gpsstart)
    data = SV()
    data_ds = SV()
    dataw = SV()
    ###---preheating---###
    streaming.GetData(data)
    ds.Process(data, data_ds)
    whiten.Process(data_ds, dataw)
    ### WDF process
    # sigma for the noise
    par.sigma = whiten.GetSigma()
    logging.info('Estimated sigma= %s' % par.sigma)
    par.Ncoeff = par.window

    ###Start detection loop
    logging.info("Starting detection loop")

    streaming.SetDataLength(par.len)
    startT = data.GetStart()
    ## gpsEnd=par.gpsEnd +par.lenStart
    clf = joblib.load('./pipelines/pca-gmm.pkl')
    WDF = wdf(par)

    # observer wiring: WDF events feed the clustering observer; clustered
    # events feed the trigger printer and the classifier
    observable = Observable()
    observableO = Observable()
    clustering = Clustering(par)
    savetrigger = PrintTriggers(par)
    classifier = Classifier(par, clf)
    observable.register(clustering)
    observableO.register(savetrigger)
    observableO.register(classifier)
    while data.GetStart() < par.gpsEnd:
        streaming.GetData(data)
        ds.Process(data, data_ds)
        whiten.Process(data_ds, dataw)
        WDF.SetData(dataw)
        while WDF.wdf2classify.GetDataNeeded() > 0:
            ev = WDF.FindEvents()
            observable.update_observers(ev)
            cev = clustering.CEV
            observableO.update_observers(cev)
    logging.info(classifier.classified)
    logging.info('Program terminated')
    par.dump("fileParametersUsed.json")
Ejemplo n.º 11
0
                :param filename: name of file where loading the parameters
                :type filename: basestring
                """
        self.filename = filename
        with open(self.filename) as data_file:
            data = json.load(data_file)
        self.__dict__ = data
        return self.__dict__

    def copy(self, param):
        """Deep-copy another object's attribute dict into this instance.

        :param param: source parameters object
        :return: this instance's updated ``__dict__``
        """
        cloned_state = copy.deepcopy(param.__dict__)
        self.__dict__ = cloned_state
        return self.__dict__


# Load configuration and read one chunk of the configured channel.
par = Parameters()
par.load("../cfg/config.json")

# Read len_seconds of data from the channel starting at gps_start_time
# into the Learn series.
Learn = SV()
strLearn = FrameIChannel(par.input, par.channel, par.len_seconds,
                         par.gps_start_time)
strLearn.GetData(Learn)

print(Learn[0])

# output = open(par.output, 'wb')
# pickle.dump(Learn,output)
# output.close()
Ejemplo n.º 12
0
def main(param):
    """Run the WDF trigger pipeline configured by the JSON file *param*.

    Loads parameters, estimates or loads the AR whitening coefficients, then
    streams the channel through downsampling, whitening and the WDF
    detection loop, saving clustered triggers and the used parameters under
    ``par.outdir``.

    :param param: path of the JSON parameter file to load
    """
    start_time = time.time()
    logging.basicConfig(level=logging.INFO)
    logging.info("read Parameters")
    par = Parameters()
    par.load(param)
    ID = str(par.channel) + '_' + str(par.gps)
    par.outdir = par.outdir + ID + '/'
    if not os.path.exists(par.outdir):
        os.makedirs(par.outdir)
    par.ID = ID

    # parameter for whitening and its estimation parameters
    whiten = Whitening(par.ARorder)
    par.ARfile = par.outdir + "ARfile.txt"
    par.LVfile = par.outdir + "LVfile.txt"

    if os.path.isfile(par.ARfile) and os.path.isfile(par.LVfile):
        logging.info('Load AR parameter')
        whiten.ParametersLoad(par.ARfile, par.LVfile)
    else:
        logging.info('Start AR parameter estimation')
        ######## read data for AR estimation###############
        # Parameter for sequence of data
        gpsE = float(par.gps) + 10.0
        Learn = SV()
        Learn_DS = SV()
        learnlen = 2.0 * float(par.learn)
        # read a single second first, only to discover the native sampling
        strLearn = FrameIChannel(par.file, par.channel, 1.0, gpsE)
        strLearn.GetData(Learn)

        par.sampling = int(1.0 / Learn.GetSampling())
        # BUG FIX: was `int(sampling / 2)` -- `sampling` is undefined here
        # (NameError); the freshly computed par.sampling was intended.
        par.resampling = int(par.sampling / 2)

        strLearn = FrameIChannel(par.file, par.channel, learnlen, gpsE)
        par.Noutdata = int(par.learn * par.resampling)
        ds = downsamplig(par)
        strLearn.GetData(Learn)
        ds.Process(Learn, Learn_DS)
        whiten.ParametersEstimate(Learn_DS)
        whiten.ParametersSave(par.ARfile, par.LVfile)

    ######################
    # Parameter for sequence of data
    # read data
    par.Noutdata = int(par.len * par.resampling)
    ds = downsamplig(par)
    gpsstart = par.gpsStart - par.len
    streaming = FrameIChannel(par.file, par.channel, par.lenStart, gpsstart)
    data = SV()
    data_ds = SV()
    dataw = SV()
    ###---preheating---###
    streaming.GetData(data)
    ds.Process(data, data_ds)
    whiten.Process(data_ds, dataw)
    ### WDF process
    # sigma for the noise
    par.sigma = whiten.GetSigma()
    logging.info('Estimated sigma= %s' % par.sigma)
    par.Ncoeff = par.window
    streaming.SetDataLength(par.len)

    # observer wiring: WDF -> clustering -> trigger printer
    WDF = wdf(par)
    clustering = Clustering(par)
    savetrigger = PrintTriggers(par)
    clustering.register(savetrigger)
    WDF.register(clustering)
    ###Start detection loop
    logging.info("Starting detection loop")
    while data.GetStart() < par.gpsEnd:
        streaming.GetData(data)
        ds.Process(data, data_ds)
        whiten.Process(data_ds, dataw)
        WDF.SetData(dataw)
        WDF.Process()
    logging.info('Program terminated')
    par.dump(par.outdir + "fileParametersUsed.json")
    elapsed_time = time.time() - start_time
    timeslice = par.gpsEnd - par.gpsStart
    logging.info('analyzed %s seconds in %s seconds' % (timeslice, elapsed_time))