Example #1
import datetime
import os

import bbdata
import dataio
import detections
import markov_anneal

if __name__ == "__main__":

    # st, et, modelLocation, dataLocation, writeLocation, compress, and
    # splitLen are assumed to be defined at module level.
    st = datetime.datetime.strptime(st, "%Y-%m-%d %H:%M:%S")
    et = datetime.datetime.strptime(et, "%Y-%m-%d %H:%M:%S")

    files = os.listdir(modelLocation)

    #Get the sensor blocks
    for f in files:
        print(f)
        #It is a data file.
        if f.split('.')[-1] == 'dat':

            #Open it and grab the models and sensor list
            fn = dataio.loadData(modelLocation + str(f))
            fn.matrixToModel(fn.modelList)

            print "Sensors:" + str(fn.sensors)
            cd, td = bbdata.comp(st, et, \
                    comp = compress, \
                    sens = fn.sensors,
                    readLocation = dataLocation)

            sData = markov_anneal.splitLocalMax(cd, td, splitLen)

            outFile = writeLocation + str(f.split('.')[0]) + '.txt'

            #Make the file.
            detections.write_detections(sData, fn.models, fileName=outFile)
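
For reference, st and et start out as plain timestamp strings in the "%Y-%m-%d %H:%M:%S" format before being parsed at the top of the example; a minimal, self-contained sketch of that parsing (the example values here are made up):

import datetime

# Illustrative values only; in the example above st and et come from the
# surrounding module-level configuration.
st = "2010-01-04 00:00:00"
et = "2010-01-05 08:30:00"

st = datetime.datetime.strptime(st, "%Y-%m-%d %H:%M:%S")
et = datetime.datetime.strptime(et, "%Y-%m-%d %H:%M:%S")
print(et - st)  # 1 day, 8:30:00
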
Example #2
        print(i)
        i += 1
        oldSplit = datetime.datetime.strptime(s[0], "%Y-%m-%d %H:%M:%S")
        newSplit = datetime.datetime.strptime(s[1], "%Y-%m-%d %H:%M:%S")
        tmpDoc = []
        
        suppress.suppress(2)
        #Get the sensor blocks
        for f in files:
            #It is a data file.
            if f.split('.')[-1] == 'dat':

                #Open it and grab the models and sensor list
                fn = dataio.loadData(modelDirectory + str(f))
                fn.matrixToModel(fn.modelList)
                cd, td = bbdata.comp(oldSplit, newSplit,
                                     comp=compress,
                                     sens=fn.sensors,
                                     readLocation=dataDirectory)

                sData = markov_anneal.splitLocalMax(cd, td, splitLen)
                
                #for each split, make a document matrix and append it to the
                #ongoing tdmatrix
                try:
                    val, counts = analysis.ratio(sData.values(), fn.models)
                except Exception:
                    # Fall back to zero-filled vectors when the ratio cannot be computed.
                    counts = [0] * len(fn.models)
                    val = [0] * len(fn.models)
        suppress.restore(2)
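
As an aside, the extension check used in both examples (f.split('.')[-1] == 'dat') can also be written with os.path.splitext from the standard library; a minimal sketch, with an illustrative directory name:

import os

def is_dat_file(name):
    # Equivalent to name.split('.')[-1] == 'dat' for ordinary names like "model1.dat".
    return os.path.splitext(name)[1] == '.dat'

# "models/" is a placeholder; the examples above read from modelLocation / modelDirectory.
files = [f for f in os.listdir("models/") if is_dat_file(f)]
print(files)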