Example 1
def demo(readLocation="../data/sensor_data/late_small.dat"):
    import dataio

    # Load the dataset, attach the correlation results, and write the
    # updated object back to the same file. bestCorrelation and
    # createNeighbors are defined elsewhere in the surrounding module.
    oData = dataio.loadData(readLocation)
    val = bestCorrelation(oData.data, offsetMax=3, offsetMin=1)

    neighbors = createNeighbors(val)

    oData.correlation = val
    oData.neighbors = neighbors

    dataio.saveData(readLocation, oData)
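
Note: every example in this listing follows the same load-modify-save
pattern against a dataio module whose implementation is not shown. A
minimal sketch, assuming pickle-based serialization and the
loadData(path) / saveData(path, obj) signatures used throughout:

import pickle

def loadData(location):
    # Hypothetical implementation: unpickle a previously saved object.
    with open(location, "rb") as f:
        return pickle.load(f)

def saveData(location, data):
    # Hypothetical implementation: pickle the object to disk.
    with open(location, "wb") as f:
        pickle.dump(data, f)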
Example 2
import time

import bbparser
import bbdata
import dataio


if __name__ == "__main__":
    readLocation = "../data/old_sensor_data_raw/"
    writeLocation = "../data/sensor_data/ss.dat"
    startTime = "2008-02-15 00:00:00"
    endTime = "2008-02-25 23:59:59"

    sensors = [53, 52, 51, 50, 44]
    validDays = [1, 3, 5]

    # Parse the raw logs into a binary sensor matrix restricted to the
    # requested time window and days of the week.
    data, start, end = bbparser.rawDataToBinaryExtended(readLocation,
                                                        #bbparser.allSensors,
                                                        sensors,
                                                        startTime, endTime,
                                                        validDays=validDays,
                                                        verbose=True)
                                                
    sTime = time.ctime(start)
    eTime = time.ctime(end)
    
    print "Data made.  Size is " + str(data.shape)
    print "Time goes from " + str(sTime) + " to " + str(eTime)
    
    tmpData = bbdata.Dataset(data)
    tmpData.startTime = sTime
    tmpData.endTime = eTime
    
    dataio.saveData(writeLocation, tmpData)
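
Note: bbdata.Dataset is used throughout as a plain attribute container;
the examples attach fields such as startTime, correlation, and tdMatrix
after construction. A minimal sketch under that assumption (the real
class is not shown):

class Dataset(object):
    # Hypothetical: wraps the data array and accepts arbitrary
    # attributes assigned later by the calling scripts.
    def __init__(self, data):
        self.data = data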
Example 3
    #directories.append("detectionsOct2007")
    #directories.append("detectionsNov2007")
    #directories.append("detectionsDec2007")
    #directories.append("detectionsJan2008")
    #directories.append("detectionsFeb2008")
    #directories.append("detectionsMar2008")
    #directories.append("detectionsApr2008")

    directories.append("detectionsJan2008-Mar2008")
    #directories.append("detectionsSep2007-Dec2007")

    # Build sensor ids 10-14, 20-24, ..., 100-104.
    for i in range(1, 11):
        for j in range(0, 5):
            sensors.append(i * 10 + j)
    
    print "Sensors:" + str(len(sensors))
    
    tdMatrix = calculateTDMatrix(readLoc, startTime, endTime, interval, directories, sensors)
    print tdMatrix

    oData = None
    
    try:
        oData = dataio.loadData(fileLoc)
    except Exception:
        # Fall back to a fresh Dataset if the file cannot be loaded yet.
        oData = bbdata.Dataset(None)
    
    oData.tdMatrix = tdMatrix
    dataio.saveData(fileLoc, oData)

Example 4
# Fragment: suppress, markov_anneal, dataio, sData, states, obs,
# oData, and readLocation are set up earlier in the script. The two
# leading zeros pad the lists so index i matches the model count.
scores = [0, 0]
entropys = [0, 0]

for i in range(2, 26):
    print "Models:" + str(i)
    bestScore = -1
    # Kept from the original source; not updated in this fragment.
    bestModels = []
    bestData = []
    bestOut = []

    for j in range(2):
        suppress.suppress(2)
        bm, bd, out = markov_anneal.train(sData, i, states, obs,
                                          iterations=9, outliers=False,
                                          voidOutput=False)
        suppress.restore(2)
        mea = markov_anneal.modelErrorAverage(bm, bd, obs)
        entropy = markov_anneal.assignedDataEntropy(bd, out)
        # Lower is better: exponentiated mean model error, weighted by
        # the entropy of the data-to-model assignment.
        score = (2 ** (sum(mea) / (len(mea) * 1.0))) * entropy

        if bestScore == -1 or score < bestScore:
            bestScore = score
            bestEnt = entropy

    print "   best score:" + str(bestScore) + "   best entropy:" + str(bestEnt)
    scores.append(bestScore)
    entropys.append(bestEnt)

oData.scores = scores
oData.entropys = entropys
dataio.saveData(readLocation, oData)
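
Note: the selection score above is the exponentiated mean model error
weighted by the assignment entropy, so smaller is better. The same
formula, factored into a standalone helper for clarity (this helper is
illustrative and not part of the original module):

def annealScore(mea, entropy):
    # 2 ** mean(per-model error), scaled by the entropy of the
    # data-to-model assignment; smaller values indicate a better fit.
    meanError = sum(mea) / float(len(mea))
    return (2 ** meanError) * entropy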
Example 5
"""
Simple program that should only need to be run once for each data run.
Takes data from a given location and calls all necessary functions to
prepare the data for use in different HMM trials.
"""

import dataio
import bbdata

calcCombineData = True
calcCompressedData = True

combineAmount = 2
# The dataset is updated in place: the same file is read and rewritten.
readLocation = "../data/sensor_data/ss.dat"
writeLocation = "../data/sensor_data/ss.dat"

if __name__ == "__main__":

    oData = dataio.loadData(readLocation)
    oData.ad = oData.data

    if calcCombineData:
        print "Calculating combine data with size of " + str(combineAmount)
        averagedData = bbdata.combineData(oData.data, combineAmount)
        oData.ad = averagedData
        dataio.saveData(writeLocation, oData)

    if calcCompressedData:
        print "Calculating compressed data."
        compressedData = bbdata.compressData(oData.ad)
        oData.cd = compressedData
        dataio.saveData(writeLocation, oData)
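
Note: bbdata.combineData is not shown. Given the name and the
combineAmount parameter, a plausible sketch is window-averaging over
consecutive readings; this is an assumption, not the confirmed
implementation:

import numpy

def combineData(data, amount):
    # Hypothetical: average every `amount` consecutive rows of the
    # readings matrix, truncating any leftover rows.
    rows = (data.shape[0] // amount) * amount
    return data[:rows].reshape(-1, amount, data.shape[1]).mean(axis=1)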
Example 6
def save(self):
    # dataio.saveData takes a path and a dataset object in the other
    # examples; self.location and self.data are assumed attributes of
    # the surrounding class.
    dataio.saveData(self.location, self.data)
    return "Data Saved"
Example 7
        # Fragment: the tail of combineData, which writes each block of
        # generated readings into the output matrix.
        data[i * readings:i * readings + readings, :] = tmpData

    return data


if __name__ == "__main__":

    # Distribution, bbdata, dataio, and visualizer are imported at the
    # top of the full module, which this fragment omits.
    dist = []
    saveLocation = "../data/generated/small.dat"
    sensors = 5
    readings = 250

    #dist.append(Distribution(nLoiter = 20, \
    #                        mLoiter = 5, \
    #                        stdLoiter = 0))

    dist.append(Distribution(nWalkLeft=20,
                             mWalkLeft=5,
                             stdWalkLeft=0))

    dist.append(Distribution(nWalkRight=20,
                             mWalkRight=5,
                             stdWalkRight=0))

    data = combineData(dist, readings, sensors)
    tmpData = bbdata.Dataset(data)
    dataio.saveData(saveLocation, tmpData)

    visualizer.drawData(data)
    print "Generated image."
Example 8
    # Fragment: validDays, st, et, compress, sensors, splitLen, models,
    # tdMatrix, and writeLocation, plus the bbdata, markov_anneal,
    # analysis, numpy, and dataio modules, are set up earlier.
    for d in validDays:
        print "Day " + str(d)
        print "  Getting data."
        #Iterate over all valid days.
        cd, td = bbdata.comp(st, et,
                             vDays=[d],
                             comp=compress,
                             sens=sensors)
    
        print "  Splitting."
        #Get the split calculation finished.
        sData = markov_anneal.splitActivityMax(cd, td, splitLen)
        
        print "  Calculating."
        sigma = IntegerRange(0, 2**len(sensors))
        val, counts = analysis.ratio(sData.values(), models, sigma)

        tdMatrix.append(counts)
        
        
    #Save output matrix.
    foo = bbdata.Dataset(None)
    foo.tdMatrix = numpy.array(tdMatrix)
    
    dataio.saveData(writeLocation, foo)