Example 1
def FetchBU3DData(DataPath,
                  facePointMult3=21081,
                  fileCnt=1000,
                  printTime=False,
                  valCnt=400,
                  trainDirName="train_Resampled",
                  testDirName="val_Resampled",
                  landmarkDirName="landmarks"):
    """Load BU3D face scans plus their 83 landmark points.

    Returns (X, Y_, Y): training rows with ground-truth landmarks
    appended, validation rows with zeroed landmark slots, and the
    matching ground-truth validation rows.
    """
    startTime = time.time()
    if printTime:
        print("Loading Data...")
    # Each row is one flattened face (facePointMult3 values) plus
    # 83 landmarks x 3 coordinates, as assembled by combineData().
    X = np.zeros((fileCnt, facePointMult3 + 83 * 3))
    Y = np.zeros((valCnt, facePointMult3 + 83 * 3))
    Y_ = np.zeros((valCnt, facePointMult3 + 83 * 3))
    cnt = 0
    trainDataPath = os.path.join(DataPath, trainDirName)
    testDataPath = os.path.join(DataPath, testDirName)
    landmarkPath = os.path.join(DataPath, landmarkDirName)
    list_dirs = os.walk(trainDataPath)
    for _, _, files in list_dirs:
        for f in files:
            if f.endswith('xyz'):
                if cnt >= fileCnt:
                    break
                bndFile = f[:-3] + 'bnd'
                xyzData = dataio.loadData(os.path.join(trainDataPath, f))
                bndData = dataio.loadData(os.path.join(landmarkPath, bndFile))
                F = combineData(xyzData, bndData)
                X[cnt, :] = F.ravel()
                cnt += 1
    list_dirs = os.walk(testDataPath)
    cnt = 0
    for _, _, files in list_dirs:
        for f in files:
            if f.endswith('xyz'):
                if cnt >= valCnt:
                    break
                bndFile = f[:-3] + 'bnd'
                xyzData = dataio.loadData(os.path.join(testDataPath, f))
                # Landmarks are withheld here; the zeroed slots are what
                # a model is expected to predict.
                bndData = np.zeros((83, 3))
                bndTruth = dataio.loadData(os.path.join(landmarkPath, bndFile))
                F = combineData(xyzData, bndData)
                F_GT = combineData(xyzData, bndTruth)
                Y_[cnt, :] = F.ravel()
                Y[cnt, :] = F_GT.ravel()
                cnt += 1
    print("Data loaded.\nTrain Shape:", X.shape, " | Test Shape:", Y.shape)
    if printTime:
        print("Cost {} seconds.".format(time.time() - startTime))
    return X, Y_, Y  # Train, toLearn, GT
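
A minimal usage sketch (the data path is a placeholder, and the directory
layout is assumed to match the defaults above):

X, Y_toLearn, Y_gt = FetchBU3DData("path/to/BU3D_data", printTime=True)
# X: 1000 training faces with ground-truth landmarks appended.
# Y_toLearn: 400 validation faces with zeroed landmark slots.
# Y_gt: the same validation faces with their true landmarks.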
Example 2
def demo(readLocation="../data/sensor_data/late_small.dat"):
    """Annotate the dataset at readLocation with correlation and
    neighbor structure, then write it back to the same file."""
    import dataio

    oData = dataio.loadData(readLocation)
    val = bestCorrelation(oData.data, offsetMax=3, offsetMin=1)

    neighbors = createNeighbors(val)

    oData.correlation = val
    oData.neighbors = neighbors

    dataio.saveData(readLocation, oData)
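
Note that saveData reuses readLocation, so the demo overwrites its input with
the annotated dataset. A minimal non-destructive variant (the ".annotated"
output path is hypothetical):

oData = dataio.loadData(readLocation)
oData.correlation = bestCorrelation(oData.data, offsetMax=3, offsetMin=1)
oData.neighbors = createNeighbors(oData.correlation)
dataio.saveData(readLocation + ".annotated", oData)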
Example 3
def FetchAllData(TrainDataPath):
    # ExportFace = "ExportBnd"
    curPath = os.getcwd()
    FileCnt = 1000
    X = np.zeros((FileCnt, 23349))
    list_dirs = os.walk(TrainDataPath)
    i = 0
    for _, _, files in list_dirs:
        for f in files:
            if f.endswith('xyz'):
                if i >= FileCnt:
                    break
                bndFile = f[:-3] + 'bnd'
                xyzData = dataio.loadData(os.path.join(TrainDataPath, f))
                bndData = dataio.loadData(os.path.join(TrainDataPath, bndFile),
                                          spliter='\t\t')
                F = combineData(xyzData, bndData)
                X[i, :] = F.ravel()
                i += 1

    return X
Example 4
def FetchXYZData(TestDataPath):
    FileCnt = 20
    # Same row width as FetchAllData above (23349 values per face).
    X = np.zeros((FileCnt, 23349))
    list_dirs = os.walk(TestDataPath)
    i = 0
    for _, _, files in list_dirs:
        for f in files:
            if f.endswith('xyz'):
                if i >= FileCnt:
                    break
                print(f)
                xyzData = dataio.loadData(os.path.join(TestDataPath, f))
                # Landmarks are unknown at test time; fill their slots
                # with zeros.
                bndData = np.zeros((83, 3))
                F = combineData(xyzData, bndData, normalize=False)
                X[i, :] = F.ravel()
                i += 1
    return X
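
A minimal sketch of how these two loaders pair up for a train/predict split
(the paths are placeholders, and the landmark-filling model is hypothetical):

X_train = FetchAllData("path/to/train_xyz_bnd")  # faces + true landmarks
X_test = FetchXYZData("path/to/test_xyz")        # faces + zeroed landmarks
# A model fit on X_train would fill in the zeroed 83 * 3 landmark
# entries at the end of each X_test row.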
Example 5
    #directories.append("detectionsOct2007")
    #directories.append("detectionsNov2007")
    #directories.append("detectionsDec2007")
    #directories.append("detectionsJan2008")
    #directories.append("detectionsFeb2008")
    #directories.append("detectionsMar2008")
    #directories.append("detectionsApr2008")

    directories.append("detectionsJan2008-Mar2008")
    #directories.append("detectionsSep2007-Dec2007")

    for i in range(1, 11):
        for j in range(0, 5):
            sensors.append(i * 10 + j)

    print("Sensors: " + str(len(sensors)))

    tdMatrix = calculateTDMatrix(readLoc, startTime, endTime, interval,
                                 directories, sensors)
    print(tdMatrix)

    oData = None

    try:
        oData = dataio.loadData(fileLoc)
    except Exception:
        # Fall back to a fresh dataset when the cached file is missing
        # or unreadable.
        oData = bbdata.Dataset(None)
    oData.tdMatrix = tdMatrix
    dataio.saveData(fileLoc, oData)
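
The try/except above is a load-or-create pattern; a minimal generic sketch
(the helper name is hypothetical):

def loadOrCreate(path):
    # Return the cached dataset at path, or a fresh empty one.
    try:
        return dataio.loadData(path)
    except Exception:
        return bbdata.Dataset(None)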

Example 6
import markov_anneal
import dataio
import warnings
import os
import random
import suppress
warnings.simplefilter("ignore")

readLocation = "../data/sensor_data/small_54_64.dat"

splitLen = 8

if __name__ == "__main__":

    oData = dataio.loadData(readLocation)
    obs = 2 ** oData.data.shape[1]  # one symbol per binary sensor combination
    states = splitLen

    sData = markov_anneal.splitActivityMax(oData.cd[0:50000], splitLen)
    
    scores = [0, 0]
    entropys = [0, 0]
    
    for i in range(2, 26):
        print("Models: " + str(i))
        bestScore = -1
        bestModels = []
        bestData = []
        bestOut = []
        
Example 7
import myICP
import dataio

# Load two point clouds and rigidly align the second onto the first.
data1 = dataio.loadData("1_calib.asc")
data2 = dataio.loadData("2_calib.asc")

# icp returns the transformed copy of data2 as its third value; the
# first two return values are discarded here.
_, _, data2_ = myICP.icp(data2,
                         data1,
                         maxIteration=50,
                         tolerance=0.00001,
                         controlPoints=1000)
dataio.outputData("2_icp.asc", data2_)
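
A minimal extension sketch, assuming further scans follow the same naming
scheme (the "3_calib.asc" file name is hypothetical):

# Bring a third scan into the common frame of 1_calib.asc.
data3 = dataio.loadData("3_calib.asc")
_, _, data3_ = myICP.icp(data3, data1, maxIteration=50,
                         tolerance=0.00001, controlPoints=1000)
dataio.outputData("3_icp.asc", data3_)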
Example 8
st = "2008-02-01 00:00:00"
et = "2008-03-31 23:59:59"
st = datetime.datetime.strptime(st, "%Y-%m-%d %H:%M:%S")
et = datetime.datetime.strptime(et, "%Y-%m-%d %H:%M:%S")

sensors = [53, 52, 51, 50, 44]
validDays = [0, 1, 2, 3, 4, 5, 6]
compress = 2
counts = [0] * len(validDays)
splitLen = 8
tdMatrix = []


if __name__ == "__main__":
    
    mData = dataio.loadData(modelFile)
    mData.matrixToModel(mData.modelList)
    models = mData.models

    for d in validDays:
        print("Day " + str(d))
        print("  Getting data.")
        # Iterate over all valid days.
        cd, td = bbdata.comp(st, et,
                             vDays=[d],
                             comp=compress,
                             sens=sensors)

        print("  Splitting.")
        # Get the split calculation finished.
        sData = markov_anneal.splitActivityMax(cd, td, splitLen)