model_num_iterations = modelPickle['model_num_iterations'] kernelString = modelPickle['kernelString'] Q = modelPickle['Q'] economy_save = True pose_index = [''] pose_selection = 0 # # Creates a SAMpy object mySAMpy = SAMDriver_interaction(False, imgH=imgH, imgW=imgW, imgHNew=imgHNew, imgWNew=imgWNew) # # Reading face data, preparation of data and training of the model mySAMpy.readData(dataPath, participantList, pose_index) minImages = mySAMpy.Y.shape[1] Ntr = int(minImages * ratioData / 100) Ntest = minImages - Ntr allPersonsY = mySAMpy.Y allPersonsL = mySAMpy.L for i in range(len(participantList)): #print participantList[i] mySAMpy.Y = allPersonsY[:, :, i, None] mySAMpy.L = allPersonsL[:, :, i, None] (Yalli, Lalli, YtestAlli, LtestAlli) = mySAMpy.prepareData(model_type, Ntr, pose_selection,
image_suffix = modelPickle['image_suffix'] model_type = modelPickle['model_type'] model_num_inducing = modelPickle['num_inducing'] model_init_iterations = modelPickle['model_init_iterations'] model_num_iterations = modelPickle['model_num_iterations'] kernelString = modelPickle['kernelString'] Q = modelPickle['Q'] economy_save = True pose_index=[''] pose_selection = 0 # # Creates a SAMpy object mySAMpy = SAMDriver_interaction(False, imgH = imgH, imgW = imgW, imgHNew = imgHNew, imgWNew = imgWNew) # # Reading face data, preparation of data and training of the model mySAMpy.readData(dataPath, participantList, pose_index) minImages = mySAMpy.Y.shape[1] Ntr = int(minImages*ratioData/100) Ntest = minImages - Ntr allPersonsY = mySAMpy.Y; allPersonsL = mySAMpy.L; for i in range(len(participantList)): #print participantList[i] mySAMpy.Y = allPersonsY[:,:,i,None] mySAMpy.L = allPersonsL[:,:,i,None] (Yalli, Lalli, YtestAlli, LtestAlli) = mySAMpy.prepareData(model_type, Ntr, pose_selection, randSeed=2) if(i==0):
# Specification of model type and training parameters.
# NOTE(review): this chunk's newlines had been stripped (the whole block was
# one unparseable physical line); reconstructed here with standard formatting.
model_type = 'mrd'
model_num_inducing = 30
model_num_iterations = 700
model_init_iterations = 2000
fname = modelPath + '/models/' + 'mActions_' + model_type + '_exp' + str(experiment_number)  # + '.pickle'

# Enable to save the model and visualise GP nearest neighbour matching
save_model = True
economy_save = True  # ATTENTION!! This is still BETA!!
visualise_output = False
test_mode = False

# Reading face data, preparation of data and training of the model
mySAMpy.readData(root_data_dir, participant_index, pose_index)
(Yall, Lall, YtestAll, LtestAll) = mySAMpy.prepareData(model_type, Ntr,
                                                       pose_selection,
                                                       randSeed=experiment_number)

# Build an index of sample positions per unique label.
Lunique = numpy.unique(mySAMpy.L)
L_index = dict()
for i in range(len(Lunique)):
    # NOTE(review): this compares L against the loop index i, not against
    # Lunique[i]. That is only correct if labels are exactly 0..n-1;
    # otherwise it looks like a bug — confirm against the label encoding.
    L_index[Lunique[i]] = numpy.where(mySAMpy.L == i)[0]

mm = []
for i in range(len(Lunique)):
    print('# Considering label: ' + str(Lunique[i]))
    cur = SAMDriver_interaction(True, imgH=400, imgW=400, imgHNew=200,
                                imgWNew=200,
                                inputImagePort="/CLM/imageSeg/out",
                                openPorts=False)
    ##############
    # NOTE(review): L_index is keyed by Lunique[i] above but indexed by i
    # here — consistent only when labels are 0..n-1; verify.
    Y_cur = Yall[L_index[i], :].copy()
    Ytest_cur = YtestAll[L_index[i], :].copy()
if('.pickle' in modelPath): fname = '/'.join(modelPath.split('/')[:-1]) + '/' + dataPath.split('/')[-1] + '__' + trainName + '__' + model_type + '__exp' + str(experiment_number) else: fname = modelPath + dataPath.split('/')[-1] + '__' + trainName + '__' + model_type + '__exp' + str(experiment_number) #+ '.pickle' print fname # Enable to save the model and visualise GP nearest neighbour matching save_model = False economy_save = True visualise_output = False test_mode = True # Reading face data, preparation of data and training of the model mySAMpy.readData(root_data_dir, participant_index, pose_index) minImages = mySAMpy.Y.shape[1] Ntr = int(minImages*ratioData/100) Ntest = minImages - Ntr allPersonsY = mySAMpy.Y; allPersonsL = mySAMpy.L; for i in range(len(participantList)): #print participantList[i] mySAMpy.Y = allPersonsY[:,:,i,None] mySAMpy.L = allPersonsL[:,:,i,None] (Yalli, Lalli, YtestAlli, LtestAlli) = mySAMpy.prepareData(model_type, Ntr, pose_selection, randSeed=experiment_number) if(i==0):