# NOTE(review): extraction artifact — this chunk is whitespace-flattened (newlines lost),
# starts mid-call (the leading `imgHNew=imgHNew, imgWNew=imgWNew)` closes an invocation
# whose opening is not visible here) and is a truncated duplicate of the fuller chunk
# below (it stops before the `LtestAll` vstack). Preserved verbatim rather than reflowed;
# reconcile against the original file — TODO confirm which copy is canonical.
imgHNew=imgHNew, imgWNew=imgWNew) # # Reading face data, preparation of data and training of the model mySAMpy.readData(dataPath, participantList, pose_index) minImages = mySAMpy.Y.shape[1] Ntr = int(minImages * ratioData / 100) Ntest = minImages - Ntr allPersonsY = mySAMpy.Y allPersonsL = mySAMpy.L for i in range(len(participantList)): #print participantList[i] mySAMpy.Y = allPersonsY[:, :, i, None] mySAMpy.L = allPersonsL[:, :, i, None] (Yalli, Lalli, YtestAlli, LtestAlli) = mySAMpy.prepareData(model_type, Ntr, pose_selection, randSeed=2) if (i == 0): Yall = Yalli.copy() Lall = Lalli.copy() YtestAll = YtestAlli.copy() LtestAll = LtestAlli.copy() else: Yall = np.vstack([Yall, Yalli]) Lall = np.vstack([Lall, Lalli]) YtestAll = np.vstack([YtestAll, YtestAlli])
# Build the SAMpy driver object used for the face-interaction training run.
mySAMpy = SAMDriver_interaction(False, imgH=imgH, imgW=imgW,
                                imgHNew=imgHNew, imgWNew=imgWNew)

# Load the face images for all participants, then derive the train/test
# split sizes from the per-person image count and the requested ratio.
mySAMpy.readData(dataPath, participantList, pose_index)
minImages = mySAMpy.Y.shape[1]
Ntr = int(minImages * ratioData / 100)
Ntest = minImages - Ntr

# Keep the full multi-person arrays aside while the driver is pointed at
# one person at a time below.
allPersonsY = mySAMpy.Y
allPersonsL = mySAMpy.L

for person in range(len(participantList)):
    # Slice out this person's images/labels; `None` keeps a singleton axis
    # so prepareData sees the expected rank.
    mySAMpy.Y = allPersonsY[:, :, person, None]
    mySAMpy.L = allPersonsL[:, :, person, None]
    Yalli, Lalli, YtestAlli, LtestAlli = mySAMpy.prepareData(
        model_type, Ntr, pose_selection, randSeed=2)
    if person == 0:
        # First person seeds the accumulated train/test arrays.
        Yall = Yalli.copy()
        Lall = Lalli.copy()
        YtestAll = YtestAlli.copy()
        LtestAll = LtestAlli.copy()
    else:
        # Later people are stacked row-wise onto the accumulators.
        Yall = np.vstack([Yall, Yalli])
        Lall = np.vstack([Lall, Lalli])
        YtestAll = np.vstack([YtestAll, YtestAlli])
        LtestAll = np.vstack([LtestAll, LtestAlli])

# Release the large per-person copy of the image data.
allPersonsY = None
# NOTE(review): extraction artifact — this chunk is whitespace-flattened (newlines lost)
# and begins mid-loop-body: `cur`, `Ytest_cur`, `fname` and the index `i` are bound by an
# enclosing per-label loop that is not visible here, so the correct indentation cannot be
# reconstructed safely. It also largely duplicates the cleaner chunk below (this copy has
# an extra trailing print("")). Preserved verbatim. It appears to: normalise the test
# data with the training mean/std, stash arrays on the model wrapper `cur`, train it,
# append to `mm`, then print a cross-familiarity matrix over all label models — TODO
# confirm against the original, un-flattened file.
# Normalise test data similarly to training data Ytestn_cur = Ytest_cur - Ymean_cur Ytestn_cur /= Ystd_cur cur.Ymean = Ymean_cur cur.Ystd = Ystd_cur # As above but for the labels #Lmean_cur = L_cur.mean() #Ln_cur = L_cur - Lmean_cur #Lstd_cur = Ln_cur.std() #Ln_cur /= Lstd_cur #Ltestn_cur = Ltest_cur - Lmean_cur #Ltestn_cur /= Lstd_cur cur.X=None cur.Y = {'Y':Yn_cur} cur.Ytestn = {'Ytest':Ytestn_cur} cur.Ltest = {'Ltest':Ltest_cur} fname_cur = fname + '_L' + str(i) cur.training(model_num_inducing, model_num_iterations, model_init_iterations, fname_cur, save_model, economy_save) mm.append(cur) ss = []; sstest = []; for i in range(len(Lunique)): for j in range(len(Lunique)): ss = mm[i].SAMObject.familiarity(mm[j].Y['Y']) print('Familiarity of model ' + participantList[i] + ' given label: ' + participantList[j] + ' using training data is: ' + str(ss)) print("") print("")
# NOTE(review): extraction artifact — this chunk is whitespace-flattened (newlines lost)
# and begins mid-loop-body: `cur`, `Ytest_cur`, `fname` and the index `i` come from an
# enclosing per-label loop that starts before this view, so reflowing it with guessed
# indentation would be unsafe; preserved verbatim. Visible intent: (1) normalise the test
# data with the training mean/std and store mean/std on the model wrapper `cur`;
# (2) stash train/test arrays in the dicts `cur.Y` / `cur.Ytestn` / `cur.Ltest`;
# (3) train the per-label model and append it to `mm`; (4) print the familiarity of every
# trained model against every label's training data. The commented-out block is an unused
# label-normalisation path — presumably left for reference; confirm before deleting.
# Normalise test data similarly to training data Ytestn_cur = Ytest_cur - Ymean_cur Ytestn_cur /= Ystd_cur cur.Ymean = Ymean_cur cur.Ystd = Ystd_cur # As above but for the labels #Lmean_cur = L_cur.mean() #Ln_cur = L_cur - Lmean_cur #Lstd_cur = Ln_cur.std() #Ln_cur /= Lstd_cur #Ltestn_cur = Ltest_cur - Lmean_cur #Ltestn_cur /= Lstd_cur cur.X = None cur.Y = {'Y': Yn_cur} cur.Ytestn = {'Ytest': Ytestn_cur} cur.Ltest = {'Ltest': Ltest_cur} fname_cur = fname + '_L' + str(i) cur.training(model_num_inducing, model_num_iterations, model_init_iterations, fname_cur, save_model, economy_save) mm.append(cur) ss = [] sstest = [] for i in range(len(Lunique)): for j in range(len(Lunique)): ss = mm[i].SAMObject.familiarity(mm[j].Y['Y']) print('Familiarity of model ' + participantList[i] + ' given label: ' + participantList[j] + ' using training data is: ' + str(ss)) print("")