cur.Ystd = Ystd_cur
    # As above but for the labels
    #Lmean_cur = L_cur.mean()
    #Ln_cur = L_cur - Lmean_cur
    #Lstd_cur = Ln_cur.std()
    #Ln_cur /= Lstd_cur
    #Ltestn_cur = Ltest_cur - Lmean_cur
    #Ltestn_cur /= Lstd_cur

    cur.X=None
    cur.Y = {'Y':Yn_cur}
    cur.Ytestn = {'Ytest':Ytestn_cur}
    cur.Ltest = {'Ltest':Ltest_cur}

    fname_cur = fname + '_L' + str(i)
    cur.training(model_num_inducing, model_num_iterations, model_init_iterations, fname_cur, save_model, economy_save)
    mm.append(cur)
    ss = [];
    sstest = [];
# Pairwise familiarity on TRAINING data: score each model i against the
# training outputs of every model j (diagonal = self-familiarity); a blank
# line separates each model's row of results.
for i in range(len(Lunique)):
    for j in range(len(Lunique)):
        ss = mm[i].SAMObject.familiarity(mm[j].Y['Y'])
        print('Familiarity of model %s given label: %s'
              ' using training data is: %s'
              % (participantList[i], participantList[j], str(ss)))
    print("")

print("")
print("")

# Cross-familiarity on TEST data, mirroring the training-data loop above.
# NOTE(review): this fragment is truncated — `sstest` is computed but the
# corresponding print/aggregation step is missing from what is visible here.
for i in range(len(Lunique)):
    for j in range(len(Lunique)):
        sstest = mm[i].SAMObject.familiarity(mm[j].Ytestn['Ytest'])
Ejemplo n.º 2
0
# Specification of model type and training parameters
model_type = 'mrd'
model_num_inducing = 30
model_num_iterations = 150
model_init_iterations = 400
# Model file path; the '.pickle' extension is deliberately left commented out
# (presumably appended by the training routine — confirm).
fname = modelPath + '/models/' + 'mActions_' + model_type + '_exp' + str(experiment_number) #+ '.pickle'

# Enable to save the model and visualise GP nearest neighbour matching
save_model=True
visualise_output=True

# Reading face data, preparation of data and training of the model
mySAMpy.readData(root_data_dir, participant_index, pose_index)
mySAMpy.prepareData(model_type, Ntr, pose_selection)
mySAMpy.training(model_num_inducing, model_num_iterations, model_init_iterations, fname, save_model)

# Busy-wait until the YARP speech-interaction port is connected.
# NOTE(review): Python 2 print statement; the loop also polls without any
# sleep, so it prints continuously until the connection appears.
while( not(yarp.Network.isConnected("/speechInteraction/behaviour:o","/sam/face/interaction:i")) ):
    print "Waiting for connection with behaviour port..."
    pass


# This is for visualising the mapping of the test face back to the internal memory
if visualise_output: 
    ax = mySAMpy.SAMObject.visualise()
    visualiseInfo=dict()
    visualiseInfo['ax']=ax
    # Recall training point 0 and reshape the flat vector back to image
    # dimensions for display.
    ytmp = mySAMpy.SAMObject.recall(0)
    ytmp = numpy.reshape(ytmp,(mySAMpy.imgHeightNew,mySAMpy.imgWidthNew))
    fig_nn = pb.figure()
    pb.title('Training NN')
Ejemplo n.º 3
0
# Number of initialisation iterations for model optimisation.
model_init_iterations = 400
# Model file path; '.pickle' extension deliberately left commented out
# (presumably appended downstream — confirm).
fname = modelPath + '/models/' + 'mActions_' + model_type + '_exp' + str(
    experiment_number)  #+ '.pickle'

# Enable to save the model and visualise GP nearest neighbour matching
save_model = True
economy_save = True  # ATTENTION!! This is still BETA!!
visualise_output = True

# Reading face data, preparation of data and training of the model
mySAMpy.readData(root_data_dir, participant_index, pose_index)
# Data preparation is seeded with the experiment number, making the
# train/test split reproducible per experiment.
mySAMpy.prepareData(model_type,
                    Ntr,
                    pose_selection,
                    randSeed=experiment_number)
mySAMpy.training(model_num_inducing, model_num_iterations,
                 model_init_iterations, fname, save_model, economy_save)

# Busy-wait for the YARP port connection (only when YARP is in use).
# NOTE(review): Python 2 print statement; polls without sleeping.
if yarpRunning:
    while (not (yarp.Network.isConnected("/speechInteraction/behaviour:o",
                                         "/sam/face/interaction:i"))):
        print "Waiting for connection with behaviour port..."
        pass

# This is for visualising the mapping of the test face back to the internal memory
if visualise_output:
    ax = mySAMpy.SAMObject.visualise()
    visualiseInfo = dict()
    visualiseInfo['ax'] = ax
    # Recall training point 0 and reshape the flat vector back to image
    # dimensions for display.
    ytmp = mySAMpy.SAMObject.recall(0)
    ytmp = numpy.reshape(ytmp, (mySAMpy.imgHeightNew, mySAMpy.imgWidthNew))
    fig_nn = pb.figure()
Ejemplo n.º 4
0
	# Label normalisation (mean/std) intentionally disabled; labels used raw.
	#Lmean_cur = L_cur.mean()
	#Ln_cur = L_cur - Lmean_cur
	#Lstd_cur = Ln_cur.std()
	#Ln_cur /= Lstd_cur
	#Ltestn_cur = Ltest_cur - Lmean_cur
	#Ltestn_cur /= Lstd_cur

	# NOTE(review): fragment of a per-label training loop — the enclosing
	# `for` header (defining `cur`, `i`, etc.) is not visible in this file.
	# Attach normalised train/test outputs and raw test labels to this
	# sub-model.
	cur.X = None
	cur.Y = {'Y':Yn_cur}
	cur.Ytestn = {'Ytest':Ytestn_cur}
	cur.Ltest = {'Ltest':Ltest_cur}
	# Python 2 print statements (no parentheses) throughout this fragment.
	print 'training data' + str(cur.Y['Y'].shape)
	print 'testing data' + str(cur.Ytestn['Ytest'].shape)

	# One saved model per label index (double-underscore naming variant here).
	fname_cur = fname + '__L' + str(i)
	cur.training(model_num_inducing, model_num_iterations, model_init_iterations, fname_cur, save_model, economy_save, keepIfPresent=False, kernelStr=kernelString)
	mm.append(cur)
	ss = [];
	sstest = [];
	print

#Ntest = 10
print Ntest
# result[i, t, j]: familiarity score of test item t of participant i under
# model j; response/confusion arrays are derived from it (filled later).
result = np.zeros([len(participantList),Ntest,len(participantList)])
responseIdx = np.zeros([result.shape[0],result.shape[1]])
responseVal = np.zeros([result.shape[0],result.shape[1]])
confusionMatrix = np.zeros([result.shape[0],result.shape[0]])

# NOTE(review): loop truncated in this fragment — only the per-participant
# header print is visible; the scoring body is missing here.
for i in range(result.shape[0]):
	print
	print('Participant ' + str(i) + ': ' + participantList[i])
Ejemplo n.º 5
0
    # NOTE(review): fragment of a per-label training loop — the enclosing
    # `for` header (defining `cur`, `Ystd_cur`, `i`, etc.) is not visible
    # in this file. Store the output std on the sub-model (presumably for
    # later de-normalisation — confirm against the training code).
    cur.Ystd = Ystd_cur
    # As above but for the labels
    #Lmean_cur = L_cur.mean()
    #Ln_cur = L_cur - Lmean_cur
    #Lstd_cur = Ln_cur.std()
    #Ln_cur /= Lstd_cur
    #Ltestn_cur = Ltest_cur - Lmean_cur
    #Ltestn_cur /= Lstd_cur

    # Attach normalised train/test outputs and raw test labels.
    cur.X = None
    cur.Y = {'Y': Yn_cur}
    cur.Ytestn = {'Ytest': Ytestn_cur}
    cur.Ltest = {'Ltest': Ltest_cur}

    # One saved model file per label index.
    fname_cur = fname + '_L' + str(i)
    cur.training(model_num_inducing, model_num_iterations,
                 model_init_iterations, fname_cur, save_model, economy_save)
    mm.append(cur)
    ss = []
    sstest = []
# Cross-familiarity on the TRAINING data: evaluate each trained model i on
# the training outputs of every model j (diagonal entries are
# self-familiarity); a blank line separates each model's row of results.
for i in range(len(Lunique)):
    for j in range(len(Lunique)):
        ss = mm[i].SAMObject.familiarity(mm[j].Y['Y'])
        print('Familiarity of model %s given label: %s'
              ' using training data is: %s' % (participantList[i],
                                               participantList[j], str(ss)))
    print("")

print("")
print("")

for i in range(len(Lunique)):
    for j in range(len(Lunique)):