Code Example #1
# Specification of model type and training parameters
model_type = 'mrd'
model_num_inducing = 30
model_num_iterations = 700
model_init_iterations = 2000
fname = modelPath + '/models/' + 'mActions_' + model_type + '_exp' + str(experiment_number) #+ '.pickle'

# Enable to save the model and visualise GP nearest neighbour matching
save_model = True
economy_save = True  # ATTENTION!! This is still BETA!!
visualise_output = False
test_mode = False

# Reading face data, preparation of data and training of the model
mySAMpy.readData(root_data_dir, participant_index, pose_index)
(Yall, Lall, YtestAll, LtestAll) = mySAMpy.prepareData(model_type, Ntr, pose_selection, randSeed=experiment_number)

Lunique = numpy.unique(mySAMpy.L)
# Map each unique label to the indices of the samples that carry it
L_index = dict()
for i in range(len(Lunique)):
    L_index[Lunique[i]] = numpy.where(mySAMpy.L == Lunique[i])[0]

mm = []

for i in range(len(Lunique)):
    print('# Considering label: ' + str(Lunique[i]))
    cur = SAMDriver_interaction(True, imgH=400, imgW=400, imgHNew=200, imgWNew=200,
                                inputImagePort="/CLM/imageSeg/out", openPorts=False)
    # Slice out the training and test data belonging to the current label
    Y_cur = Yall[L_index[Lunique[i]], :].copy()
    Ytest_cur = YtestAll[L_index[Lunique[i]], :].copy()
    L_cur = Lall[L_index[Lunique[i]], :].copy()
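
The per-label index built above can be written more compactly with a dict comprehension. A minimal, self-contained sketch of the same pattern; the array below is a hypothetical stand-in for mySAMpy.L:

import numpy

# Hypothetical stand-in for mySAMpy.L: one integer label per sample
L = numpy.array([0, 0, 1, 2, 1, 0])

# Map each unique label to the indices of the samples that carry it
L_index = {lab: numpy.where(L == lab)[0] for lab in numpy.unique(L)}

print(L_index)  # e.g. {0: array([0, 1, 5]), 1: array([2, 4]), 2: array([3])}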
Code Example #2
File: interaction_faces.py Project: pecfw/wysiwyd
# Reading face data, preparation of data and training of the model
mySAMpy.readData(dataPath, participantList, pose_index)

minImages = mySAMpy.Y.shape[1]
Ntr = int(minImages * ratioData / 100)
Ntest = minImages - Ntr

allPersonsY = mySAMpy.Y
allPersonsL = mySAMpy.L

for i in range(len(participantList)):
    # print(participantList[i])
    mySAMpy.Y = allPersonsY[:, :, i, None]
    mySAMpy.L = allPersonsL[:, :, i, None]
    (Yalli, Lalli, YtestAlli, LtestAlli) = mySAMpy.prepareData(model_type,
                                                               Ntr,
                                                               pose_selection,
                                                               randSeed=2)

    if i == 0:
        Yall = Yalli.copy()
        Lall = Lalli.copy()
        YtestAll = YtestAlli.copy()
        LtestAll = LtestAlli.copy()
    else:
        Yall = np.vstack([Yall, Yalli])
        Lall = np.vstack([Lall, Lalli])
        YtestAll = np.vstack([YtestAll, YtestAlli])
        LtestAll = np.vstack([LtestAll, LtestAlli])

# Release references to the full data arrays
allPersonsY = None
allPersonsL = None
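
Growing Yall and the other arrays with np.vstack inside the loop re-copies the accumulated data on every iteration. A minimal alternative sketch, reusing the names and the prepareData call from the example above (so it assumes the same mySAMpy interface): collect the per-participant blocks in lists and stack once at the end.

import numpy as np

Y_blocks, L_blocks, Ytest_blocks, Ltest_blocks = [], [], [], []
for i in range(len(participantList)):
    mySAMpy.Y = allPersonsY[:, :, i, None]
    mySAMpy.L = allPersonsL[:, :, i, None]
    Yalli, Lalli, YtestAlli, LtestAlli = mySAMpy.prepareData(model_type, Ntr, pose_selection, randSeed=2)
    Y_blocks.append(Yalli)
    L_blocks.append(Lalli)
    Ytest_blocks.append(YtestAlli)
    Ltest_blocks.append(LtestAlli)

# Stack each list of blocks once instead of re-stacking per iteration
Yall = np.vstack(Y_blocks)
Lall = np.vstack(L_blocks)
YtestAll = np.vstack(Ytest_blocks)
LtestAll = np.vstack(Ltest_blocks)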
Code Example #3
pose_selection = 0

# Specification of model type and training parameters
model_type = 'mrd'
model_num_inducing = 30
model_num_iterations = 150
model_init_iterations = 400
fname = modelPath + '/models/' + 'mActions_' + model_type + '_exp' + str(experiment_number) #+ '.pickle'

# Enable to save the model and visualise GP nearest neighbour matching
save_model = True
visualise_output = True

# Reading face data, preparation of data and training of the model
mySAMpy.readData(root_data_dir, participant_index, pose_index)
mySAMpy.prepareData(model_type, Ntr, pose_selection)
mySAMpy.training(model_num_inducing, model_num_iterations, model_init_iterations, fname, save_model)

# Block until the behaviour port is connected
while not yarp.Network.isConnected("/speechInteraction/behaviour:o", "/sam/face/interaction:i"):
    print("Waiting for connection with behaviour port...")


# This is for visualising the mapping of the test face back to the internal memory
if visualise_output:
    ax = mySAMpy.SAMObject.visualise()
    visualiseInfo = dict()
    visualiseInfo['ax'] = ax
    # Recall a stored training point and reshape it back into an image
    ytmp = mySAMpy.SAMObject.recall(0)
    ytmp = numpy.reshape(ytmp, (mySAMpy.imgHeightNew, mySAMpy.imgWidthNew))
    fig_nn = pb.figure()
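
The connection wait above spins at full CPU and prints without pause. A politer variant is sketched below, assuming the standard yarp Python bindings and the same port names; the 0.5 s interval is an arbitrary choice:

import time
import yarp

yarp.Network.init()
# Poll the connection at a fixed interval instead of busy-waiting
while not yarp.Network.isConnected("/speechInteraction/behaviour:o",
                                   "/sam/face/interaction:i"):
    print("Waiting for connection with behaviour port...")
    time.sleep(0.5)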
Code Example #4
# Reading face data, preparation of data and training of the model
mySAMpy.readData(dataPath, participantList, pose_index)

minImages = mySAMpy.Y.shape[1]
Ntr = int(minImages * ratioData / 100)
Ntest = minImages - Ntr

allPersonsY = mySAMpy.Y
allPersonsL = mySAMpy.L

for i in range(len(participantList)):
    # print(participantList[i])
    mySAMpy.Y = allPersonsY[:, :, i, None]
    mySAMpy.L = allPersonsL[:, :, i, None]
    (Yalli, Lalli, YtestAlli, LtestAlli) = mySAMpy.prepareData(model_type, Ntr, pose_selection, randSeed=2)

    if i == 0:
        Yall = Yalli.copy()
        Lall = Lalli.copy()
        YtestAll = YtestAlli.copy()
        LtestAll = LtestAlli.copy()
    else:
        Yall = np.vstack([Yall, Yalli])
        Lall = np.vstack([Lall, Lalli])
        YtestAll = np.vstack([YtestAll, YtestAlli])
        LtestAll = np.vstack([LtestAll, LtestAlli])

allPersonsY = None
allPersonsL = None
Code Example #5
# Reading face data, preparation of data and training of the model
mySAMpy.readData(root_data_dir, participant_index, pose_index)

minImages = mySAMpy.Y.shape[1]
Ntr = int(minImages * ratioData / 100)
Ntest = minImages - Ntr

allPersonsY = mySAMpy.Y
allPersonsL = mySAMpy.L

for i in range(len(participantList)):
    # print(participantList[i])
    mySAMpy.Y = allPersonsY[:, :, i, None]
    mySAMpy.L = allPersonsL[:, :, i, None]
    (Yalli, Lalli, YtestAlli, LtestAlli) = mySAMpy.prepareData(model_type, Ntr, pose_selection, randSeed=experiment_number)

    if i == 0:
        Yall = Yalli.copy()
        Lall = Lalli.copy()
        YtestAll = YtestAlli.copy()
        LtestAll = LtestAlli.copy()
    else:
        Yall = np.vstack([Yall, Yalli])
        Lall = np.vstack([Lall, Lalli])
        YtestAll = np.vstack([YtestAll, YtestAlli])
        LtestAll = np.vstack([LtestAll, LtestAlli])

allPersonsY = None
allPersonsL = None
mm = []
Code Example #6
model_type = 'mrd'
model_num_inducing = 30
model_num_iterations = 150
model_init_iterations = 400
fname = modelPath + '/models/' + 'mActions_' + model_type + '_exp' + str(
    experiment_number)  #+ '.pickle'

# Enable to save the model and visualise GP nearest neighbour matching
save_model = True
economy_save = True  # ATTENTION!! This is still BETA!!
visualise_output = True

# Reading face data, preparation of data and training of the model
mySAMpy.readData(root_data_dir, participant_index, pose_index)
mySAMpy.prepareData(model_type,
                    Ntr,
                    pose_selection,
                    randSeed=experiment_number)
mySAMpy.training(model_num_inducing, model_num_iterations,
                 model_init_iterations, fname, save_model, economy_save)

if yarpRunning:
    # Block until the behaviour port is connected
    while not yarp.Network.isConnected("/speechInteraction/behaviour:o",
                                       "/sam/face/interaction:i"):
        print("Waiting for connection with behaviour port...")

# This is for visualising the mapping of the test face back to the internal memory
if visualise_output:
    ax = mySAMpy.SAMObject.visualise()
    visualiseInfo = dict()
    visualiseInfo['ax'] = ax
Code Example #7
# Specification of model type and training parameters
model_type = 'mrd'
model_num_inducing = 30
model_num_iterations = 150
model_init_iterations = 400
fname = modelPath + '/models/' + 'mActions_' + model_type + '_exp' + str(experiment_number) #+ '.pickle'

# Enable to save the model and visualise GP nearest neighbour matching
save_model = True
economy_save = True  # ATTENTION!! This is still BETA!!
visualise_output = True

# Reading face data, preparation of data and training of the model
mySAMpy.readData(root_data_dir, participant_index, pose_index)
mySAMpy.prepareData(model_type, Ntr, pose_selection, randSeed=experiment_number)
mySAMpy.training(model_num_inducing, model_num_iterations, model_init_iterations, fname, save_model, economy_save)


if yarpRunning:
    while not yarp.Network.isConnected("/speechInteraction/behaviour:o", "/sam/face/interaction:i"):
        print("Waiting for connection with behaviour port...")


# This is for visualising the mapping of the test face back to the internal memory
if visualise_output:
    ax = mySAMpy.SAMObject.visualise()
    visualiseInfo = dict()
    visualiseInfo['ax'] = ax
    ytmp = mySAMpy.SAMObject.recall(0)
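
Example #7 ends right after recall(0); Examples #1 and #3 show the next step, reshaping the recalled vector back into an image. A minimal sketch completing the visualisation, assuming pb is matplotlib.pylab as in Example #3 and that imgHeightNew * imgWidthNew matches the recalled vector length (the imshow call and title are illustrative additions, not part of the original):

import numpy
import matplotlib.pylab as pb

ytmp = mySAMpy.SAMObject.recall(0)
# Reshape the flat recalled vector back into image dimensions
ytmp = numpy.reshape(ytmp, (mySAMpy.imgHeightNew, mySAMpy.imgWidthNew))
fig_nn = pb.figure()
pb.imshow(ytmp, cmap=pb.cm.Greys_r)  # greyscale reconstruction of the face
pb.title('Recalled face from internal memory')
pb.show()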