Example #1
	def configure(self, rf):
		self.handlerPort.open("/ActivityRecognitionModule")
		self.attach(self.handlerPort)

		self.list_port = []
		self.cback = []

		path_model = rf.find('path_model').toString()
		name_model = rf.find('name_model').toString()
		self.statePort = yarp.BufferedPortBottle()
		self.statePort.open('/activity_recognition/state:o')
		self.probPort = yarp.BufferedPortBottle()
		self.probPort.open('/activity_recognition/probabilities:o')
		self.model = ModelHMM()
		self.model.load_model(path_model + '/' + name_model)
		self.list_states = self.model.get_list_states()
		self.buffer_model = [[]]

		size_buffer = int(rf.find('size_buffer').toString())

		signals = rf.findGroup("Signals").tail().toString().replace(')', '').replace('(', '').split(' ')
		nb_port = int(len(signals))
		nb_active_port = 0

		for signal in signals:
			info_signal = rf.findGroup(signal)
			is_enabled = int(info_signal.find('enable').toString())

			if(is_enabled):
				list_items = info_signal.findGroup('list').tail().toString().split(' ')
				input_port_name = info_signal.find('output_port').toString()

				if(input_port_name == ''):
					input_port_name = info_signal.find('input_port').toString()

				if((list_items[0] == 'all') or (list_items[0] == '')):
					self.list_port.append(yarp.BufferedPortBottle())
					self.list_port[nb_active_port].open("/activity_recognition" + input_port_name + ':i')
					self.cback.append(CallbackData(size_buffer))
					self.list_port[nb_active_port].useCallback(self.cback[nb_active_port])
					yarp.Network.connect("/processing" + input_port_name + ':o', self.list_port[nb_active_port].getName())
					nb_active_port += 1
					self.buffer_model.append([])

				else:
					for item in list_items:
						self.list_port.append(yarp.BufferedPortBottle())
						self.list_port[nb_active_port].open("/activity_recognition" + input_port_name + '/' + item + ':i')
						self.cback.append(CallbackData(size_buffer))
						self.list_port[nb_active_port].useCallback(self.cback[nb_active_port])
						self.buffer_model.append([])
						yarp.Network.connect("/processing" + input_port_name + '/' + item  + ':o', self.list_port[nb_active_port].getName())
						nb_active_port += 1


		self.flag_model = np.zeros(nb_active_port)
		self.obs = []

		return True
class DetectionContact(object):
    def __init__(self, rf):
        self.input_port = '/processing/eglove/data/Forces:o'

        self.port = yarp.BufferedPortBottle()
        self.port.open('/DetectionContact')

        path_model = rf.find('path_model').toString()
        name_model = rf.find('name_model').toString()

        self.model = ModelHMM()
        self.model.load_model(path_model + '/' + name_model)
        self.list_states = self.model.get_list_states()
        self.buffer_model = []
        self.obs = []
        print(self.list_states)

        yarp.Network.connect(self.input_port, self.port.getName())

    def update(self):
        # Read the latest bottle from the force input port
        b_in = self.port.read()
        data = b_in.toString().split(' ')

        # data = self.cback[i].get_data()

        if (len(data) > 0):
            self.buffer_model = data
            # self.flag_model[i] = 1

            # Drop the first token of the bottle, then convert the rest to floats
            del data[0]
            data_model = list(map(float, data))

            self.obs.append(data_model)

            # Keep a sliding window of at most 500 observations
            if (len(self.obs) > 500):
                del self.obs[0]

        # Once enough observations are buffered, predict the current state
        # and publish its index and name
        if (len(self.obs) > 10):
            state = self.model.predict_states(self.obs)
            b_state = self.port.prepare()
            b_state.clear()
            b_state.addInt(1)
            index_state = int(state[-1])
            b_state.addInt(index_state)
            b_state.addString(self.list_states[index_state])
            self.port.write()
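
A minimal sketch of how the DetectionContact class above could be driven; this wiring is an assumption, not part of the original source (it presumes a running YARP name server, a source port /processing/eglove/data/Forces:o that is already open, and a ResourceFinder that supplies path_model and name_model):

import sys
import yarp

if __name__ == '__main__':
    yarp.Network.init()

    rf = yarp.ResourceFinder()
    rf.configure(sys.argv)

    detector = DetectionContact(rf)

    try:
        # port.read() blocks until a new bottle arrives, so this loop is paced by the data stream
        while True:
            detector.update()
    finally:
        yarp.Network.fini()
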
Example #4
        while (os.path.isfile(file_wrapper)):
            print(file_wrapper)
            list_features_final = get_best_features(file_wrapper)[0]
            dim_features = np.ones(len(list_features_final))

            for n_iter in range(nbr_cross_val):
                data_ref1, labels_ref, data_test, labels_test, id_train, id_test = tools.split_data_base2(
                    data_win2, real_labels[num_track], ratio)

                data_ref = []
                for data in data_ref1:
                    df = pd.DataFrame(data)
                    df.columns = list_features
                    data_ref.append(df[list_features_final].values)

                model = ModelHMM()
                model.train(data_ref, labels_ref, list_features_final,
                            dim_features)

                data_ref = []
                for data in data_test:
                    df = pd.DataFrame(data)
                    df.columns = list_features
                    data_ref.append(df[list_features_final].values)

                pred_labels, proba = model.test_model(data_ref)

                F1_temp = []
                for i in range(len(labels_test)):
                    F1_temp.append(
                        tools.compute_F1_score(labels_test[i], pred_labels[i],
Example #5
        TP = 0

        short = 0

        transition_error = []
        short_transition_error = 0

        MCC = 0
        F1_fisher = []
        F1_wrapper = []

        F1_f = []
        F1_w = []

        if (nbr_cross_val == 0):
            model = ModelHMM()
            model.train(data_win, real_labels, best_features, dim_features)

            if (save):
                model.save_model(path_model, name_model, "load_handling")

        for n_subject in range(len(list_participant)):
            data_reduce = deepcopy(data_win)
            labels_reduce = deepcopy(real_labels)

            if (test_generalisation):
                data_gen = []
                labels_gen = []
                seq_subject = 0
                count = []
                for i in range(len(info_participant)):
                data_win = []

                for j in range(len(df_all_data)):
                    data_win.append(df_all_data[j][sub_list_features].values)

                F1_S = 0
                MCC = 0

                confusion_matrix = np.zeros(
                    (len(list_states), len(list_states)))
                for nbr_test in range(nbr_cross_val):

                    data_ref, labels_ref, data_test, labels_test, id_train, id_test = tools.split_data_base2(
                        data_win, real_labels, ratio)

                    model = ModelHMM()
                    model.train(data_ref, labels_ref, sub_list_features,
                                np.ones(len(sub_list_features)))

                    #### Test
                    time_test = []
                    for id_subject in id_test:
                        time_test.append(timestamps[id_subject])

                    predict_labels, proba = model.test_model(data_test)

                    for i in range(len(predict_labels)):
                        conf_mat = tools.compute_confusion_matrix(
                            predict_labels[i], labels_test[i], list_states)
                        confusion_matrix += conf_mat
                        MCC += tools.compute_MCC_score(
        data_train, labels_train, data_test, labels_test, id_train, id_test = tools.split_data_base(
            data_win, real_labels[num_track], ratio_split)

        # Keep only the data related to the final list of features
        train_set = tools.reduce_data_to_features(data_train, list_features,
                                                  list_features_final)
        test_set = tools.reduce_data_to_features(data_test, list_features,
                                                 list_features_final)
        dim_features = np.ones(len(list_features_final))

        print('DEBUG  list of final features ', list_features_final)
        plt.plot(train_set[0][:, 3])
        plt.show()

        # Training the model
        model = ModelHMM()
        model.train(train_set, labels_train, list_features_final, dim_features)

        # Testing the model
        pred_labels, proba = model.test_model(test_set)

        #debug sere
        # write pred_labels and proba to a file

        F1_temp = []
        for i in range(len(labels_test)):
            F1_temp.append(
                tools.compute_F1_score(labels_test[i], pred_labels[i],
                                       list_states[num_track]))

        F1_score.append(np.mean(F1_temp))
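
For reference, a hedged sketch of what a per-sequence helper like tools.compute_F1_score might compute; this is an assumption based only on the call signature above (true labels, predicted labels, list of states), not the repository's actual implementation:

from sklearn.metrics import f1_score

def compute_F1_score_sketch(true_labels, pred_labels, list_states):
    # Hypothetical stand-in: macro-averaged F1 over the known states of this track
    return f1_score(true_labels, pred_labels, labels=list_states, average='macro')
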
                    'best_features': sorted_features_fisher,
                    'score': sorted_score,
                })

                best_features = sorted_features_fisher[0:n_components]
                dim_features = np.ones(len(best_features))

                # print(sorted_features_fisher)

                data_reduce = []
                for data in data_ref:
                    df = pd.DataFrame(data)
                    df.columns = list_features
                    data_reduce.append(df[best_features].values)

                model = ModelHMM()
                model.train(data_reduce, labels_ref, best_features,
                            dim_features)

                data_reduce = []
                for data in data_test:
                    df = pd.DataFrame(data)
                    df.columns = list_features
                    data_reduce.append(df[best_features].values)

                pred_labels, proba = model.test_model(data_reduce)

                F1_temp = []
                for i in range(len(labels_test)):
                    F1_temp.append(
                        tools.compute_F1_score(labels_test[i], pred_labels[i],
Example #9
class ActivityRecognitionModule(yarp.RFModule):
	def __init__(self):
		yarp.RFModule.__init__(self)
		self.handlerPort = yarp.Port()

	def configure(self, rf):
		self.handlerPort.open("/ActivityRecognitionModule")
		self.attach(self.handlerPort)

		self.list_port = []
		self.cback = []

		path_model = rf.find('path_model').toString()
		name_model = rf.find('name_model').toString()

		# Output ports: recognised state and per-state probabilities
		self.statePort = yarp.BufferedPortBottle()
		self.statePort.open('/activity_recognition/state:o')
		self.probPort = yarp.BufferedPortBottle()
		self.probPort.open('/activity_recognition/probabilities:o')

		# Load the trained HMM and the list of states it can predict
		self.model = ModelHMM()
		self.model.load_model(path_model + '/' + name_model)
		self.list_states = self.model.get_list_states()
		self.buffer_model = [[]]

		size_buffer = int(rf.find('size_buffer').toString())

		# Parse the signal names listed in the [Signals] group of the configuration file
		signals = rf.findGroup("Signals").tail().toString().replace(')', '').replace('(', '').split(' ')
		nb_port = int(len(signals))
		nb_active_port = 0

		for signal in signals:
			info_signal = rf.findGroup(signal)
			is_enabled = int(info_signal.find('enable').toString())

			if(is_enabled):
				list_items = info_signal.findGroup('list').tail().toString().split(' ')
				input_port_name = info_signal.find('output_port').toString()

				if(input_port_name == ''):
					input_port_name = info_signal.find('input_port').toString()

				if((list_items[0] == 'all') or (list_items[0] == '')):
					self.list_port.append(yarp.BufferedPortBottle())
					self.list_port[nb_active_port].open("/activity_recognition" + input_port_name + ':i')
					self.cback.append(CallbackData(size_buffer))
					self.list_port[nb_active_port].useCallback(self.cback[nb_active_port])
					yarp.Network.connect("/processing" + input_port_name + ':o', self.list_port[nb_active_port].getName())
					nb_active_port += 1
					self.buffer_model.append([])

				else:
					for item in list_items:
						self.list_port.append(yarp.BufferedPortBottle())
						self.list_port[nb_active_port].open("/activity_recognition" + input_port_name + '/' + item + ':i')
						self.cback.append(CallbackData(size_buffer))
						self.list_port[nb_active_port].useCallback(self.cback[nb_active_port])
						self.buffer_model.append([])
						yarp.Network.connect("/processing" + input_port_name + '/' + item  + ':o', self.list_port[nb_active_port].getName())
						nb_active_port += 1


		self.flag_model = np.zeros(nb_active_port)
		self.obs = []

		return True



	def close(self):

		print('*** Closing the ports of this module ***')

		for port in self.list_port:
			port.close()

		self.statePort.close()
		self.probPort.close()
		self.handlerPort.close()

		print('Finished closing ports ')

		return True


	def interruptModule(self):

		print('*** Interrupt the ports of this module ***')

		for port in self.list_port:
			port.interrupt()

		self.statePort.interrupt()
		self.probPort.interrupt()
		self.handlerPort.interrupt()

		print('Finished interrupt ports ')

		return True





	def updateModule(self):
		data_model = []
		index = 0
		received_data = 0

		#print('DEBUG liste portes ', self.list_port)

		for port, i in zip(self.list_port, range(len(self.list_port))):

			#print('DEBUG port get name', port.getName())

			#read port data 
			data = self.cback[i].get_data()

			if(len(data)>0):
				self.buffer_model[i] = data
				self.flag_model[i] = 1

				dimension = len(data)
				if(i == 0):
					data_model = data
				else:
					data_model = np.concatenate((data_model, data))


		# if we read from all the ports, we launch the model recognition 
		if(np.sum(self.flag_model) == len(self.list_port)):
			self.flag_model = np.zeros(len(self.list_port))

			for i in range(len(self.list_port)):
				if(i == 0):
					data_model = self.buffer_model[i]
				else:
					data_model = np.concatenate((data_model, self.buffer_model[i]))

			# if we are here, we have all the data to input on our model 
			#print ('DEBUG DATA MODEL data in input to the model ', data_model)


			self.obs.append(data_model)
			if(len(self.obs) > 500):
				del self.obs[0]

			#print ('DEBUG OBSERVATIONS ', self.obs)


			if(len(self.obs) > 10):
				state = self.model.predict_states(self.obs)
				b_state = self.statePort.prepare()
				b_state.clear()
				b_state.addInt(1)
				index_state = int(state[-1])
				b_state.addInt(index_state)
				b_state.addString(self.list_states[index_state])
				self.statePort.write()

				probabilities = self.model.score_samples(self.obs)
				b_prob = self.probPort.prepare()
				b_prob.clear()

				for prob, index in zip(probabilities[-1], range(len(probabilities[-1]))):
					b_prob.addString(self.list_states[index])
					b_prob.addDouble(prob)
				self.probPort.write()

		return True

	def getPeriod(self):
		return 0.001
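
A minimal sketch of a launcher for the ActivityRecognitionModule above; the context and config-file names are placeholders and the wiring is an assumption, not part of the original source (it presumes a running YARP name server and an .ini file that provides path_model, name_model, size_buffer and the Signals groups):

import sys
import yarp

if __name__ == '__main__':
    yarp.Network.init()

    rf = yarp.ResourceFinder()
    rf.setDefaultContext('activity_recognition')         # placeholder context name
    rf.setDefaultConfigFile('activity_recognition.ini')  # placeholder config file
    rf.configure(sys.argv)

    module = ActivityRecognitionModule()
    # runModule() calls configure(rf) once, then updateModule() every getPeriod() seconds
    module.runModule(rf)

    yarp.Network.fini()
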
        F1_fisher = []
        F1_wrapper = []

        F1_f = []
        F1_w = []

        if (nbr_cross_val == 0):
            data_reduce = []
            for data in data_win:
                df = pd.DataFrame(data)
                df.columns = list_features
                data_reduce.append(df[best_features_wrapper].values)

            data_ref, labels_ref, data_test, labels_test, id_train, id_test = tools.split_data_base2(
                data_reduce, real_labels, ratio)
            model = ModelHMM()
            model.train(data_ref, labels_ref, best_features_wrapper,
                        dim_features)
            model.save_model('model', 'test_video_action', "load_handling")

            info_split = []

            for seq, num_seq in zip(info_sequences,
                                    range(len(info_sequences))):
                if (num_seq in id_train):
                    info_split.append('training')
                else:
                    info_split.append('testing')

            df = pd.DataFrame({'Sequence': info_sequences, 'Base': info_split})
Example #11
            data_base = pd.DataFrame(data, columns=list_features)

            # time = data_base['timestamp']

            # labels, states = ref_data.load_labels_refGT(time, name_track, 'labels_3A')
            # ref_data.load_labels_ref(name_track, labels_folder)
            # labels = ref_data.get_real_labels(time)
            # states = ref_data.get_list_states()

            real_labels.append(labels)
            data_win2.append(data_base[best_features].values)  # .as_matrix() was removed in recent pandas; .values is equivalent
            timestamps.append(time)

            i += 1

    model = ModelHMM()
    model.load_model(path_model + 'test_' + name_track)
    list_states = model.get_list_states()

    print(list_states)

    predict_labels, proba = model.test_model(data_win2)

    real_labels = np.asarray(real_labels).T
    predict_labels = np.asarray(predict_labels).T

    # for i in range(len(predict_labels)):
    # 	print(predict_labels[i][0])
    # 	predict_labels[i][0] = predict_labels[i][0].replace("Id", "Idle")
    # 	predict_labels[i][0] = predict_labels[i][0].replace("Re", "Reach")
    # 	predict_labels[i][0] = predict_labels[i][0].replace("Rl", "Release")
Example #12
            del labels_reduce_posture[count[0]:count[-1] + 1]
            del data_reduce_detailted[count[0]:count[-1] + 1]
            del labels_reduce_detailed[count[0]:count[-1] + 1]
            del data_reduce_details[count[0]:count[-1] + 1]
            del labels_reduce_details[count[0]:count[-1] + 1]
            del data_reduce_action[count[0]:count[-1] + 1]
            del labels_reduce_action[count[0]:count[-1] + 1]

        else:
            n_subject = len(list_participant)

        for nbr_test in range(nbr_cross_val):
            data_ref, labels_ref, data_test, labels_test, id_train, id_test = tools.split_data_base2(
                data_reduce_posture, labels_reduce_posture, ratio)

            model = ModelHMM()
            model.train(data_ref, labels_ref, best_features_posture,
                        np.ones(len(best_features_posture)))

            labels_ref_details = []
            data_details_ref = []
            labels_ref_detailed_posture = []
            data_ref_detailed_posture = []
            labels_ref_action = []
            data_ref_action = []

            if (test_generalisation):
                for id_subject in id_train:
                    labels_ref_details.append(
                        labels_reduce_details[id_subject])
                    data_details_ref.append(data_reduce_details[id_subject])
Example #13
            time = data_base['timestamps']

            labels, states = ref_data.load_labels_refGT(
                time, name_track, 'labels_3A')
            # ref_data.load_labels_ref(name_track, labels_folder)
            # labels = ref_data.get_real_labels(time)
            # states = ref_data.get_list_states()

            real_labels.append(labels)
            data_win2.append(data_base[best_features].values)  # .as_matrix() was removed in recent pandas; .values is equivalent
            timestamps.append(time)

            i += 1

    model = ModelHMM()
    model.load_model(path_model + 'test_video')
    list_states = model.get_list_states()

    predict_labels, proba = model.test_model(data_win2)

    real_labels = np.asarray(real_labels).T
    predict_labels = np.asarray(predict_labels).T

    # for i in range(len(predict_labels)):
    # 	print(predict_labels[i][0])
    # 	predict_labels[i][0] = predict_labels[i][0].replace("Id", "Idle")
    # 	predict_labels[i][0] = predict_labels[i][0].replace("Re", "Reach")
    # 	predict_labels[i][0] = predict_labels[i][0].replace("Rl", "Release")
    # 	predict_labels[i][0] = predict_labels[i][0].replace("Fm", "Fine Manipulation")
    # 	predict_labels[i][0] = predict_labels[i][0].replace("Sc", "Screw")