Example #1
0
def print_results(number_of_gesture, filename, results, correct_gesture):
    """Print one file's per-gesture scores followed by the correct gesture."""
    print("###########################################################")
    print("File:", filename)
    for idx in range(number_of_gesture):
        label = Gesture.gesture_to_string(idx)
        print('{:15s}\t{:10f}'.format(label, results[idx]))

    print()
    print("Correct:", Gesture.gesture_to_string(correct_gesture))
def compare_prerecorded_files():
    """Run the cross-correlation comparison over every pre-recorded test file
    and print the expected vs. recognized gesture for each file.
    """
    print("Compare pre-recorded tests")
    for file in DataUtility.TEST_FILE_LIST:
        print("#####################################################################################")
        print("File: " + file.filename)
        data_handler = DataHandlers.FileDataHandler(file)
        recognized_gesture = CompareMethods.cross_correlation_comparison(data_handler)

        print("Gesture: " + Gesture.gesture_to_string(file.gesture))
        # Fixed typo in the printed label: "Regcognized" -> "Recognized".
        print("Recognized Gesture: " + Gesture.gesture_to_string(recognized_gesture))
        print("\n\n\n")
Example #3
0
def cross_correlation_comparison(gesture_input_data_handler):
    """Compare an input recording against every training file with cross
    correlation and return the best-matching gesture.

    Similarity is accumulated per training gesture, with EMG kept separate
    from the other (IMU) sensors; the combined score decides the winner.
    Returns Gesture.NONE_GESTURE when no gesture scores above 0.0.
    """
    print("Sensor Ignored: ", end="")
    for sensor in range(Sensor.NUMBER_OF_SENSORS):
        if not is_sensor_on(sensor):
            print(Sensor.sensor_to_string(sensor) + ", ", end="")

    print("\b\b", "  ")
    print("Compare method: Cross Correlation\n")

    # NOTE(review): sized with Constant.NUMBER_OF_GESTURES but iterated below
    # with Gesture.NUMBER_OF_GESTURES -- presumably equal; confirm.
    corr_rs = [0] * Constant.NUMBER_OF_GESTURES
    emg_corrs = [0] * Constant.NUMBER_OF_GESTURES

    # Iterate the training files directly instead of via range(len(...)).
    for training_data_file in DataUtility.TRAINING_FILE_LIST:
        print(training_data_file.filename, end="\r")
        sys.stdout.write("\033[K")  # clean line

        current_training_gesture = training_data_file.gesture

        gesture_training_data_handler = DataHandlers.FileDataHandler(
            training_data_file)

        for sensor in range(Sensor.NUMBER_OF_SENSORS):
            if not is_sensor_on(sensor):
                continue
            # Same accumulation either way; only the destination differs
            # (previously two duplicated branch bodies).
            target = emg_corrs if sensor == Sensor.EMG else corr_rs
            target[current_training_gesture] += cross_correlation_compare_sensor_data(
                gesture_input_data_handler.get_sensor_data(sensor),
                gesture_training_data_handler.get_sensor_data(sensor),
                sensor)

    recognized_gesture = Gesture.NONE_GESTURE
    max_similarity = 0.0
    for gesture in range(Gesture.NUMBER_OF_GESTURES):
        similarity = corr_rs[gesture] + emg_corrs[gesture]
        print("{:10s}".format(Gesture.gesture_to_string(gesture)) + ": r = ",
              end="")
        print("%.8f" % similarity, end="")

        print(", IMU = ", end="")
        print("%.8f" % corr_rs[gesture], end="")

        print(", EMG = ", end="")
        print("%.8f" % emg_corrs[gesture])

        if similarity > max_similarity:
            max_similarity = similarity
            recognized_gesture = gesture

    print("")
    return recognized_gesture
Example #4
0
def print_success_rate(success_list):
    """Print per-gesture and overall success rates.

    success_list: list indexed by gesture id; each entry is a pair
    [attempts, successes].

    Returns the overall success rate in percent (0.0 when there were no
    attempts at all -- the original raised ZeroDivisionError in that case).
    """
    print("###########################################################")
    print("Success Rate")
    total_successes = 0
    total_attempts = 0
    for gesture, (attempts, successes) in enumerate(success_list):
        if attempts != 0:
            print('{:15s}\t{:4d} of {:4d} -> {:.2f}'.format(
                Gesture.gesture_to_string(gesture), successes, attempts,
                100 * successes / attempts))
        total_successes += successes
        total_attempts += attempts

    print()
    # Guard the division: empty or all-zero tallies yield a 0.0 rate.
    overall = (100 * total_successes / total_attempts) if total_attempts else 0.0
    print("Success rate: {:.2f}".format(overall))
    return overall
Example #5
0
def raw_success_list(number_of_gesture, json_data):
    """Tally per-gesture recognition successes from stored JSON results and
    print the failures plus the overall success rate.
    """
    success_list = [[0, 0] for _ in range(number_of_gesture)]

    for result_object in json_data:
        filename, correct_gesture, results = get_data_from_json_object(
            result_object)
        recognized_gesture = numpy.argmax(results)

        success_list[correct_gesture][0] += 1
        if correct_gesture == recognized_gesture:
            success_list[correct_gesture][1] += 1
        else:
            # Only failures are printed in full.
            print_results(number_of_gesture, filename, results,
                          correct_gesture)
            print("Recognized:", Gesture.gesture_to_string(recognized_gesture))
            print()

    print_success_rate(success_list)
def live_gesture_recognition():
    """Record gestures live from the Myo armband and classify each one with
    the default neural-network session, printing the recognized gesture.

    Loops until interrupted with Ctrl+C; the Myo hub is shut down on exit.
    """
    print("Try Gesture")
    libmyo.init('../myo-sdk-win-0.9.0/bin')
    listener = DeviceListener.LiveGestureListener()
    hub = libmyo.Hub()
    hub.run(2000, listener)

    try:
        while True:
            print("#################################################################################\n", end="")
            print()
            try:
                input("press enter to continue...")
            except EOFError:
                # No interactive stdin available; wait briefly and carry on.
                time.sleep(1)

            # Start recording and busy-wait until the listener stops itself.
            listener.recording_on()
            while listener.is_recording:
                pass

            print()
            # NOTE(review): a fresh network session is constructed for every
            # gesture; `default_session` is a module-level name not visible
            # in this chunk -- confirm it is defined before this runs.
            network_session = NeuralNetworkUtility.NeuralNetwork(NeuralNetwork.SESSION_FOLDERS, NeuralNetwork.DATA_HANDLER_TYPE, False)
            network_session.set_sess_path(default_session)
            network_session.get_network_meta_data()

            results = network_session.input_test_emg_network(listener.data_handler)
            network_session.print_results(results)

            recognized_gesture = numpy.argmax(results)
            print()
            print("Recognized:", Gesture.gesture_to_string(recognized_gesture))

    except KeyboardInterrupt:
        print('\nQuit')

    hub.shutdown()  # !! crucial
Example #7
0
 def print_results(self, results):
     """Print every gesture's index, name and result score, one per line."""
     gesture_count = self.get_number_of_gesture()
     for idx in range(gesture_count):
         name = Gesture.gesture_to_string(idx)
         print('{}) {:15s}\t{:10f}'.format(idx, name, results[idx]))
Example #8
0
    def test_emg_network(self):
        """Run the trained EMG network over every test file and summarise.

        For each test file: classifies it, prints the per-gesture results,
        the recognized vs. correct gesture and the analysis time, and
        optionally appends the result to a file. Afterwards prints a summary
        list and the per-gesture success rates, then waits for Enter.
        """
        self.get_network_meta_data()
        print("Session path:", self.sess_path)

        # Ask once whether every per-file result should also be stored.
        is_storeing_result = input("Write result to file (y/n)? ") == 'y'
        summary_list = []

        # Timestamp shared by all results written during this run.
        run_date = time.strftime("%Y-%m-%d-%H%M")

        number_of_gestures = self.get_number_of_gesture()
        if not self.is_hackathon:
            file_list = DataUtility.TEST_FILE_LIST
        else:
            file_list = HackathonDataNeuralNetwork.get_test_file_list(number_of_gestures)

        for test_file in file_list:
            data_handler = self.data_handler_type(test_file)
            data_handler.set_emg_wavelet_level(self.wavelet_level)
            data_handler.set_feature_functions_list(self.feature_function_check_list)

            # Time only the classification itself.
            start_time = time.time()
            results = self.input_test_emg_network(data_handler)
            end_time = time.time()

            recognized_gesture = numpy.argmax(results)

            print()
            print("###########################################################")
            self.print_results(results)
            print()
            # Fixed: previously called np.argmax() although only `numpy` is
            # used elsewhere (NameError at runtime); reuse the value above.
            print("Recognized:", Gesture.gesture_to_string(recognized_gesture))

            print("Correct gesture:", Gesture.gesture_to_string(test_file.gesture))
            print("Analyse time: ", "%.2f" % float(end_time - start_time))

            summary_list.append((test_file.gesture, recognized_gesture))

            print()
            print("File:", test_file.filename)
            if is_storeing_result:
                self.write_result_to_file(results, test_file.filename, test_file.gesture, run_date)

        print("#############################################################")
        print("Session path:", self.sess_path)
        print("Summary List")

        number_of_gestures = self.get_number_of_gesture()

        # success_list[g] = [attempts, correct recognitions] for gesture g.
        success_list = []
        for i in range(number_of_gestures):
            success_list.append([0, 0])

        for correct_gesture, recognized_gesture in summary_list:
            success_list[correct_gesture][0] += 1

            if correct_gesture == recognized_gesture:
                success_list[correct_gesture][1] += 1

            print(Gesture.gesture_to_string(correct_gesture), " -> ", Gesture.gesture_to_string(recognized_gesture))

        print()
        print("#############################################################")
        print("Success Rate")
        for i in range(number_of_gestures):
            if success_list[i][0] != 0:
                print('{:15s}\t{:4d} of {:4d} -> {:.2f}'.format(Gesture.gesture_to_string(i), success_list[i][1], success_list[i][0], 100 * success_list[i][1] / success_list[i][0]))

        input("Press Enter to continue...")
Example #9
0
def filtered_analyse(number_of_gesture, json_data):
    """Analyse stored results with user-tuned filters.

    Keeps the top `gesture_margin` candidate gestures per file, drops
    candidates that are not separated from the runner-up scores by at least
    `diff_margin` or that fall below `value_treshold`, and counts a success
    when the correct gesture survives filtering. Prints the failures, the
    overall success rate, and a LaTeX-formatted summary row.
    """
    success_list = []
    for i in range(number_of_gesture):
        success_list.append([0, 0])  # [attempts, successes] per gesture

    gesture_margin = input_parameter("Gesture margin (default = 1): ", 1, 1,
                                     number_of_gesture)
    diff_margin = input_parameter("Difference margin (default = 0.0): ",
                                  0.0,
                                  0.0,
                                  1.0,
                                  is_int=False)
    value_treshold = input_parameter("Value treshold (default = 0.0): ",
                                     0.0,
                                     0.0,
                                     1.0,
                                     is_int=False)

    for result_object in json_data:
        (filename, correct_gesture,
         results) = get_data_from_json_object(result_object)

        is_success = False

        # Take the `gesture_margin` highest-scoring gestures, knocking each
        # pick out of the working copy (-1) so argmax finds the next best.
        copy_results = copy.deepcopy(results)
        possible_gesture_results = []
        possible_gestures = []
        for i in range(gesture_margin):
            possible_gesture = numpy.argmax(copy_results)
            possible_gestures.append(possible_gesture)

            possible_gesture_result = copy_results[possible_gestures[-1]]
            copy_results[possible_gestures[-1]] = -1
            possible_gesture_results.append(possible_gesture_result)

        # Discard the weakest candidate while it is not separated from the
        # best remaining non-candidate score by at least diff_margin.
        while numpy.amin(possible_gesture_results) - numpy.amax(
                copy_results) < diff_margin:
            i = numpy.argmin(possible_gesture_results)
            possible_gesture_results.pop(i)
            possible_gestures.pop(i)
            if not len(possible_gesture_results):
                break

        # Drop candidates scoring below the absolute threshold (manual index
        # walk because items are popped while scanning).
        i = 0
        number_of_possible_gestures = len(possible_gestures)
        while i < number_of_possible_gestures:
            if possible_gesture_results[i] >= value_treshold:
                i += 1
            else:
                possible_gesture_results.pop(i)
                possible_gestures.pop(i)
                number_of_possible_gestures -= 1

        # NOTE(review): strict `>` here vs `>=` in the filter above -- a
        # candidate scoring exactly value_treshold survives filtering but can
        # never count as a success; confirm whether that is intended.
        for i in range(number_of_possible_gestures):
            if possible_gesture_results[
                    i] > value_treshold and possible_gestures[
                        i] == correct_gesture:
                is_success = True

        success_list[correct_gesture][0] += 1
        if is_success:
            success_list[correct_gesture][1] += 1
        else:
            print_results(number_of_gesture, filename, results,
                          correct_gesture)
            print("Recognized:",
                  [Gesture.gesture_to_string(x) for x in possible_gestures])

    success_rate = print_success_rate(success_list)
    # LaTeX table row: margin & diff margin & threshold & success rate.
    print("${}$ & ${:.2f}$ & ${:.2f}$ & ${:.2f}$".format(
        gesture_margin, diff_margin, value_treshold, success_rate))
def create_gesture_files_for_notme():
    """Interactively record "NotMe" gesture samples from the Myo armband.

    Each recording is classified by the default network session so the user
    only needs to confirm or correct the suggested label; confirmed samples
    are written as JSON files. Entering 'r' removes the previously written
    file. Loops until Ctrl+C; the Myo hub is shut down on exit.
    """
    libmyo.init('../myo-sdk-win-0.9.0/bin')
    listener = DeviceListener.LiveGestureListener()
    listener.expand_data_length(time_margin=1.5)
    hub = libmyo.Hub()
    hub.run(2000, listener)

    print("Create record data set")

    # Hoisted out of the loop: constant destination for all recordings.
    folder_path = "../data/raw_files/notme_set/"

    last_file = None
    try:
        person_id = input("Person ID-number: ")
        while True:
            print()
            print("#################################################################################\n", end="")

            # Fixed: reset `a` every iteration. Previously an EOFError left
            # `a` unbound on the first pass (NameError) and holding a stale
            # 'r' from the previous round on later passes.
            a = ""
            try:
                a = input("Enter 'r' to remove last file, or press enter to continue... ")
            except EOFError:
                time.sleep(1)
            if a == "r":
                # Sentinel: NUMBER_OF_GESTURES selects "remove last file".
                gesture_recorded = Gesture.NUMBER_OF_GESTURES
            else:
                # Record and busy-wait until the listener stops itself.
                listener.recording_on()
                sleep(2.0)  # NOTE(review): bare `sleep` -- relies on an import not visible here
                while listener.is_recording:
                    pass

                # Classify the recording to suggest a label to the user.
                network_session = NeuralNetworkUtility.NeuralNetwork(NeuralNetwork.SESSION_FOLDERS, NeuralNetwork.DATA_HANDLER_TYPE, False)
                network_session.set_sess_path(default_session)
                network_session.get_network_meta_data()

                results = network_session.input_test_emg_network(listener.data_handler)
                recognized_gesture = numpy.argmax(results)

                print()
                print("###########################################################")
                print()
                network_session.print_results(results)

                # Print number to gesture table
                print()
                print("{})".format(Gesture.NUMBER_OF_GESTURES), "remove last file...")
                print("{})".format(Gesture.NUMBER_OF_GESTURES + 1), "continue...")

                print()
                print("Recognized:", Gesture.gesture_to_string(recognized_gesture))

                # Audible cue that a correction can be entered now.
                Freq = 1500  # beep frequency in hertz
                Dur = 500  # beep duration in milliseconds
                winsound.Beep(Freq, Dur)

                # Keep prompting until a valid menu number is entered; a
                # plain Enter accepts the recognized gesture.
                gesture_recorded = -1
                while gesture_recorded < 0 or gesture_recorded >= Gesture.NUMBER_OF_GESTURES + 2:
                    gesture_recorded = input("Correction: ")
                    if gesture_recorded == "":
                        gesture_recorded = recognized_gesture
                    elif not Utility.is_int_input(gesture_recorded):
                        gesture_recorded = -1
                    else:
                        gesture_recorded = int(gesture_recorded)

            if gesture_recorded == Gesture.NUMBER_OF_GESTURES:
                if last_file is not None:
                    os.remove(last_file.get_file_path())
                    print("Removed file:", last_file.filename)
                    last_file = None
                else:
                    print("No last file, remove it manually")
                continue
            if gesture_recorded == Gesture.NUMBER_OF_GESTURES + 1:
                continue

            # Number the new file after the existing ones for this gesture.
            gesture_file_count_list = DataUtility.get_gesture_file_count_in_folder(folder_path)
            file_number = gesture_file_count_list[gesture_recorded]

            filename = person_id + "-NotMe-" + Gesture.gesture_to_string(gesture_recorded) + "-" + str(file_number) + ".json"

            last_file = listener.data_handler.create_json_file2(filename, folder_path)

    except KeyboardInterrupt:
        print('\nQuit')

    hub.shutdown()  # !! crucial