Example #1
def evaluating_fast_nn():
    print('Initializing evaluating fast nn')

    classes = 8
    hidden_layers = 40
    instance_nn = ClassNN(model_dir=ClassNN.model_dir_pose, classes=classes, hidden_number=hidden_layers)
    instance_pose = ClassOpenPose()

    info = ClassDescriptors.load_images_comparision_ext(instance_pose, min_score=0.05, load_one_img=True)
    pose1 = info['pose1']

    items = ClassDescriptors.get_person_descriptors(pose1, 0.05)

    # Valid pose for detection
    data_to_add = list()
    data_to_add += items['angles']
    data_to_add += ClassUtils.get_flat_list(items['transformedPoints'])

    data_np = np.asanyarray(data_to_add, dtype=np.float)
    result = instance_nn.predict_model_fast(data_np)
    print(result)
    key_pose = result['classes']
    probability = result['probabilities'][key_pose]

    print('Key pose: {0}'.format(key_pose))
    print('Probability: {0}'.format(probability))
    print('Done!')
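
Note: ClassUtils.get_flat_list is not shown in these examples. A minimal sketch, assuming transformedPoints is a list of [x, y] pairs and get_flat_list simply flattens them into one coordinate list (the helper below is a hypothetical stand-in, not the project's code):

def get_flat_list(points):
    # Hypothetical stand-in for ClassUtils.get_flat_list: flattens a list of
    # [x, y] point pairs into a single flat list of coordinates
    flat = []
    for point in points:
        flat.extend(point)
    return flat

angles = [0.1, 0.5, 1.2]                       # example angle descriptors
transformed_points = [[0.0, 1.0], [2.0, 3.0]]  # example 2D points
data_to_add = list(angles) + get_flat_list(transformed_points)
print(data_to_add)  # [0.1, 0.5, 1.2, 0.0, 1.0, 2.0, 3.0]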
Example #2
def train(train_data, train_labels):
    # Loading instances
    classes = 10
    hidden_layers = 50
    instance_nn = ClassNN(model_dir='/tmp/model', classes=classes, hidden_number=hidden_layers)

    print('Training...')
    instance_nn.train_model(train_data, train_labels)
    print('Done Training')
Example #3
def train_model(train_data_np: np.ndarray, train_labels_np: np.ndarray,
                eval_data_np: np.ndarray, eval_labels_np: np.ndarray,
                instance_nn_train: ClassNN, steps):

    print('Training model into list')

    # Init training!
    # instance_train.update_batch_size(training_data_np.shape[0])
    start = time.time()
    instance_nn_train.train_model(train_data_np, train_labels_np, steps=steps)
    end = time.time()
    print('Training time: {0}'.format(end - start))

    # Performing data evaluation
    eval_model(eval_data_np, eval_labels_np, instance_nn_train)
Example #4
def cnn_reprocess_images():
    print('Reprocessing images')
    list_folders = list()
    list_folders.append(ClassUtils.cnn_class_folder)

    # Loading instances
    instance_nn = ClassNN(ClassNN.model_dir_pose, classes_number, hidden_layers)

    # File walk
    count = 0
    for folder in list_folders:
        for root, _, files in os.walk(folder):
            for file in files:
                full_path = os.path.join(root, file)
                extension = ClassUtils.get_filename_extension(full_path)

                if extension == '.json':
                    print('Processing file: {0}'.format(full_path))

                    with open(full_path, 'r') as f:
                        json_txt = f.read()

                    json_data = json.loads(json_txt)
                    list_poses = json_data['listPoses']

                    for pose in list_poses:
                        angles = pose['angles']
                        transformed_points = pose['transformedPoints']

                        list_desc = list()
                        list_desc += angles
                        list_desc += ClassUtils.get_flat_list(transformed_points)

                        list_desc_np = np.asanyarray(list_desc, dtype=np.float)

                        res = instance_nn.predict_model_fast(list_desc_np)
                        pose['keyPose'] = int(res['classes'])
                        pose['probability'] = 1

                    # Writing data again
                    data_txt = json.dumps(json_data, indent=2)
                    with open(full_path, 'w') as f:
                        f.write(data_txt)

                    count += 1

    print('Done')
    print('Total files processed: {0}'.format(count))
Example #5
def main():
    print('Initializing Main Function')

    # Initialize classifier instance - NN
    classes_number = 10
    hidden_number = 60
    learning_rate = 0.04
    nn_classifier = ClassNN(model_dir=ClassNN.model_dir_pose,
                            classes=classes_number,
                            hidden_number=hidden_number,
                            learning_rate=learning_rate)

    # Initialize classifier instance - SVM
    svm_classifier = ClassSVM(ClassSVM.path_model_pose)

    res = input('Press 1 to train using NN - 2 to train using SVM - 3 to train all: ')
    if res == '1':
        calculate_poses(Option.NN, nn_classifier, svm_classifier)
    elif res == '2':
        calculate_poses(Option.SVM, nn_classifier, svm_classifier)
    elif res == '3':
        # Train all
        calculate_poses(Option.NN, nn_classifier, svm_classifier)
        calculate_poses(Option.SVM, nn_classifier, svm_classifier)
    else:
        raise Exception('Option not recognized: {0}'.format(res))
Example #6
def get_poses_seq(folder: str,
                  instance_nn: ClassNN,
                  instance_pose: ClassOpenPose,
                  only_json=False):
    # List all folders
    list_files = []
    for file in os.listdir(folder):
        list_files.append(os.path.join(folder, file))

    # Sorting elements
    list_files.sort()

    # Get elements
    list_desc = list()
    for path in list_files:
        ext = ClassUtils.get_filename_extension(path)
        if only_json:
            if ext != '.json':
                print('Ignoring file: {0}'.format(path))
                continue

            with open(path, 'r') as file:
                person_arr_str = file.read()
                person_arr = json.loads(person_arr_str)
        else:
            if ext != '.jpg':
                print('Ignoring file: {0}'.format(path))
                continue

            print('Processing file {0}'.format(path))
            image = cv2.imread(path)

            arr = instance_pose.recognize_image(image)

            arr_pass = []
            for person_arr in arr:
                if ClassUtils.check_vector_integrity_part(
                        person_arr, min_score):
                    arr_pass.append(person_arr)

            if len(arr_pass) != 1:
                print('Ignoring file {0} - Len arr_pass: {1}'.format(
                    path, len(arr_pass)))
                continue

            person_arr = arr_pass[0]

        result_desc = ClassDescriptors.get_person_descriptors(
            person_arr, min_score)
        list_desc.append(result_desc['fullDesc'])

    list_desc_np = np.asarray(list_desc, np.float)
    results = instance_nn.predict_model_array(list_desc_np)

    list_classes = []
    for result in results:
        list_classes.append(result['classes'])

    return list_classes
Example #7
def cnn_image_generation_folder():
    # Initializing instance nn

    list_folders = list()
    list_folders.append(ClassUtils.cnn_class_folder)
    instance_nn = ClassNN(ClassNN.model_dir_pose, classes_number, hidden_layers)

    # File walk
    for folder in list_folders:
        for root, _, files in os.walk(folder):
            for file in files:
                full_path = os.path.join(root, file)
                extension = ClassUtils.get_filename_extension(full_path)

                if extension == '.json':
                    print('Processing: {0}'.format(full_path))

                    if 'ori' in full_path:
                        raise Exception('Full path contains ori folder!')

                    with open(full_path, 'r') as f:
                        json_txt = f.read()

                    json_data = json.loads(json_txt)

                    # All image generation
                    image_name_pos = ClassUtils.get_filename_no_extension(full_path) + '_p.bmp'
                    image_name_angle = ClassUtils.get_filename_no_extension(full_path) + '_a.bmp'
                    image_name_pose = ClassUtils.get_filename_no_extension(full_path) + '_s.bmp'
                    image_name_angles_black = ClassUtils.get_filename_no_extension(full_path) + '_b.bmp'
                    image_name_pos_black = ClassUtils.get_filename_no_extension(full_path) + '_o.bmp'
                    image_name_pos_black_rem = ClassUtils.get_filename_no_extension(full_path) + '_r.bmp'

                    image_res_pos = create_cnn_image_pos(json_data)
                    image_res_angle = create_cnn_image_angles(json_data)
                    image_res_pose = create_cnn_image_pose(json_data, instance_nn)
                    image_res_angles_black = create_image_cnn_angles_black(json_data)
                    image_res_pos_black = create_cnn_image_pos_black(json_data)
                    image_res_pos_black_rem = create_cnn_image_pos_black_rem(json_data)

                    # print('Writing image pos: {0}'.format(image_name_pos))
                    cv2.imwrite(image_name_pos, image_res_pos)

                    # print('Writing image angle: {0}'.format(image_name_angle))
                    cv2.imwrite(image_name_angle, image_res_angle)

                    # print('Writing image pose: {0}'.format(image_name_pose))
                    cv2.imwrite(image_name_pose, image_res_pose)

                    # print('Writing image angles black: {0}'.format(image_name_angles_black))
                    cv2.imwrite(image_name_angles_black, image_res_angles_black)

                    # print('Writing image pos black: {0}'.format(image_name_pos_black))
                    cv2.imwrite(image_name_pos_black, image_res_pos_black)

                    print('Writing image pos black rem: {0}'.format(image_name_pos_black_rem))
                    cv2.imwrite(image_name_pos_black_rem, image_res_pos_black_rem)

    print('Done!')
Example #8
def evaluating_fast(eval_data):
    print('Evaluating...')

    # Loading instances
    classes = 10
    hidden_layers = 50
    instance_nn = ClassNN(model_dir='/tmp/model', classes=classes, hidden_number=hidden_layers)

    data = instance_nn.predict_model_fast(eval_data[0])
    print(data)

    data = instance_nn.predict_model_fast(eval_data[1])
    print(data)

    start = time.time()
    data = instance_nn.predict_model_fast(eval_data[3])
    end = time.time()
    print(data)
    print('Time elapsed: {0}'.format(end - start))

    start = time.time()
    data = instance_nn.predict_model_fast(eval_data[0])
    end = time.time()

    print(data)
    print('Time elapsed: {0}'.format(end - start))

    print('Done evaluating fast')
Example #9
def main():
    print('Initializing main function')

    # Initializing instances
    classes = len(list_folder_data)
    instance_nn_train = ClassNN(ClassNN.model_dir_action, classes,
                                hidden_number)
    instance_nn_pose = ClassNN(ClassNN.model_dir_pose, pose_classes,
                               pose_hidden_number)

    res = input(
        'Press 1 to work with poses, press 2 to work with all - press 3 to work with points '
    )

    if res == '1':
        load_descriptors(instance_nn_train, instance_nn_pose, Desc.POSES)
    elif res == '2':
        load_descriptors(instance_nn_train, instance_nn_pose, Desc.ALL)
    elif res == '3':
        load_descriptors(instance_nn_train, instance_nn_pose, Desc.POINTS)
    else:
        print('Option not recognized: {0}'.format(res))
Example #10
def eval_model(eval_data_np: np.ndarray, eval_labels_np: np.ndarray,
               instance_nn_train: ClassNN):
    classes = len(list_folder_data)

    # Evaluate
    instance_nn_train.eval_model(eval_data_np, eval_labels_np)

    # Getting confusion matrix
    print('Getting confusion matrix')

    confusion_np = np.zeros((classes, classes))
    for i in range(eval_data_np.shape[0]):
        data = eval_data_np[i]
        expected = eval_labels_np[i]
        obtained = instance_nn_train.predict_model_fast(data)
        class_prediction = obtained['classes']

        confusion_np[expected, class_prediction] += 1

    print('Confusion matrix')
    print(confusion_np)

    print('Done!')
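
The confusion matrix above keeps raw counts, with rows as expected labels and columns as predictions. A short sketch (not part of the original project) for turning it into per-class recall by row normalization:

import numpy as np

def normalize_confusion(confusion_np):
    # Row-normalize: each row sums to 1, so the diagonal holds per-class recall
    row_sums = confusion_np.sum(axis=1, keepdims=True)
    row_sums[row_sums == 0] = 1  # avoid division by zero for empty classes
    return confusion_np / row_sums

confusion = np.array([[8., 2.], [1., 9.]])
print(normalize_confusion(confusion))  # [[0.8 0.2] [0.1 0.9]]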
Example #11
def calculate_poses(option: Option, nn_classifier: ClassNN, svm_classifier: ClassSVM):
    print('Calculating poses using {0}'.format(option))

    # Recalculate all poses and get confidence
    for classInfo in list_classes:
        folder = classInfo['folderPath']
        for root, _, files in os.walk(folder):
            for file in files:
                full_path = os.path.join(root, file)
                ext = ClassUtils.get_filename_extension(full_path)

                if '_rawdata' in file and ext == '.json':
                    print('Processing file: {0}'.format(full_path))

                    with open(full_path, 'r') as f:
                        file_txt = f.read()

                    file_json = json.loads(file_txt)
                    list_poses = file_json['listPoses']

                    for pose in list_poses:
                        angles = pose['angles']
                        transformed_points = pose['transformedPoints']

                        # Using transformed points only (angles are read above but not added to the descriptor)
                        list_desc = list()
                        list_desc += ClassUtils.get_flat_list(transformed_points)

                        # Convert to numpy
                        list_desc_np = np.asanyarray(list_desc, np.float)

                        if option == Option.NN:
                            result = nn_classifier.predict_model_fast(list_desc_np)
                        else:
                            result = svm_classifier.predict_model(list_desc_np)

                        pose['class'] = int(result['classes'])
                        pose['probabilities'] = result['probabilities'].tolist()

                    # Writing again into file
                    file_txt = json.dumps(file_json, indent=4)
                    new_full_path = ClassUtils.change_ext_training(full_path,
                                                                   '{0}_posedata'.format(option.value))

                    with open(new_full_path, 'w') as f:
                        f.write(file_txt)

                    # Done

    print('Done processing elements')
Example #12
def main():
    print('Initializing main function')

    # Initializing instances
    instance_pose = ClassOpenPose()

    folder_training_1 = '/home/mauricio/Pictures/Poses/Walk_Front'
    folder_training_2 = '/home/mauricio/Pictures/Poses/Vehicle'
    folder_training_3 = '/home/mauricio/Pictures/Poses/Tires'

    data_1 = get_sets_folder(folder_training_1, 0, instance_pose)
    data_2 = get_sets_folder(folder_training_2, 1, instance_pose)
    data_3 = get_sets_folder(folder_training_3, 1, instance_pose)

    data_training = np.asarray(data_1[0] + data_2[0] + data_3[0],
                               dtype=np.float)
    label_training = np.asarray(data_1[1] + data_2[1] + data_3[1],
                                dtype=np.int)

    data_eval = np.asarray(data_1[2] + data_2[2] + data_3[2], dtype=np.float)
    label_eval = np.asarray(data_1[3] + data_2[3] + data_3[3], dtype=np.int)

    print(data_training)
    print(label_training)

    print('Len data_training: {0}'.format(data_training.shape))
    print('Len data_eval: {0}'.format(data_eval.shape))

    classes = 3
    hidden_neurons = 15
    model_dir = '/tmp/nnposes/'

    instance_nn = ClassNN(model_dir, classes, hidden_neurons)

    option = input(
        'Press 1 to train the model, 2 to eval, otherwise to predict')

    if option == '1':
        print('Training the model')

        # Delete previous folder to avoid conflicts in the training process
        if os.path.isdir(model_dir):
            # Removing directory
            shutil.rmtree(model_dir)

        instance_nn.train_model(data_training, label_training)
    elif option == '2':
        print('Eval the model')
        instance_nn.eval_model(data_eval, label_eval)
    else:
        print('Not implemented!')

    print('Done!')
Example #13
def main():
    print('Initializing main function')
    print('Loading datasets')

    train_data, train_labels = ClassImageDataSet.load_train_mnist()
    eval_data, eval_labels = ClassImageDataSet.load_eval_mnist()

    print('PCA with training data')
    n_features = 18

    pca = PCA(n_components=n_features, svd_solver='randomized').fit(train_data)
    train_pca = pca.transform(train_data)
    n_classes = 10
    hidden_neurons = 100
    eval_pca = pca.transform(eval_data)

    print('Printing shapes')
    print(train_data.shape)
    print(train_pca.shape)
    model_dir = '/tmp/model_example_pca'
    classifier = ClassNN(model_dir, n_classes, hidden_neurons)

    var = input('Set 1 to train, 2 to predict. Otherwise to eval ')

    if var == '1':
        print('Training model')
        classifier.train_model(train_pca, train_labels)
    elif var == '2':
        print('Predict model')
        print('Total elements: ' + str(eval_pca.shape[0]))
        index = 1100
        eval_item = eval_pca[index]
        print(eval_item.shape)

        result = classifier.predict_model(eval_item)
        print('Result obtained: ' + str(result['classes']))
        print('Print probabilities')
        print(result['probabilities'])

        print('Real result: ' + str(eval_labels[index]))
    else:
        print('Evaluating model')
        classifier.eval_model(eval_pca, eval_labels)

    print('Done!')
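
The example fixes n_features = 18 before fitting PCA. A sketch, assuming scikit-learn, of picking the number of components from the cumulative explained-variance ratio instead (the 0.95 threshold and the random stand-in data are illustrative, not taken from the project):

import numpy as np
from sklearn.decomposition import PCA

def components_for_variance(train_data, threshold=0.95):
    # Fit a full PCA, then return the smallest number of components whose
    # cumulative explained variance reaches the threshold
    pca = PCA().fit(train_data)
    cumulative = np.cumsum(pca.explained_variance_ratio_)
    return int(np.searchsorted(cumulative, threshold)) + 1

rng = np.random.default_rng(0)
fake_data = rng.normal(size=(200, 64))  # stand-in for the flattened MNIST features
print(components_for_variance(fake_data))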
Example #14
def main():
    print('Initializing main function')
    print('Warning - You must convert to mjpegx first')

    # Withdrawing Tkinter window
    Tk().withdraw()

    init_dir = '/home/mauricio/Videos/Oviedo/'
    options = {'initialdir': init_dir}

    folder = filedialog.askdirectory(**options)

    # filedialog.askdirectory returns an empty string when the dialog is cancelled
    if not folder:
        print('Folder not selected')
    else:
        print(folder)

        # Initializing pose instance
        instance_nn_pose = ClassNN(model_dir=ClassNN.model_dir_pose,
                                   classes=ClassNN.classes_num_pose,
                                   hidden_number=ClassNN.hidden_num_pose)

        print('Extracting all mjpegx files')

        for root, _, files in os.walk(folder):
            for file in files:
                full_path = os.path.join(root, file)
                extension = ClassUtils.get_filename_extension(full_path)

                if extension == '.mjpegx':
                    print(file)
                    print('Reprocessing ' + full_path + ' to mjpegx')
                    camera_number = ClassUtils.get_cam_number_from_path(
                        full_path)
                    ClassMjpegConverter.convert_video_mjpegx(
                        full_path, camera_number, instance_nn_pose)
Example #15
def main():
    print('Initializing main function')

    # Withdrawing tkinter

    # Loading model dirs
    list_folder_data = [
        ('/home/mauricio/CNN/Classes/Door', 0.05),
        ('/home/mauricio/CNN/Classes/Tires', 0.05),
        ('/home/mauricio/CNN/Classes/Walk', 0.05),
    ]

    list_hmm = []

    for folder_data in list_folder_data:
        label_name = get_label_from_folder(folder_data[0])
        full_model_dir = os.path.join(hnn_model_folder,
                                      '{0}.pkl'.format(label_name))
        list_hmm.append(ClassHMM(full_model_dir))

    # Initializing instances
    instance_pose = ClassOpenPose()
    instance_nn = ClassNN.load_from_params(nn_model_dir)

    option = input('Select 1 to train, 2 to eval hmm, 3 to preprocess: ')

    if option == '1':
        print('Train hmm selection')
        train_hmm(list_folder_data, list_hmm, instance_nn, instance_pose)
    elif option == '2':
        eval_hmm(list_folder_data, list_hmm, instance_nn, instance_pose)
    elif option == '3':
        recalculate = False
        pre_process_images(instance_pose, list_folder_data, recalculate)
    else:
        print('Invalid argument: {0}'.format(option))
Example #16
def main():
    print('Initializing main function')

    # Prompt for user input
    cam_number_str = input('Insert camera number to process: ')
    cam_number = int(cam_number_str)

    # Open video from opencv
    cap = cv2.VideoCapture(cam_number)

    # Initializing open pose distance
    instance_pose = ClassOpenPose()

    # Initializing variables
    model_dir = '/home/mauricio/models/nn_classifier'
    instance_nn = ClassNN.load_from_params(model_dir)

    while True:
        # Capture frame-by-frame
        ret, frame = cap.read()

        # Processing frame with openpose
        arr, frame = instance_pose.recognize_image_tuple(frame)

        # Check if there is one frame with vector integrity
        arr_pass = list()

        min_score = 0.05
        # Checking vector integrity for all elements
        # Verify there is at least one arm and one leg
        for elem in arr:
            if ClassUtils.check_vector_integrity_pos(elem, min_score):
                arr_pass.append(elem)

        if len(arr_pass) != 1:
            print('Invalid len for arr_pass: {0}'.format(len(arr_pass)))
        else:
            person_array = arr_pass[0]

            # Getting person descriptors
            results = ClassDescriptors.get_person_descriptors(
                person_array, min_score)

            # Descriptors
            data_to_add = results['angles']
            data_to_add += ClassUtils.get_flat_list(
                results['transformedPoints'])

            data_np = np.asanyarray(data_to_add, dtype=np.float)

            # Getting result predict
            result_predict = instance_nn.predict_model(data_np)
            detected_class = result_predict['classes']

            label_class = get_label_name(instance_nn.label_names,
                                         detected_class)
            print('Detected: {0} - Label: {1}'.format(detected_class,
                                                      label_class))

            # Draw text class into image - Evaluation purposes
            font = cv2.FONT_HERSHEY_SIMPLEX
            font_scale = 0.6
            font_color = (255, 255, 255)
            line_type = 2

            # Use (10, 30) as the text origin; at (0, 0) the baseline sits on
            # the top edge and the label is drawn outside the visible frame
            cv2.putText(frame, '{0}'.format(label_class), (10, 30), font,
                        font_scale, font_color, line_type)

        # Display the resulting frame
        cv2.imshow('frame', frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

    # When everything done, release the capture
    cap.release()
    cv2.destroyAllWindows()

    print('Done!')
Example #17
def train_nn_cnn(training_list_poses, training_labels, eval_list_poses,
                 eval_labels, option, base_data):
    print('Init NN Training')

    if option != Option.NN and option != Option.CNN:
        raise Exception('Option not valid: {0}'.format(option))

    # Training labels
    list_descriptors = list()
    for index_pose in range(len(training_list_poses)):
        list_poses = training_list_poses[index_pose]

        if option == Option.NN:
            descriptor = get_nn_descriptor(list_poses)
        else:
            descriptor = get_cnn_descriptor_pos(list_poses)

        list_descriptors.append(descriptor)

    training_descriptors_np = np.asanyarray(list_descriptors, dtype=np.float)
    training_labels_np = np.asanyarray(training_labels, dtype=np.int)

    # Eval labels
    list_descriptors = list()
    for index_pose in range(len(eval_list_poses)):
        list_poses = eval_list_poses[index_pose]

        if option == Option.NN:
            descriptor = get_nn_descriptor(list_poses)
        else:
            descriptor = get_cnn_descriptor_pos(list_poses)

        list_descriptors.append(descriptor)

    eval_descriptors_np = np.asanyarray(list_descriptors, dtype=np.float)
    eval_labels_np = np.asanyarray(eval_labels, dtype=np.int)

    # Initializing training instance
    classes = len(list_classes_classify)

    if option == Option.NN:
        hidden_number = 50
        instance_model = ClassNN(ClassNN.model_dir_action, classes,
                                 hidden_number)
    else:
        instance_model = ClassCNN(ClassCNN.model_dir_action,
                                  classes,
                                  cnn_image_height,
                                  cnn_image_height,
                                  depth,
                                  batch_size=32,
                                  train_steps=15000)

    print('Training nn model')
    instance_model.train_model(training_descriptors_np, training_labels_np)

    print('Model trained - Evaluating')
    accuracy = instance_model.eval_model(eval_descriptors_np, eval_labels_np)

    # Evaluating all elements
    for folder_info in list_classes:
        folder = folder_info['folderPath']
        for root, _, files in os.walk(folder):
            for file in files:
                full_path = os.path.join(root, file)
                if '_{0}_partialdata'.format(base_data) in full_path:
                    process_file_action(full_path,
                                        option,
                                        instance_model=instance_model,
                                        accuracy=accuracy)
Example #18
def classify_images(list_folder_data: list, type_desc: EnumDesc):
    classes_number = 0

    cont = True
    while cont:
        cont = False

        for folder_data in list_folder_data:
            if folder_data[2] == classes_number:
                classes_number += 1
                cont = True
                break

    hidden_number = 60
    learning_rate = 0.005
    steps = 20000

    # Initialize classifier instance
    nn_classifier = ClassNN(model_dir=ClassNN.model_dir_pose,
                            classes=classes_number,
                            hidden_number=hidden_number,
                            learning_rate=learning_rate)

    results = ClassLoadDescriptors.load_pose_descriptors(type_desc)

    training_data_np = results['trainingData']
    training_labels_np = results['trainingLabels']
    eval_data_np = results['evalData']
    eval_labels_np = results['evalLabels']
    training_files_np = results['trainingFiles']
    eval_files_np = results['evalFiles']
    label_names = results['labelNames']

    # Prompt for user input
    selection = input('Training selected {0}. '
                      'Press 1 to train, 2 to evaluate, 3 to predict, 4 to save csv, '
                      '5 to get confusion matrix: '.format(type_desc))

    if selection == '1':
        # Training
        nn_classifier.train_model(train_data=training_data_np,
                                  train_labels=training_labels_np,
                                  label_names=label_names,
                                  steps=steps)

        # Evaluate after training
        nn_classifier.eval_model(eval_data_np, eval_labels_np)
    elif selection == '2':
        # Evaluate
        nn_classifier.eval_model(eval_data_np, eval_labels_np)
    elif selection == '3':
        # Predict
        # Select data to eval
        data_eval = eval_data_np[0]
        label_eval = eval_labels_np[0]

        results = nn_classifier.predict_model(data_eval)
        print('Predict data np: {0}'.format(results))
        print('Expected data np: {0}'.format(label_eval))
    elif selection == '4':
        # Saving file in csv
        total_data = np.concatenate((training_data_np, eval_data_np), axis=0)
        total_labels = np.concatenate((training_labels_np, eval_labels_np), axis=0)
        total_files = np.concatenate((training_files_np, eval_files_np))

        # Add new axis to allow concatenation
        total_labels = total_labels[:, np.newaxis]
        total_files = total_files[:, np.newaxis]

        total_np = np.concatenate((total_data, total_labels), axis=1)

        print('Saving data to CSV in file {0}'.format(csv_dir))
        np.savetxt(csv_dir, total_np, delimiter=',', fmt='%10.10f')
        np.savetxt(csv_dir_files, total_files, delimiter=',', fmt='%s')

        print('Saving training data')
        # Concatenate with new axis
        total_np_train = np.concatenate((training_data_np, training_labels_np[:, np.newaxis]), axis=1)
        total_np_eval = np.concatenate((eval_data_np, eval_labels_np[:, np.newaxis]), axis=1)

        # Saving
        np.savetxt(csv_train, total_np_train, delimiter=',', fmt='%10.10f')
        np.savetxt(csv_eval, total_np_eval, delimiter=',', fmt='%10.10f')

        print('Done writing file in CSV')
    elif selection == '5':
        print('Getting confusion matrix')

        confusion_np = np.zeros((classes_number, classes_number))
        for i in range(eval_data_np.shape[0]):
            data = eval_data_np[i]
            expected = eval_labels_np[i]
            obtained = nn_classifier.predict_model_fast(data)
            class_prediction = obtained['classes']
            print('Class: {0}'.format(class_prediction))

            confusion_np[expected, class_prediction] += 1

        print('Confusion matrix')
        print(confusion_np)
        print('Labels: {0}'.format(label_names))
    else:
        raise Exception('Option not supported')
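
The while loop at the top of classify_images counts classes by looking for consecutive indices starting at 0. Assuming every class index from 0 upward appears in list_folder_data exactly once (and the tuple layout below is only illustrative), a shorter equivalent would be:

# Hypothetical layout: (folder_path, min_score, class_index)
list_folder_data = [('/path/a', 0.05, 0), ('/path/b', 0.05, 1), ('/path/c', 0.05, 2)]
classes_number = max(folder_data[2] for folder_data in list_folder_data) + 1
print(classes_number)  # 3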
Example #19
def load_descriptors(instance_nn_train: ClassNN, instance_nn_pose: ClassNN,
                     pose_type: Desc):
    training_data = list()
    training_labels = list()
    eval_data = list()
    eval_labels = list()
    training_files = list()
    eval_files = list()

    for index, item in enumerate(list_folder_data):
        folder = item['folderPath']
        label = item['label']

        print('Processing folder path: {0}'.format(folder))

        num_file = 0
        list_paths = list()
        for root, _, files in os.walk(folder):
            for file in files:
                full_path = os.path.join(root, file)
                extension = ClassUtils.get_filename_extension(full_path)

                if extension == '.json':
                    list_paths.append(full_path)

        total_samples = len(list_paths)
        total_train = int(total_samples * 80 / 100)

        # Shuffle samples
        random.Random(seed).shuffle(list_paths)

        for full_path in list_paths:
            # Reading data
            with open(full_path, 'r') as f:
                json_txt = f.read()

            json_data = json.loads(json_txt)

            list_poses = json_data['listPoses']

            # Sampling data
            descriptor = list()
            for index_size in range(samples_size):
                index_pose = int(len(list_poses) * index_size / samples_size)
                pose = list_poses[index_pose]
                transformed_points = pose['transformedPoints']
                angles = pose['angles']

                list_desc = list()
                list_desc += angles
                list_desc += ClassUtils.get_flat_list(transformed_points)

                if pose_type == Desc.POSES:
                    list_desc_np = np.asanyarray(list_desc, dtype=np.float)
                    res = instance_nn_pose.predict_model_fast(list_desc_np)

                    # Add descriptor with probabilities
                    for elem in res['probabilities']:
                        descriptor.append(elem)
                elif pose_type == Desc.ALL:
                    for elem in list_desc:
                        descriptor.append(elem)
                elif pose_type == Desc.POINTS:
                    list_flat = ClassUtils.get_flat_list(transformed_points)
                    for elem in list_flat:
                        descriptor.append(elem)
                else:
                    raise Exception(
                        'Pose type not recognized: {0}'.format(pose_type))

            if num_file < total_train:
                training_data.append(descriptor)
                training_labels.append(label)
                training_files.append(full_path)
            else:
                eval_data.append(descriptor)
                eval_labels.append(label)
                eval_files.append(full_path)

            num_file += 1

    # Convert data to numpy array
    training_data_np = np.asanyarray(training_data, dtype=np.float)
    training_labels_np = np.asanyarray(training_labels, dtype=int)

    eval_data_np = np.asanyarray(eval_data, dtype=np.float)
    eval_labels_np = np.asanyarray(eval_labels, dtype=int)

    print('Shape images training: {0}'.format(training_data_np.shape))
    print('Shape labels training: {0}'.format(training_labels_np.shape))

    if training_data_np.shape[0] == 0:
        raise Exception('No files found!')

    res = input('Press 1 to train - 2 to eval: ')

    if res == '1':
        train_model(training_data_np,
                    training_labels_np,
                    eval_data_np,
                    eval_labels_np,
                    instance_nn_train,
                    steps=30000)
    elif res == '2':
        eval_model(eval_data_np, eval_labels_np, instance_nn_train)
    else:
        raise Exception('Option not implemented!')
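
load_descriptors turns pose sequences of arbitrary length into fixed-size descriptors by sampling samples_size poses spread evenly across the sequence. An isolated sketch of that sampling step (names are illustrative, not from the project):

def sample_poses(list_poses, samples_size):
    # Pick samples_size elements spread evenly over the sequence, mirroring
    # index_pose = int(len(list_poses) * index_size / samples_size) above
    sampled = []
    for index_size in range(samples_size):
        index_pose = int(len(list_poses) * index_size / samples_size)
        sampled.append(list_poses[index_pose])
    return sampled

print(sample_poses(list(range(10)), 4))  # [0, 2, 5, 7]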
Example #20
def main():
    print('Initializing main function')

    # Initializing instances
    instance_pose = ClassOpenPose()
    instance_net = ClassNN.load_from_params(model_dir)

    # Withdrawing list
    Tk().withdraw()

    # Select directory to process
    init_dir = '/home/mauricio/CNN/Images'
    options = {'initialdir': init_dir}
    dir_name = filedialog.askdirectory(**options)

    if not dir_name:
        print('Directory not selected')
    else:
        # Loading images
        list_files = os.listdir(dir_name)
        list_files.sort()

        desc_list = list()

        for file in list_files:
            full_path = os.path.join(dir_name, file)

            print('Processing image {0}'.format(full_path))
            image = cv2.imread(full_path)
            arr = instance_pose.recognize_image(image)

            arr_pass = list()
            for person_arr in arr:
                if ClassUtils.check_vector_integrity_part(
                        person_arr, min_pose_score):
                    arr_pass.append(person_arr)

            if len(arr_pass) != 1:
                print('Invalid len {0} for image {1}'.format(
                    len(arr_pass), full_path))
                continue
            else:
                result_des = ClassDescriptors.get_person_descriptors(
                    arr_pass[0], min_pose_score)
                descriptor_arr = result_des['fullDesc']

                # Add descriptors to list
                desc_list.append(descriptor_arr)

        # Convert to numpy array
        print('Total poses: {0}'.format(len(desc_list)))

        # Transform list and predict
        desc_list_np = np.asarray(desc_list, dtype=np.float)
        print('ndim pose list: {0}'.format(desc_list_np.ndim))

        list_classes = list()
        predict_results = instance_net.predict_model_array(desc_list_np)
        for result in predict_results:
            list_classes.append(result['classes'])

        print('Predict results: {0}'.format(list_classes))
        print('Classes label: {0}'.format(instance_net.label_names))

        print('Done!')
Example #21
def train_bow(training_list_cls, training_cls_labels, validate_list_cls,
              validate_cls_labels, eval_list_actions, eval_labels,
              option: Option, base_data_1, base_data_2):
    print('Training BoW')

    # Generating BoW descriptors
    train_descriptors = list()
    for list_actions in training_list_cls:
        words = get_bow_descriptors(list_actions)
        train_descriptors.append(words)

    descriptors_np = np.asanyarray(train_descriptors, dtype=np.float)
    training_labels_np = np.asanyarray(training_cls_labels, dtype=np.int)

    # Generating instance_nn and train model
    cls_number = len(list_classes)
    hidden_neurons = 20
    instance_nn = ClassNN(ClassNN.model_dir_activity, cls_number,
                          hidden_neurons)
    instance_nn.train_model(descriptors_np, training_labels_np)

    # Validating model
    validate_descriptors = list()
    for list_actions in validate_list_cls:
        words = get_bow_descriptors(list_actions)
        validate_descriptors.append(words)

    validate_descriptors_np = np.asanyarray(validate_descriptors,
                                            dtype=np.float)
    validate_labels_np = np.asanyarray(validate_cls_labels, dtype=np.int)

    accuracy = instance_nn.eval_model(validate_descriptors_np,
                                      validate_labels_np)
    print('Local accuracy: {0}'.format(accuracy))

    # Evaluating
    eval_descriptors = list()
    for list_actions in eval_list_actions:
        words = get_bow_descriptors(list_actions)
        eval_descriptors.append(words)

    eval_descriptors_np = np.asanyarray(eval_descriptors, dtype=np.float)
    eval_labels_np = np.asanyarray(eval_labels, dtype=np.int)

    real_accuracy = instance_nn.eval_model(eval_descriptors_np, eval_labels_np)
    print('Real accuracy: {0}'.format(real_accuracy))

    classes_number = len(list_classes)
    confusion_np = np.zeros((classes_number, classes_number))
    for i in range(eval_descriptors_np.shape[0]):
        data = eval_descriptors_np[i]
        expected = eval_labels_np[i]
        obtained = instance_nn.predict_model_fast(data)
        class_prediction = obtained['classes']
        print('Class: {0}'.format(class_prediction))

        confusion_np[expected, class_prediction] += 1

    print('Confusion matrix')
    print(confusion_np)

    apply_classifier(option,
                     base_data_1,
                     base_data_2,
                     instance_nn=instance_nn,
                     accuracy=accuracy,
                     real_accuracy=real_accuracy)
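
get_bow_descriptors is not shown in these examples. A plausible minimal version, offered only as an assumption about its shape, is a normalized histogram ("bag of words") of predicted action classes over a sequence:

import numpy as np

def get_bow_descriptors_sketch(list_actions, num_classes):
    # Count how often each action class appears and normalize to a histogram;
    # this is a guess at the descriptor, not the project's implementation
    words = np.zeros(num_classes, dtype=float)
    for action_class in list_actions:
        words[action_class] += 1
    total = words.sum()
    return words / total if total > 0 else words

print(get_bow_descriptors_sketch([0, 2, 2, 1, 2], num_classes=3))  # [0.2 0.2 0.6]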