def run_experiment_test(base_dir):
    """Run the P300Net triplet-training experiment over a fixed subject list.

    For every subject ``.mat`` file under ``base_dir`` this:
      1. loads per-character epoch data via ``create_data_rep_training``,
      2. trains a triplet "P300Net" graph model on the training blocks
         (``train_mode_per_block == 1``),
      3. trains a plain per-item LSTM classifier on the same data,
      4. predicts on the non-training blocks, plots softmaxed predictions
         next to the ground truth, and prints per-subject accuracies.

    NOTE(review): Python 2 code (print statement near the bottom). Relies on
    module-level imports (``os``, ``np``, ``plt``, ``T``) and on sibling
    helpers defined elsewhere in this file (``create_data_rep_training``,
    ``triplet_data_generator``, ``get_graph_lstm``, ``train_p300_model``,
    ``predict_p300_model``, ...), none of which are visible in this chunk.
    """
    data_base_dir = base_dir  #= r'C:\Users\ORI\Documents\Thesis\dataset_all'
    # Subject recordings from the RSVP color-speller dataset (116 ms SOA,
    # judging by the file-name prefix) — TODO confirm dataset provenance.
    all_subjects = [
        "RSVP_Color116msVPicr.mat", "RSVP_Color116msVPpia.mat",
        "RSVP_Color116msVPfat.mat", "RSVP_Color116msVPgcb.mat",
        "RSVP_Color116msVPgcc.mat", "RSVP_Color116msVPgcd.mat",
        "RSVP_Color116msVPgcf.mat", "RSVP_Color116msVPgcg.mat",
        "RSVP_Color116msVPgch.mat", "RSVP_Color116msVPiay.mat",
        "RSVP_Color116msVPicn.mat"
    ]

    for subject in all_subjects:
        # subject = "RSVP_Color116msVPgcd.mat"

        file_name = os.path.join(data_base_dir, subject)
        # Epoch window -200..800 (presumably milliseconds around stimulus
        # onset — TODO confirm units) with a downsampling factor of 8.
        # Note the parameter name typo ("downsampe") comes from the helper.
        all_data_per_char, target_per_char, train_mode_per_block, all_data_per_char_as_matrix, target_per_char_as_matrix = create_data_rep_training(
            file_name, -200, 800, downsampe_params=8)

        # Triplet batches (batch size 80) drawn only from training blocks
        # (train_mode_per_block == 1).
        data_generator = triplet_data_generator(
            all_data_per_char_as_matrix[train_mode_per_block == 1],
            target_per_char_as_matrix[train_mode_per_block == 1], 80)

        # NOTE(review): testing_data / testing_tags are built here but never
        # used below in this chunk.
        testing_data, testing_tags = get_all_triplet_combinations_testing(
            all_data_per_char_as_matrix, target_per_char_as_matrix,
            train_mode_per_block)

        # Validation triplets from blocks flagged with mode 2.
        valid_data = triplet_data_collection(
            all_data_per_char_as_matrix[train_mode_per_block == 2],
            target_per_char_as_matrix[train_mode_per_block == 2], 80)

        # Python 2 integer division; the "/ 10" presumably reflects 10
        # stimulus repetitions per character — TODO confirm. This value is
        # not used further in the visible code.
        total_number_of_char_in_training = all_data_per_char_as_matrix[
            train_mode_per_block == 1].shape[0] / 10

        # region Build the P300Net model
        # Magic numbers (3, 10, 25, 55) are architecture hyper-parameters of
        # the triplet graph — their exact meaning is defined by
        # get_graph_lstm, not visible here.
        model = get_graph_lstm(3, 10, 25, 55)
        # endregion

        # region the P300Net identification model
        # A standalone per-item LSTM trained as a binary target/non-target
        # classifier (old Keras 1.x API: class_mode kwarg).
        P300IdentificationModel = get_item_lstm_subgraph(25, 55)
        P300IdentificationModel.compile(loss='binary_crossentropy',
                                        class_mode="binary",
                                        optimizer='rmsprop')
        # endregion

        # region train the P300Net model
        # model.fit_generator(data_generator, 2880, nb_epoch=10, validation_data=valid_data)
        # Keras 1.x fit_generator: 80 * 40 = samples per epoch, 2 epochs.
        model.fit_generator(data_generator,
                            80 * 40,
                            nb_epoch=2,
                            validation_data=valid_data)
        # endregion

        # all_train_data = dict()
        # Train the plain classifier directly on the training blocks.
        train_p300_model(
            P300IdentificationModel,
            all_data_per_char_as_matrix[train_mode_per_block == 1],
            target_per_char_as_matrix[train_mode_per_block == 1])

        # Fresh copy of the item subgraph whose weights are then replaced by
        # the 'item_latent' weights learned inside the triplet model.
        final_model = get_item_lstm_subgraph(25, 55)
        # NOTE(review): final_model_original_weights is captured but never
        # restored or used in the visible code.
        final_model_original_weights = final_model.get_weights()

        final_model.compile(loss='binary_crossentropy',
                            class_mode="binary",
                            optimizer='sgd')
        # Transplant the shared item-encoder weights from the trained
        # triplet graph (Keras 1.x Graph API: model.nodes[...]).
        final_model.set_weights(
            list(model.nodes['item_latent'].layer.get_weights()))

        # Predict on everything that is NOT a training block (modes != 1).
        all_prediction_P300Net = predict_p300_model(
            final_model,
            all_data_per_char_as_matrix[train_mode_per_block != 1])
        all_prediction_normal = predict_p300_model(
            P300IdentificationModel,
            all_data_per_char_as_matrix[train_mode_per_block != 1])
        # NOTE(review): this line overwrites the directly-trained model's
        # predictions with the P300Net ones, so every figure/accuracy below
        # actually reflects the P300Net weights. Looks like a debugging
        # leftover — confirm intent before trusting "normal" results.
        all_prediction_normal = all_prediction_P300Net
        plt.subplot(1, 4, 1)
        # plt.imshow(all_prediction, interpolation='none')
        plt.subplot(1, 4, 2)
        # Build a Theano softmax function for post-processing predictions.
        # NOTE(review): the import is inside the loop body (re-executed per
        # subject) — harmless but unconventional; consider hoisting.
        x = T.dmatrix('x')
        import theano

        softmax_res_func = theano.function([x], T.nnet.softmax(x))

        #
        # plt.imshow(softmax_res_func(all_prediction), interpolation='none')
        # plt.subplot(1, 4, 3)
        # plt.imshow(softmax_res_func(np.mean(all_prediction.reshape((-1, 10, 30)), axis=1)).astype(np.int),
        #            interpolation='none')

        plt.subplot(1, 4, 3)
        # Ground-truth tags for the non-training blocks.
        test_tags = target_per_char_as_matrix[
            train_mode_per_block !=
            1]  # np.array([target_per_char[x][train_mode_per_block != 1] for x in range(30)]).T
        # plt.imshow(np.mean(all_res.reshape((-1, 10, 30)), axis=1), interpolation='none')

        all_res = test_tags

        # plt.imshow(softmax_res_func(all_prediction_normal.reshape((-1, 30))), interpolation='none')

        # Average scores across the middle axis of the (-1, 10, 30) reshape
        # (presumably 10 repetitions x 30 candidate characters — TODO
        # confirm), softmax over characters, then argmax = predicted char.
        actual_untrained = np.argmax(softmax_res_func(
            np.mean(all_prediction_normal.reshape((-1, 10, 30)), axis=1)),
                                     axis=1)
        # actual = np.where(np.round(softmax_res_func(np.mean(all_prediction.reshape((-1, 10, 30)), axis=1))) == 1)[0];
        # Ground-truth character index, derived the same way from the tags.
        gt = np.argmax(np.mean(all_res.reshape((-1, 10, 30)), axis=1), axis=1)
        # np.intersect1d(actual, gt)
        # accuracy = len(np.intersect1d(actual, gt)) / float(len(gt))

        # Side-by-side visual check: softmaxed predictions vs. ground truth.
        # plt.show() blocks until the window is closed (per subject).
        plt.subplot(1, 2, 1)
        plt.imshow(
            softmax_res_func(
                np.mean(all_prediction_normal.reshape((-1, 10, 30)), axis=1)))
        plt.subplot(1, 2, 2)
        plt.imshow(np.mean(all_res.reshape((-1, 10, 30)), axis=1))
        plt.show()

        # NOTE(review): accuracy is hard-coded to 0, so the "accu" value
        # printed below is meaningless; the commented-out intersect1d line
        # above suggests a real computation was intended.
        accuracy = 0
        accuracy_untrained = np.sum(actual_untrained == gt) / float(len(gt))
        print "subject:{0} accu:{1} acc_untrained{2}".format(
            subject, accuracy, accuracy_untrained)
    # NOTE(review): this second pass over all_subjects appears to be an older
    # duplicated copy of the loop above (the model build and training calls
    # are commented out, and the loop body is cut off at the end of this
    # chunk). Strong candidate for removal; kept byte-identical here.
    for subject in all_subjects:
        # subject = "RSVP_Color116msVPgcd.mat"

        file_name = os.path.join(data_base_dir, subject)
        all_data_per_char, target_per_char, train_mode_per_block, all_data_per_char_as_matrix, target_per_char_as_matrix = create_data_rep_training(
            file_name, -200, 800,downsampe_params=8)

        data_generator = triplet_data_generator(all_data_per_char_as_matrix[train_mode_per_block == 1], target_per_char_as_matrix[train_mode_per_block == 1], 80)

        # NOTE(review): unused in the visible remainder of this loop.
        testing_data, testing_tags = get_all_triplet_combinations_testing(all_data_per_char_as_matrix,
                                                                          target_per_char_as_matrix,
                                                                          train_mode_per_block)


        valid_data = triplet_data_collection(all_data_per_char_as_matrix[train_mode_per_block == 2],
                                             target_per_char_as_matrix[train_mode_per_block == 2], 80)

        # Python 2 integer division, as in the first loop.
        total_number_of_char_in_training = all_data_per_char_as_matrix[train_mode_per_block == 1].shape[0]/10

        # region Build the P300Net model
        # model = get_graph_lstm(3, 10, 25,55)
        # endregion

        # region the P300Net identification model
        P300IdentificationModel = get_item_lstm_subgraph(25, 55)
        P300IdentificationModel.compile(loss='binary_crossentropy', class_mode="binary", optimizer='rmsprop')
        # endregion

        # region train the P300Net model
        # model.fit_generator(data_generator, 2880, nb_epoch=10, validation_data=valid_data)
        # model.fit_generator(data_generator, 80*5, nb_epoch=20, validation_data=valid_data)