Example #1
def main(_):
    resized_width = 64
    resized_height = 64

    since = time.time()

    image_lists = data_process.create_image_lists(FLAGS.images_dir)

    test_datas, test_labels = data_process.get_batch_of_data(
        image_lists, -1, FLAGS.images_dir, 'test', resized_width,
        resized_height)

    train_datas, train_labels = data_process.get_batch_of_data(
        image_lists, -1, FLAGS.images_dir, 'train', resized_width,
        resized_height)

    print(train_datas.shape)
    print(train_labels.shape)

    # sklearn expects integer class indices, so collapse the one-hot labels.
    model = svm.SVC(kernel='rbf', verbose=True, probability=True)
    model.fit(train_datas, np.argmax(train_labels, axis=1))

    pred_prob = model.predict_proba(test_datas)
    test_acc = np.mean(
        np.equal(np.argmax(pred_prob, axis=1), np.argmax(test_labels, axis=1)))
    print('Test accuracy:', test_acc)

    save_model_dir = os.path.join(FLAGS.path, "save_model", "SVM")

    # os.makedirs must target the directory, not the pickle file itself;
    # otherwise the open() below fails because the path is a directory.
    if not os.path.exists(save_model_dir):
        os.makedirs(save_model_dir)

    save_model_path = os.path.join(save_model_dir, "Nonlinear_SVM.pickle")
    with open(save_model_path, 'wb') as f:
        pickle.dump(model, f)

    time_elapsed = time.time() - since

    print("Runtime: {}min, {:0.2f}sec".format(int(time_elapsed // 60),
                                              time_elapsed % 60))

    save_results_dir = os.path.join(FLAGS.path, "results", "SVM")

    # As above, create the directory rather than the results file itself.
    if not os.path.exists(save_results_dir):
        os.makedirs(save_results_dir)

    save_results_path = os.path.join(save_results_dir, "results.txt")
    with open(save_results_path, "w") as f:
        f.write("Test accuracy = {}\n".format(test_acc))
        f.write("Runtime: {}min, {:0.2f}sec".format(int(time_elapsed // 60),
                                                    time_elapsed % 60))
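
The data_process module is not shown in these examples; from the calls above it evidently returns flattened image arrays together with one-hot labels, and batch_size == -1 means "all images in the split". A minimal stand-in with the same contract (the stub name, the sample count, and the class_count default are assumptions, not the real implementation):

import numpy as np

def get_batch_of_data_stub(image_lists, batch_size, images_dir, category,
                           resized_width, resized_height, class_count=6):
    # Illustrative stand-in only: random flattened RGB images plus
    # one-hot labels, shaped the way the examples above expect.
    n = 100 if batch_size == -1 else batch_size
    datas = np.random.rand(
        n, resized_width * resized_height * 3).astype(np.float32)
    labels = np.eye(class_count)[np.random.randint(0, class_count, size=n)]
    return datas, labels
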
Example #2
def predict(path, images_dir, resized_width, resized_height):
    image_lists = data_process.create_image_lists(images_dir)

    test_datas, test_labels = data_process.get_batch_of_data(
        image_lists, -1, images_dir, 'test', resized_width, resized_height)

    save_model_path = os.path.join(FLAGS.path, "save_model", "SVM",
                                   "Nonlinear_SVM.pickle")

    with open(save_model_path, 'rb') as f:
        model = pickle.load(f)

    pred_prob = model.predict_proba(test_datas)
    test_acc = np.mean(
        np.equal(np.argmax(pred_prob, axis=1), np.argmax(test_labels, axis=1)))
    print('Test accuracy:', test_acc)

    pre_data_dir = "../../predictData/SVM"

    if not os.path.exists(pre_data_dir):
        os.makedirs(pre_data_dir)

    with h5py.File(os.path.join(pre_data_dir, "prediction_and_labels.h5"),
                   "w") as f:
        f["prediction"] = pred_prob
        f["truth"] = test_labels
Example #3
def main(_):
    resized_width = 128
    resized_height = 128

    model = build_CNN()

    opt = Adam(lr=FLAGS.learning_rate)
    model.compile(loss="categorical_crossentropy",
                  optimizer=opt,
                  metrics=["accuracy"])

    model.load_weights(os.path.join(FLAGS.path, "save_model", "CNN.h5df"))

    image_lists = data_process.create_image_lists(FLAGS.images_dir)

    test_datas, test_labels = data_process.get_batch_of_data(
        image_lists, -1, FLAGS.images_dir, "test", resized_width,
        resized_height)

    test_loss, test_acc = model.evaluate(test_datas, test_labels)

    print("Test accuracy:{0:.4f}, test loss:{1:.4f}".format(
        test_acc, test_loss))

    prediction = model.predict(test_datas)

    pre_data_dir = "../../predictData/CNN"

    if not os.path.exists(pre_data_dir):
        os.makedirs(pre_data_dir)

    with h5py.File(os.path.join(pre_data_dir, "prediction_and_labels.h5"),
                   "w") as f:
        f["prediction"] = prediction
        f["truth"] = test_labels
Example #4
def generate_train_data(image_lists, images_dir, batch_size, resized_width,
                        resized_height):
    while True:
        train_datas, train_labels = data_process.get_batch_of_data(
            image_lists, batch_size, images_dir, "train", resized_width,
            resized_height)

        yield (train_datas, train_labels)
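
The generator loops forever by design, which is why Example #7 bounds each epoch with steps_per_epoch when passing it to fit_generator. Pulling a single batch by hand shows the contract (the exact shapes depend on data_process):

gen = generate_train_data(image_lists, images_dir, 32, 128, 128)
datas, labels = next(gen)  # one batch, e.g. (32, 49152) datas, (32, 6) labels
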
Example #5
def predict(path, images_dir, tree_num, resized_width, resized_height):
    X = tf.placeholder(tf.float32, shape=[None, 128 * 128 * 3])
    Y = tf.placeholder(tf.float32, shape=[None])

    image_lists = data_process.create_image_lists(images_dir)

    train_step, randomforest_loss, accuracy, output = randomforest(
        X, Y, 128 * 128 * 3, 6, tree_num)

    saver = tf.train.Saver()

    init = tf.group(
        tf.global_variables_initializer(),
        resources.initialize_resources(resources.shared_resources()))

    with tf.Session(config=config) as sess:
        sess.run(init)

        ckpt = tf.train.latest_checkpoint(os.path.join(path, 'save_model'))
        print(ckpt)
        saver.restore(sess, ckpt)

        test_datas, test_labels = data_process.get_batch_of_data(
            image_lists, -1, images_dir, 'test', resized_width, resized_height)

        labels = np.argmax(test_labels, axis=1)

        print(test_datas.shape)
        print(test_labels.shape)
        print(labels.shape)

        test_acc, prediction = sess.run([accuracy, output],
                                        feed_dict={
                                            X: test_datas,
                                            Y: labels
                                        })
        print('Test accuracy:{0:.4f}'.format(test_acc))

        pre_data_dir = "../../predictData/RandomForest"

        if not os.path.exists(pre_data_dir):
            os.makedirs(pre_data_dir)

        with h5py.File(os.path.join(pre_data_dir, "prediction_and_labels.h5"),
                       "w") as f:
            f["prediction"] = prediction
            f["truth"] = test_labels
Example #6
def main(_):
    resized_width = 128
    resized_height = 128

    model = build_CNN()

    opt = Adam(lr=FLAGS.learning_rate)
    model.compile(loss="categorical_crossentropy",
                  optimizer=opt,
                  metrics=["accuracy"])

    model.load_weights(os.path.join(FLAGS.path, "weights/weights", "model.h5df"))

    image_lists = data_process.create_image_lists(FLAGS.images_dir)

    test_datas, test_labels = data_process.get_batch_of_data(
        image_lists, -1, FLAGS.images_dir, "test", resized_width, resized_height)

    test_loss, test_acc = model.evaluate(test_datas, test_labels)

    print("Test accuracy:{0:.4f}, test loss:{1:.4f}".format(test_acc, test_loss))
Example #7
def main(_):
    resized_width = 128
    resized_height = 128

    since = time.time()

    model = build_CNN()
    model.summary()

    opt = Adam(lr=FLAGS.learning_rate)
    model.compile(loss="categorical_crossentropy",
                  optimizer=opt,
                  metrics=["accuracy"])

    result = get_result()

    checkpoint_path = os.path.join(FLAGS.path, "save_model", "CNN.h5df")
    checkpoint = ModelCheckpoint(filepath=checkpoint_path,
                                 save_best_only=True,
                                 save_weights_only=True,
                                 monitor="val_acc",
                                 mode="max")  # mode is a string flag

    tb = TensorBoard(log_dir=os.path.join(FLAGS.path, "results/results/logs"))

    callbacks = [result, checkpoint, tb]

    image_lists = data_process.create_image_lists(FLAGS.images_dir)

    with gfile.FastGFile(os.path.join(FLAGS.path, "results/output_labels.txt"),
                         "w") as f:
        f.write("\n".join(image_lists.keys()) + "\n")

    val_datas, val_labels = data_process.get_batch_of_data(
        image_lists, -1, FLAGS.images_dir, "val", resized_width,
        resized_height)

    model.fit_generator(generate_train_data(image_lists, FLAGS.images_dir,
                                            FLAGS.batch_size, resized_width,
                                            resized_height),
                        epochs=FLAGS.epochs,
                        steps_per_epoch=100,
                        validation_data=(val_datas, val_labels),
                        callbacks=callbacks)

    test_datas, test_labels = data_process.get_batch_of_data(
        image_lists, -1, FLAGS.images_dir, "test", resized_width,
        resized_height)

    test_loss1, test_acc1 = model.evaluate(test_datas, test_labels)

    print("Test accuracy:{0:.4f}, test loss:{1:.4f}".format(
        test_acc1, test_loss1))

    model.load_weights(os.path.join(FLAGS.path, "save_model", "CNN.h5df"))
    test_loss2, test_acc2 = model.evaluate(test_datas, test_labels)

    time_elapsed = time.time() - since

    print("Test accuracy with best validation =  {}".format(test_acc2 * 100))
    print("Final test accuracy =  {}".format(test_acc1 * 100))
    print("Total Model Runtime: {}min, {:0.2f}sec".format(
        int(time_elapsed // 60), time_elapsed % 60))

    with open(os.path.join(FLAGS.path, "results/results/results.txt"),
              "w") as f:
        f.write("Test accuracy with best validation:" + str(test_acc2) + "\n")
        f.write("Final test accuracy: " + str(test_acc1) + "\n")
        f.write("Total Model Runtime: " + str(int(time_elapsed // 60)) +
                "min," + str(time_elapsed % 60) + "sec")
Example #8
def main(_):
    resized_width = 128
    resized_height = 128

    since = time.time()

    X = tf.placeholder(tf.float32, shape=[None, 128 * 128 * 3])
    Y = tf.placeholder(tf.float32, shape=[None])

    image_lists = data_process.create_image_lists(FLAGS.images_dir)

    class_count = len(image_lists.keys())

    train_step, randomforest_loss, accuracy = randomforest(
        X, Y, 128 * 128 * 3, class_count, FLAGS.tree_num)

    saver = tf.train.Saver()

    # Create the directories first; the checkpoint prefix and the results
    # file are paths inside them, so saver.save() and open() both work.
    save_model_dir = os.path.join(FLAGS.path, "save_model")
    if not os.path.exists(save_model_dir):
        os.makedirs(save_model_dir)
    save_model_path = os.path.join(save_model_dir, "RandomForest")

    save_results_dir = os.path.join(FLAGS.path, "results")
    if not os.path.exists(save_results_dir):
        os.makedirs(save_results_dir)
    save_results_path = os.path.join(save_results_dir, "results.txt")

    init = tf.group(
        tf.global_variables_initializer(),
        resources.initialize_resources(resources.shared_resources()))

    with tf.Session(config=config) as sess:
        sess.run(init)

        merged = tf.summary.merge_all()
        train_writer = tf.summary.FileWriter(
            os.path.join(FLAGS.path, "results", "logs", "train"), sess.graph)

        validation_writer = tf.summary.FileWriter(
            os.path.join(FLAGS.path, "results", "logs", "validation"))

        val_datas, val_labels = data_process.get_batch_of_data(
            image_lists, -1, FLAGS.images_dir, "val", resized_width,
            resized_height)

        val_labels = np.argmax(val_labels, axis=1)

        test_datas, test_labels = data_process.get_batch_of_data(
            image_lists, -1, FLAGS.images_dir, "test", resized_width,
            resized_height)

        test_labels = np.argmax(test_labels, axis=1)

        best_acc = 0.0
        # Initialize so these exist even if validation never improves.
        test_loss1, test_acc1 = 0.0, 0.0

        for i in range(FLAGS.iters):
            train_datas, train_labels = data_process.get_batch_of_data(
                image_lists, FLAGS.batch_size, FLAGS.images_dir, "train",
                resized_width, resized_height)

            train_labels = np.argmax(train_labels, axis=1)

            sess.run(train_step, feed_dict={X: train_datas, Y: train_labels})

            if i % 10 == 0:
                train_loss, train_acc, train_summary = sess.run(
                    [randomforest_loss, accuracy, merged],
                    feed_dict={X: train_datas, Y: train_labels})

                val_loss, val_acc, val_summary = sess.run(
                    [randomforest_loss, accuracy, merged],
                    feed_dict={X: val_datas, Y: val_labels})

                train_writer.add_summary(train_summary, i)
                validation_writer.add_summary(val_summary, i)

                if val_acc > best_acc:
                    best_acc = val_acc
                    test_loss1, test_acc1 = sess.run(
                        [randomforest_loss, accuracy],
                        feed_dict={
                            X: test_datas,
                            Y: test_labels
                        })
                    # Checkpoint the model at the best validation accuracy.
                    saver.save(sess, save_model_path)

                print(
                    "Iteration {0:d}: train loss:{1:f}, train acc:{2:f}, val loss:{3:f}, val acc:{4:f}"
                    .format(i, train_loss, train_acc, val_loss, val_acc))

        test_loss2, test_acc2 = sess.run([randomforest_loss, accuracy],
                                         feed_dict={
                                             X: test_datas,
                                             Y: test_labels
                                         })

        print("Best validation accuracy = {}".format(best_acc * 100))
        print("Test accuracy with best validation =  {}".format(test_acc1 *
                                                                100))
        print("Final test accuracy =  {}".format(test_acc2 * 100))

    time_elapsed = time.time() - since

    print("Runtime: {}min, {:0.2f}sec".format(int(time_elapsed // 60),
                                              time_elapsed % 60))

    with open(save_results_path, "w") as f:
        f.write("Best validation accuracy: " + str(best_acc) + "\n")
        f.write("Test accuracy with best validation: " + str(test_acc1) + "\n")
        f.write("Final test accuracy: " + str(test_acc2) + "\n")
        f.write("Runtime: " + str(int(time_elapsed // 60)) + "min, " +
                str(time_elapsed % 60) + "sec")