Example #1
# Shared imports for the examples below (TensorFlow 1.x graph/session API).
import numpy as np
import tensorflow as tf
from tqdm import tqdm

# InputGenerator, AAE, OpenMax, create_dir and plot_dist are project-specific
# helpers assumed to be importable from the surrounding code base.

# Note: this function name shadows Python's built-in eval().
def eval(X, Y, args, n_classes=10, exp=0, split=0):
    x_shape = [args.batch_size] + list(X.shape[1:])
    y_shape = [args.batch_size] + list(Y.shape[1:])
    #save_file = args.save_path + "exp_{}/model_split{}.ckpt".format(exp, split)

    with tf.Graph().as_default():
        dset = InputGenerator([None]+list(X.shape[1:]), n_classes, args.z_size, batch_size=args.batch_size, n_epochs=1)
        aae = AAE("test", batch_size=args.batch_size, n_epochs=1, n_classes=n_classes, z_size=args.z_size, input_shape=x_shape)

        iterador = dset.create_eval_generator()
        x_input, y_input = iterador.get_next()
        _, y_tilde = aae.encoder(x_input)
        acc, acc_op = tf.metrics.mean_per_class_accuracy(tf.argmax(y_input, -1), tf.argmax(y_tilde, -1), n_classes)

        saver = tf.train.Saver()
        with tf.Session() as sess:
            saver.restore(sess, tf.train.latest_checkpoint(args.save_path + "exp_{}/".format(exp)))
            sess.run(tf.local_variables_initializer())
            sess.run(iterador.initializer, feed_dict={dset.x_input:X, dset.y_input:Y})

            n_steps = (len(X) // args.batch_size)
            with tqdm(n_steps, desc="Eval", unit="Steps") as pbar:
                try:
                    while True:
                        # acc_op accumulates the per-class counts; acc reads the
                        # running mean per-class accuracy.
                        accuracy, _ = sess.run([acc, acc_op])
                        pbar.update()
                        pbar.set_postfix(Test_Acc=accuracy)
                except tf.errors.OutOfRangeError:
                    pass

            # Re-read the metric so the reported value includes the final update.
            accuracy = sess.run(acc)

            out_string = "Split {} Accuracy: {:02.3f}% \n".format(split + 1, 100 * accuracy)
            print(out_string)
            return out_string
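
For reference, a minimal call sketch for eval(). Everything below is an illustrative assumption rather than a confirmed interface: the argparse fields merely mirror the attributes the function reads (batch_size, z_size, save_path), the dummy arrays use arbitrary MNIST-like shapes, and a trained checkpoint is assumed to exist under "checkpoints/exp_0/".

import argparse
import numpy as np

# Hypothetical argument namespace; only the fields used above are set.
args = argparse.Namespace(batch_size=32, z_size=10, save_path="checkpoints/")

# Dummy images and one-hot labels, only to illustrate the expected shapes.
X_test = np.random.rand(320, 28, 28, 1).astype(np.float32)
Y_test = np.eye(10, dtype=np.float32)[np.random.randint(0, 10, size=320)]

summary = eval(X_test, Y_test, args, n_classes=10, exp=0, split=0)
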
Example #2
def get_train_fit(X, Y, args, n_classes=10, exp=0, split=0):
    x_shape = [args.batch_size] + list(X.shape[1:])

    save_file = args.save_path + "exp_{}/model_split{}.ckpt".format(exp, split)

    with tf.Graph().as_default():
        dset = InputGenerator([None] + list(X.shape[1:]),
                              n_classes,
                              args.z_size,
                              batch_size=args.batch_size,
                              n_epochs=1)
        aae = AAE("test",
                  batch_size=args.batch_size,
                  n_epochs=1,
                  n_classes=n_classes,
                  z_size=args.z_size,
                  input_shape=x_shape)

        iterador = dset.create_eval_generator()
        x_input, y_input = iterador.get_next()
        _, y_tilde = aae.encoder(x_input, supervised=True)

        saver = tf.train.Saver()
        with tf.Session() as sess:
            saver.restore(
                sess,
                tf.train.latest_checkpoint(args.save_path +
                                           "exp_{}/".format(exp)))
            sess.run(iterador.initializer,
                     feed_dict={
                         dset.x_input: X,
                         dset.y_input: Y
                     })

            n_steps = (len(X) // args.batch_size)
            with tqdm(n_steps, desc="Novelty Train", unit="Steps",
                      leave=True) as pbar:
                try:
                    # Accumulate the class logits predicted for every batch.
                    train_logits = np.empty([0, n_classes], dtype=np.float32)
                    while True:
                        logit = sess.run(y_tilde)
                        pbar.update()
                        train_logits = np.append(train_logits, logit, axis=0)
                except tf.errors.OutOfRangeError:
                    pass

            # Holds only if the eval generator yielded every sample in X.
            assert len(train_logits) == len(Y.argmax(-1))

            om = OpenMax(n_classes)
            try:
                om.fit(train_logits, Y.argmax(-1))
            except ValueError:
                print("No se pudo completar")
                return None
        return om
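
A hedged usage sketch for get_train_fit(), reusing the hypothetical args namespace and dummy arrays from the sketch after Example #1. The function returns None when the OpenMax fit raises a ValueError, so callers should check for that case; how the fitted OpenMax object is later used for open-set scoring depends on the OpenMax class's own interface, which is not shown here.

# In a real run this would be the closed-set training split, not test data.
om = get_train_fit(X_test, Y_test, args, n_classes=10, exp=0, split=0)
if om is None:
    print("OpenMax fit failed; skipping open-set scoring for this split.")
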
Example #3
def check_dist_space(X, Y, args, n_classes=10, exp=0, split=0):
    x_shape = [args.batch_size] + list(X.shape[1:])
    create_dir("plots/")
    save_file = args.save_path + "exp_{}/model_split{}.ckpt".format(exp, split)

    with tf.Graph().as_default():
        dset = InputGenerator([None] + list(X.shape[1:]),
                              n_classes,
                              args.z_size,
                              batch_size=args.batch_size,
                              n_epochs=1)
        aae = AAE("test",
                  batch_size=args.batch_size,
                  n_epochs=1,
                  n_classes=n_classes,
                  z_size=args.z_size,
                  input_shape=x_shape)

        iterador = dset.create_eval_generator()
        x_input, y_input = iterador.get_next()
        # We are interested in the latent distribution space, so only the
        # encoder's z output is kept.
        z_hat, _ = aae.encoder(x_input)
        y_class = tf.argmax(y_input, -1)

        saver = tf.train.Saver()
        with tf.Session() as sess:
            saver.restore(
                sess,
                tf.train.latest_checkpoint(args.save_path +
                                           "exp_{}/".format(exp)))
            sess.run(iterador.initializer,
                     feed_dict={
                         dset.x_input: X,
                         dset.y_input: Y
                     })
            n_steps = (len(X) // args.batch_size)

            with tqdm(n_steps, desc="Dist", unit="Steps", leave=False) as pbar:
                try:
                    # Collect the latent codes and their integer class labels.
                    Z_ = np.empty((0, args.z_size), dtype=np.float32)
                    Y_ = np.empty((0,), dtype=np.int32)
                    while True:
                        z_, y = sess.run([z_hat, y_class])
                        Z_ = np.append(Z_, z_, axis=0)
                        Y_ = np.append(Y_, y)
                        pbar.update()
                except tf.errors.OutOfRangeError:
                    pass

            plot_dist(Z_, Y_, split)
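
Finally, a hypothetical call to check_dist_space(), again assuming the args namespace and dummy arrays from the earlier sketches plus a restorable checkpoint under "checkpoints/exp_0/". plot_dist() is expected to write its figure under the "plots/" directory created at the top of the function.

# Visualise the latent space learned by the restored model for split 0.
check_dist_space(X_test, Y_test, args, n_classes=10, exp=0, split=0)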