# Example no. 1
# 0
    sess.run(init)

    # restore previous model if there is one
    ckpt = tf.train.get_checkpoint_state(CHECKPOINT_DIR)
    if ckpt and ckpt.model_checkpoint_path:
        print "Restoring previous model..."
        try:
            saver.restore(sess, ckpt.model_checkpoint_path)
            print "Model restored"
        except:
            print "Could not restore model"
            raise
            exit()

    print 'Loading data...'
    images, annots, test_images, test_annots, _ = data_ops.load_galaxy(
        DATA_DIR)
    test_len = len(test_annots)

    print 'generating data...'
    idx = np.random.choice(np.arange(test_len), BATCH_SIZE, replace=False)
    batch_y = test_annots[idx]
    # the four z vectors to interpolate between
    f_z = np.random.normal(-1.0, 1.0, size=[4, 100]).astype(np.float32)

    # contains rows of images
    gen_imgs = []

    y1 = batch_y[0]
    y2 = batch_y[1]
    y3 = batch_y[2]
    y4 = batch_y[3]
# Example no. 2
# 0
    # restore previous model if there is one
    ckpt = tf.train.get_checkpoint_state(CHECKPOINT_DIR)
    if ckpt and ckpt.model_checkpoint_path:
        print "Restoring previous model..."
        try:
            saver.restore(sess, ckpt.model_checkpoint_path)
            print "Model restored"
        except:
            print "Could not restore model"
            pass

    print 'Loading data...'

    # images and annots: _, __
    train_images, train_annots, test_images, test_annots, paths = data_ops.load_galaxy(
        DATA_DIR)

    test_len = len(test_annots)
    print 'test num:', test_len

    info = {}

    # want to write out a file with the image path and z vector
    for p, img, label in tqdm(zip(paths, test_images, test_annots)):
        img = data_ops.normalize(img)
        batch_images = np.expand_dims(img, 0)
        encoding = sess.run([encoded], feed_dict={images: batch_images})[0][0]
        info[p] = [encoding, label]

    # write out dictionary to pickle file
    p = open(OUTPUT_DIR + 'data.pkl', 'wb')