    sess.run(init)

    # restore previous model if there is one
    ckpt = tf.train.get_checkpoint_state(CHECKPOINT_DIR)
    if ckpt and ckpt.model_checkpoint_path:
        print "Restoring previous model..."
        try:
            saver.restore(sess, ckpt.model_checkpoint_path)
            print "Model restored"
        except Exception:
            print "Could not restore model"
            raise

    print 'Loading data...'
    train_images, train_annots, train_ids, test_images, test_annots, test_ids = data_ops.load_efigi(
        DATA_DIR, CLASSES, 64)
    test_len = len(test_ids)

    print 'generating data...'
    idx = np.random.choice(np.arange(test_len), BATCH_SIZE, replace=False)
    # draw one latent vector and repeat it for every sample in the batch
    batch_z = np.random.normal(-1.0, 1.0, size=[BATCH_SIZE, 100]).astype(np.float32)
    batch_z[1:] = batch_z[0]
    batch_y = test_annots[idx]
    batch_y[:NUM + 1] = batch_y[1]  # make sure these samples share the same attributes

    # or make up attribute values by hand:
    #batch_y[0][0] = 1. # arm strength
    #batch_y[0][1] = 1. # arm curvature
    #batch_y[0][2] = 0. # visible dust
Example #2
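This snippet pickles the experiment configuration next to the checkpoints, reloads the EFIGI data, and sets up the input placeholders for training.
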
    exp_info = dict()
    exp_info['CHECKPOINT_DIR'] = CHECKPOINT_DIR
    exp_info['BATCH_SIZE'] = BATCH_SIZE
    exp_info['DATA_DIR'] = DATA_DIR
    exp_info['CLASSES'] = classes
    exp_info['EPOCHS'] = EPOCHS
    exp_info['LOSS'] = LOSS

    # save the experiment configuration alongside the checkpoints
    with open(CHECKPOINT_DIR + 'info.pkl', 'wb') as exp_pkl:
        pickle.dump(exp_info, exp_pkl)
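    # (illustrative, not part of the original example) the pickled config can
    # be read back when resuming a run, e.g.:
    #   with open(CHECKPOINT_DIR + 'info.pkl', 'rb') as f:
    #       exp_info = pickle.load(f)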

    print 'Loading data...'
    train_images, train_annots, train_ids, test_images, test_annots, test_ids = data_ops.load_efigi(
        DATA_DIR, classes, 64)

    # find y dimension: keep one entry per attribute class selected in `classes`
    idx_ = np.array([1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 31, 34, 37, 40, 43, 46, 49])
    idx_ = np.multiply(classes[:-1], idx_)
    idx = [x for x in idx_ if x != 0]
    y_dim = len(idx)
    # account for the redshift attribute, flagged by the last entry of classes
    if classes[-1] == 1:
        y_dim += 1
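    # worked example (illustrative): with classes = [1, 0, 0, ..., 0, 1],
    # only the first attribute survives the mask (idx == [1]), so y_dim is 1,
    # and the redshift flag then brings it to 2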

    # placeholders for data going into the network
    global_step = tf.Variable(0, name='global_step', trainable=False)
    real_images = tf.placeholder(tf.float32,
                                 shape=(BATCH_SIZE, 64, 64, 3),
                                 name='real_images')
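    # (illustrative continuation, not from the original) the remaining inputs
    # would typically be placeholders for the latent vector z and the attribute
    # vector y; the 100-dim latent size is assumed from Example #1 above:
    #   z = tf.placeholder(tf.float32, shape=(BATCH_SIZE, 100), name='z')
    #   y = tf.placeholder(tf.float32, shape=(BATCH_SIZE, y_dim), name='y')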