Example #1
from flask import Flask, flash, redirect, render_template
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField

# The GAN class used below is assumed to come from the project's own code;
# its import was not part of the original snippet.


class InputForm(FlaskForm):
    # only the field name `poeminput` is visible in the original snippet;
    # StringField is an assumption
    poeminput = StringField('poeminput')
    submit = SubmitField('submit')


app = Flask(__name__)
app.config['SECRET_KEY'] = 'you-will-never-guess'
# ... add more variables here as needed


@app.route('/', methods=['GET', 'POST'])
@app.route('/index', methods=['GET', 'POST'])
def index():
    form = InputForm()
    if form.validate_on_submit():
        print("receive")
        print(form.poeminput.data)
        a = gan.generate_poem(count=8,
                              temperature=1.00,
                              seed=form.poeminput.data)
        print(a)
        flash('Output of seed \'{}\': \n {} '.format(form.poeminput.data, a))
        # print(form.username.data,form.remember_me.data)
        return redirect('/index')
    return render_template('index.html', title='Poem Generate', form=form)


if __name__ == '__main__':
    # server = pywsgi.WSGIServer(('0.0.0.0',80), app)
    # server.serve_forever()
    # build the generator once before serving requests
    gan = GAN()
    gan.build()
    app.run(debug=True)
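
The GAN class that Example #1 drives is not shown; only build() and generate_poem(count, temperature, seed) are visible from the call sites. The stub below is a hypothetical stand-in with invented behaviour, useful only for exercising the Flask route without the real model.

class StubGAN:
    """Hypothetical stand-in matching the calls made in Example #1."""

    def build(self):
        # the real project would construct or load the generator here
        pass

    def generate_poem(self, count=8, temperature=1.0, seed=''):
        # a real generator would sample `count` lines conditioned on `seed`;
        # this stub just echoes the seed so the route can be tested end to end
        return '\n'.join('{} ... line {}'.format(seed, i + 1) for i in range(count))
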
Example #2
import os

import numpy as np
import tensorflow as tf  # TF 1.x graph/session API (tf.compat.v1 under TF 2.x)
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from tensorboard import program

# GAN and utils are assumed to be project-local modules; their imports were
# not part of the original snippet.


def inference(FLAG):
    FLAG_save_dir = FLAG.save_dir
    FLAG_plot_dir = FLAG.plot_dir
    FLAG_batch_size = 32
    FLAG_n_dim = 100

    gan = GAN()
    gan.build(n_dim=FLAG_n_dim, shape=(64, 64, 3))

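    # helper: run the initializer only for variables that are not yet initialized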
    def initialize_uninitialized(sess):
        global_vars = tf.global_variables()
        is_not_initialized = sess.run(
            [tf.is_variable_initialized(var) for var in global_vars])
        not_initialized_vars = [
            v for (v, f) in zip(global_vars, is_not_initialized) if not f
        ]
        if len(not_initialized_vars):
            sess.run(tf.variables_initializer(not_initialized_vars))

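    # helper: tile the generated samples into an n_row x n_col grid of 64x64 RGB images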
    def res_plot(samples, n_row, n_col):
        fig = plt.figure(figsize=(n_col * 2, n_row * 2))
        gs = gridspec.GridSpec(n_row, n_col)
        gs.update(wspace=0.05, hspace=0.05)
        for i, sample in enumerate(samples):
            ax = plt.subplot(gs[i])
            plt.axis('off')
            ax.set_xticklabels([])
            ax.set_yticklabels([])
            ax.set_aspect('equal')
            plt.imshow(sample.reshape(64, 64, 3))
        return fig

    with tf.Session() as sess:
        if FLAG_save_dir is not None:
            sess.run(tf.global_variables_initializer())
            saver = tf.train.Saver()
            ckpt = tf.train.get_checkpoint_state(FLAG_save_dir)

            if ckpt and ckpt.model_checkpoint_path:
                saver.restore(sess, ckpt.model_checkpoint_path)
                print("Model restored %s" % ckpt.model_checkpoint_path)
                sess.run(tf.global_variables())
            else:
                print("No model checkpoint in %s" % FLAG_save_dir)
        else:
            sess.run(tf.global_variables_initializer())
            sess.run(tf.global_variables())
        print("Initialized")
        print("Plot saved in %s" % FLAG_plot_dir)

        # hyper parameters
        batch_size = FLAG_batch_size

        # re-initialize: cover any variables the checkpoint did not restore
        initialize_uninitialized(sess)

        # plot
        np.random.seed(296)
        Xplot = sess.run(
            gan.G_image,
            feed_dict={
                gan.random_sample: np.random.uniform(
                    -1, 1, [batch_size, gan.n_dim]).astype(np.float32),
                gan.is_train: False,
            })
        fig = res_plot(Xplot, int(batch_size / 8), 8)
        plt.savefig(os.path.join(FLAG_plot_dir, 'fig2_3.jpg'),
                    bbox_inches='tight')
        plt.close(fig)
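        # launch TensorBoard on the log directory and print its URL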
        tb = program.TensorBoard()
        tb.configure(argv=[None, '--logdir', utils.LOG_DIR])
        url = tb.launch()
        print(url)

Example #3
import datetime

import scanpy as sc
import tensorflow as tf

# GAN and utils are assumed to be project-local modules; their imports were
# not part of the original snippet.

# initial scanpy train & test splits
data = sc.read_h5ad('data/GSE144136_preprocessed.h5ad')
train = sc.pp.subsample(data=data, fraction=0.90, copy=True, random_state=utils.RANDOM)
test = sc.pp.subsample(data=data, fraction=0.10, copy=True, random_state=utils.RANDOM)

# build model
strategy = tf.distribute.MirroredStrategy()
with strategy.scope():
    model = GAN(gex_size=train.shape[1], num_cells_generate=test.shape[0])
    model.compile()
    model.build(input_shape=(model.hyperparams.batch_size, model.hyperparams.latent_dim))  # req. for subclassed models

# process data for training
train_tf = (tf.data.Dataset.from_tensor_slices(train.X)
            .cache()
            .shuffle(buffer_size=train.shape[0], seed=utils.RANDOM)
            .batch(batch_size=model.hyperparams.batch_size * strategy.num_replicas_in_sync,
                   num_parallel_calls=tf.data.AUTOTUNE)
            .prefetch(buffer_size=tf.data.AUTOTUNE))
train_tf_distributed = strategy.experimental_distribute_dataset(train_tf)

test_tf = (tf.data.Dataset.from_tensor_slices(test.X)
           .cache()
           .shuffle(buffer_size=test.shape[0], seed=utils.RANDOM)
           .prefetch(buffer_size=tf.data.AUTOTUNE))

tb_callback = tf.keras.callbacks.TensorBoard(log_dir=utils.LOG_DIR + datetime.datetime.now().strftime("%Y%m%d-%H%M%S"),