Example #1
0
File: rbm.py  Project: ysmiraak/lgm
 def pcd(self, sess, wtr, batchit, k= 4, lr= 0.01, steps= 0, step_plot= 0, plot= plot_fn('recons')):
     """Train by persistent contrastive divergence for `steps` updates.

     Each update runs `self.up` with a fresh minibatch from `batchit`,
     `k` Gibbs steps, and learning rate `lr`.  Every `step_plot`-th
     update the current visible samples are rendered via `plot`;
     plotting is disabled when `plot` or `step_plot` is falsy.
     """
     # a period larger than `steps` guarantees the plot branch never fires
     if not (plot and step_plot):
         step_plot = 1 + steps
     # todo summarise loss
     for step in range(1, 1 + steps):
         self.step += 1
         feed = {self.v_: next(batchit), self.k_: k, self.lr_: lr}
         sess.run(self.up, feed_dict= feed)
         if step % step_plot == 0:
             plot(sess, wtr, sess.run(self.v), self.step)
Example #2
0
File: dbn.py  Project: ysmiraak/lgm
 def pre(self, sess, wtr, batchit, k= 4, lr= 0.01, steps= 0, step_plot= 0, sleep= 0):
     """Greedy layer-wise pre-training of the stacked RBMs.

     Each RBM in `self.rbm` is trained bottom-up with persistent
     contrastive divergence; after a layer is trained the batch
     iterator is lifted through it, so the next RBM sees this layer's
     hidden activations as its visible data.  Finally `self.sleep`
     is run `sleep` times.
     """
     # identity map: visibles of the bottom RBM are the raw data
     h2v = lambda x: x
     for rbm in self.rbm:
         # plot function from this rbm down to the bottom
         rbm.plot = plot_fn(rbm.scope)
         # `rbm= rbm` binds the loop variable NOW (late-binding-closure fix)
         plot = lambda sess, wtr, v, step= None, rbm= rbm: rbm.plot(
             sess, wtr, step= rbm.step if step is None else step
             , v= h2v(v))
         # train this rbm
         rbm.pcd(sess, wtr, batchit, k= k, lr= lr, steps= steps, step_plot= step_plot, plot= plot)
         # downward closure of this rbm, to be used by the next plot function
         rbm.h2v = binary(tf.matmul(rbm.h, rbm.w, transpose_b= True))
         # chain this layer's downward step onto the previous closure; the
         # defaults capture the current `rbm` and `h2v` by value
         h2v = lambda h, rbm= rbm, h2v= h2v: h2v(sess.run(rbm.h2v, feed_dict= {rbm.h: h}))
         # # generate hidden states from this rbm
         # batchit = rbm.gen(sess, k= k, ret_v= False, ret_h= True)
         # upward closure of this rbm, translating visibles to hiddens
         rbm.v2h = binary(rbm.hgv, transform= False, threshold= False)
         v2h = lambda v, rbm= rbm: sess.run(rbm.v2h, feed_dict= {rbm.v_: v})
         # lazily lift the data stream one layer up for the next RBM
         batchit = map(v2h, batchit)
     for _ in range(sleep): sess.run(self.sleep, feed_dict= {self.k_: k, self.lr_: lr})
Example #3
0
File: rbm.py  Project: ysmiraak/lgm
            ret = self.gibbs[1]
        else:
            raise StopIteration("not ret_v and not ret_h")
        while True: yield sess.run(ret, feed_dict= {self.k_: k})


if False:
    # Demo session (deliberately disabled): train an RBM on binarized
    # MNIST and log reconstructions / generated samples to TensorBoard.
    from utils import mnist
    batchit = mnist(batch_size= 100, ds= 'train', with_labels= False, binary= True)

    # 28*28 = 784 visible units, 512 hidden, 100 persistent sample chains
    rbm = Rbm(28*28, 512, samples= 100)
    sess = tf.InteractiveSession()
    sess.run(tf.global_variables_initializer())

    # rm -r log
    # tf.summary.FileWriter("log/rbm", sess.graph).close()
    # tf.reset_default_graph()
    # sess.close()

    wtr = tf.summary.FileWriter("log/rbm")
    # two training phases: coarse (lr 0.01) then fine (lr 0.001)
    rbm.pcd(sess, wtr, batchit, k= 4, lr= 0.01, steps= 60000, step_plot= 10000)
    rbm.pcd(sess, wtr, batchit, k= 4, lr= 0.001, steps= 12000, step_plot= 3000)
    wtr.close()

    # plot 10 snapshots from a k=1000 Gibbs chain seeded with real data
    plot = plot_fn('gen1k')
    with tf.summary.FileWriter("log/rbm") as wtr:
        for step, v in zip(range(10), rbm.gen(sess, k= 1000, v= next(batchit))):
            plot(sess, wtr, v, step)

    tf.train.Saver().save(sess, "./models/rbm")
Example #4
0
File: sbn.py  Project: ysmiraak/lgm
        return sess.run(self.v, feed_dict= {self.a: a})

    def gen(self, sess):
        """Endlessly yield fresh evaluations of the visible units."""
        while True:
            yield sess.run(self.v)


if False:
    # Demo session (deliberately disabled): train a sigmoid belief net on
    # binarized MNIST, then generate digits from each 4-bit top-layer code.
    from utils import mnist
    batchit = mnist(batch_size= 100, ds= 'train', with_labels= False, binary= True)

    # layer sizes top down to the 784-pixel visibles; 100 sample chains
    sbn = Sbn((784, 210, 56, 15, 4), samples= 100)
    sess = tf.InteractiveSession()
    sess.run(tf.global_variables_initializer())

    # rm -r log/sbn
    # tf.summary.FileWriter("log/sbn", sess.graph).close()
    # tf.reset_default_graph()
    # sess.close()

    with tf.summary.FileWriter("log/sbn") as wtr:
        sbn.fit(sess, wtr, batchit, lr= 0.01, steps= 600000, step_plot= 60000)

    # enumerate all 16 binary codes for the 4-unit top layer
    plot = plot_fn('gen')
    b = 0, 1
    q = np.array(list(product(b, b, b, b)), dtype= np.bool)
    # tile each code over the 100 sample chains and plot the answers
    for n, a in enumerate((np.tile(q, (100, 1)) for q in q)):
        with tf.summary.FileWriter("log/sbn/res{:02d}".format(n)) as wtr:
            plot(sess, wtr, sbn.ans(sess, a), sbn.step)

    tf.train.Saver().save(sess, "./models/sbn")
Example #5
0
    # Demo session fragment: train a VAE, logging the three loss terms
    # and periodic reconstructions to TensorBoard.
    vae = Vae(dat, dim_rec=(128, 128), dim_z=128, dim_gen=(128, 128))
    sess = tf.InteractiveSession()
    sess.run(tf.global_variables_initializer())

    # rm -r log
    # tf.summary.FileWriter('log/vae', sess.graph).close()
    # tf.reset_default_graph()
    # sess.close()

    # combined scalar summary: total loss plus its two components
    loss = tf.summary.merge((tf.summary.scalar(name='loss', tensor=vae.loss),
                             tf.summary.scalar(name='loss_recons',
                                               tensor=vae.loss_recons),
                             tf.summary.scalar(name='loss_relent',
                                               tensor=vae.loss_relent)))
    plot = plot_fn("recons")
    fd = {vae.bs_: 100}

    with tf.summary.FileWriter('log/vae') as wtr:
        for step in range(60000):
            vae.step += 1
            # record loss summaries every 60th update, plot every 6000th
            if vae.step % 60:
                sess.run(vae.up, feed_dict=fd)
            else:
                summ, _ = sess.run((loss, vae.up), feed_dict=fd)
                wtr.add_summary(summ, vae.step)
            if not (vae.step % 6000):
                plot(sess, wtr, sess.run(vae.g, feed_dict=fd), vae.step / 6000)
    with tf.summary.FileWriter('log/vae/gen') as wtr:
        plot(