# Imports assumed from the surrounding project layout (iGAN-style lib/ package);
# adjust the module paths to match this repository.
from time import time

import numpy as np
import theano
import theano.tensor as T
import theano.tensor.signal.pool
import lasagne

from lib import activations, updates, AlexNet, HOGNet
from lib.rng import np_rng
from lib.theano_utils import floatX, sharedX


def def_invert(self, model, batch_size=1, beta=0.5, lr=0.1, b1=0.9, nz=100, use_bin=True):
    beta_r = sharedX(beta)
    x_c = T.tensor4()  # color strokes
    m_c = T.tensor4()  # color-stroke mask
    x_e = T.tensor4()  # edge strokes
    m_e = T.tensor4()  # edge-stroke mask
    z0 = T.matrix()    # initialization for the latent code
    z = sharedX(floatX(np_rng.uniform(-1., 1., size=(batch_size, nz))))
    gx = model.model_G(z)

    # color loss: masked L2 between the generated image and the color strokes
    mm_c = T.tile(m_c, (1, gx.shape[1], 1, 1))
    color_all = T.mean(T.sqr(gx - x_c) * mm_c, axis=(1, 2, 3)) / (
        T.mean(m_c, axis=(1, 2, 3)) + sharedX(1e-5))

    # edge loss: masked L2 between HOG descriptors of the generated image and the edge strokes
    gx_edge = HOGNet.get_hog(gx, use_bin)
    x_edge = HOGNet.get_hog(x_e, use_bin)
    mm_e = T.tile(m_e, (1, gx_edge.shape[1], 1, 1))
    sum_e = T.sum(T.abs_(mm_e))
    sum_x_edge = T.sum(T.abs_(x_edge))
    edge_all = T.mean(T.sqr(x_edge - gx_edge) * mm_e, axis=(1, 2, 3)) / (
        T.mean(m_e, axis=(1, 2, 3)) + sharedX(1e-5))
    rec_all = color_all + edge_all * sharedX(0.2)

    # proximity term: keep z close to its initialization z0
    z_const = sharedX(10.0)
    init_all = T.mean(T.sqr(z0 - z)) * z_const

    if beta > 0:
        print('using D')
        # realism term: push the discriminator to classify gx as real
        p_gen = model.model_D(gx)
        real_all = T.nnet.binary_crossentropy(p_gen, T.ones(p_gen.shape)).T
        cost_all = rec_all + beta_r * real_all[0] + init_all
    else:
        print('without D')
        cost_all = rec_all + init_all
        real_all = T.zeros(cost_all.shape)

    cost = T.sum(cost_all)
    d_updater = updates.Adam(lr=sharedX(lr), b1=sharedX(b1))
    output = [gx, cost, cost_all, rec_all, real_all, init_all, sum_e, sum_x_edge]

    print('COMPILING...')
    t = time()
    z_updates = d_updater([z], cost)  # each call to _invert applies one Adam step to z
    _invert = theano.function(inputs=[x_c, m_c, x_e, m_e, z0], outputs=output, updates=z_updates)
    print('%.2f seconds to compile _invert function' % (time() - t))
    return [_invert, z_updates, z, beta_r, z_const]
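# Usage sketch (hypothetical driver, not part of the original code): the compiled
# _invert function both evaluates the losses and applies one Adam update to the
# shared latent z, so projection amounts to calling it repeatedly. Inputs must be
# float32 NCHW arrays matching the batch_size/nz used at compile time; z0 is the
# initialization that the init_all term anchors z to.
def invert_with_adam(invert_fns, x_c, m_c, x_e, m_e, z0, n_iters=200):
    _invert, z_updates, z, beta_r, z_const = invert_fns
    z.set_value(floatX(z0))  # start the optimization from z0
    for _ in range(n_iters):
        gx, cost, cost_all, rec_all, real_all, init_all, sum_e, sum_x_edge = \
            _invert(x_c, m_c, x_e, m_e, z0)
    return gx, z.get_value()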
def def_bfgs(model_G, layer='conv4', npx=64, alpha=0.002):
    print('COMPILING...')
    t = time()
    # symbolic variable definitions
    x_f = T.tensor4()  # target features
    x = T.tensor4()    # target image
    z = T.matrix()     # latent code
    tanh = activations.Tanh()
    gx = model_G(tanh(z))  # generated image
    if layer == 'hog':
        gx_f = HOGNet.get_hog(gx, use_bin=True, BS=4)
    else:
        # convert the image to AlexNet's expected input format
        gx_t = AlexNet.transform_im(gx)
        gx_net = AlexNet.build_model(gx_t, layer=layer, shape=(None, 3, npx, npx))
        AlexNet.load_model(gx_net, layer=layer)
        # AlexNet activations truncated at `layer`
        gx_f = lasagne.layers.get_output(gx_net[layer], deterministic=True)
    # feature-space and pixel-space reconstruction losses
    f_rec = T.mean(T.sqr(x_f - gx_f), axis=(1, 2, 3)) * sharedX(alpha)
    x_rec = T.mean(T.sqr(x - gx), axis=(1, 2, 3))
    cost = T.sum(f_rec) + T.sum(x_rec)
    grad = T.grad(cost, z)
    output = [cost, grad, gx]
    _invert = theano.function(inputs=[z, x, x_f], outputs=output)
    print('%.2f seconds to compile _bfgs function' % (time() - t))
    return _invert, z
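# Usage sketch (assumed driver): def_bfgs only compiles a cost/gradient oracle;
# as the name suggests, the optimization itself is typically run with SciPy's
# L-BFGS-B. The wrapper below flattens z to float64 for scipy.optimize and
# reshapes it back for the Theano call; x and x_f are the pixel- and
# feature-space targets.
from scipy.optimize import fmin_l_bfgs_b

def invert_with_bfgs(_invert, z_init, x, x_f, maxiter=200):
    z_shape = z_init.shape

    def f_and_g(z_flat):
        z = z_flat.reshape(z_shape).astype(np.float32)
        cost, grad, gx = _invert(z, x, x_f)
        return float(cost), grad.astype(np.float64).flatten()

    z_opt, cost_opt, info = fmin_l_bfgs_b(
        f_and_g, z_init.flatten().astype(np.float64), maxiter=maxiter)
    return z_opt.reshape(z_shape).astype(np.float32), cost_opt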
def def_bfgs(net, layer='conv4', npx=64, alpha=0.002):
    # Variant of def_bfgs for networks that expose G.eval() (ProGAN-style
    # wrappers). If both definitions live in the same module, this one shadows
    # the def_bfgs above.
    print('COMPILING...')
    t = time()
    x_f = T.tensor4()
    x = T.tensor4()
    z = T.matrix()
    z = theano.printing.Print('this is z')(z)  # debug print of the latent code
    tanh = activations.Tanh()
    tz = tanh(z)  # note: tz is computed but unused below; G receives the raw z
    # note: [False] * 512 declares a 512-dimensional tensor type here; a matrix
    # of labels may have been intended
    net.labels_var = T.TensorType('float32', [False] * 512)('labels_var')
    gx = net.G.eval(z, net.labels_var, ignore_unused_inputs=True)
    # downsample the generator output to the working resolution by average pooling
    scale_factor = 16
    gx = theano.tensor.signal.pool.pool_2d(gx, ds=(scale_factor, scale_factor),
                                           mode='average_exc_pad',
                                           ignore_border=True)
    if layer == 'hog':
        gx_f = HOGNet.get_hog(gx, use_bin=True, BS=4)
    else:
        gx_t = AlexNet.transform_im(gx)
        gx_net = AlexNet.build_model(gx_t, layer=layer, shape=(None, 3, npx, npx))
        AlexNet.load_model(gx_net, layer=layer)
        gx_f = lasagne.layers.get_output(gx_net[layer], deterministic=True)
    f_rec = T.mean(T.sqr(x_f - gx_f), axis=(1, 2, 3)) * sharedX(alpha)
    x_rec = T.mean(T.sqr(x - gx), axis=(1, 2, 3))
    cost = T.sum(f_rec) + T.sum(x_rec)
    grad = T.grad(cost, z)
    output = [cost, grad, gx]
    _invert = theano.function(inputs=[z, x, x_f], outputs=output)
    print('%.2f seconds to compile _bfgs function' % (time() - t))
    return _invert, z
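# Shape sketch for the downsampling step above (assumed resolutions): with
# scale_factor = 16, average pooling maps a 1024x1024 generator output down to
# the 64x64 working resolution that AlexNet.build_model expects when npx=64.
def _demo_pool_shapes():
    x = T.tensor4()
    y = theano.tensor.signal.pool.pool_2d(x, ds=(16, 16),
                                          mode='average_exc_pad',
                                          ignore_border=True)
    f = theano.function([x], y)
    out = f(np.zeros((1, 3, 1024, 1024), dtype=np.float32))
    assert out.shape == (1, 3, 64, 64)  # (batch, channels, 1024/16, 1024/16)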