# Assumed imports for these snippets (iGAN-style repo layout; the exact module
# paths are a guess, not confirmed by the source):
#   import numpy as np
#   import theano
#   import theano.tensor as T
#   import lasagne
#   from time import time
#   from lib import updates, activations, HOGNet, AlexNet
#   from lib.rng import np_rng
#   from lib.theano_utils import floatX, sharedX

    def def_invert(self,
                   model,
                   batch_size=1,
                   beta=0.5,
                   lr=0.1,
                   b1=0.9,
                   nz=100,
                   use_bin=True):
        beta_r = sharedX(beta)
        x_c = T.tensor4()
        m_c = T.tensor4()
        x_e = T.tensor4()
        m_e = T.tensor4()
        z0 = T.matrix()
        z = sharedX(floatX(np_rng.uniform(-1., 1., size=(batch_size, nz))))
        gx = model.model_G(z)

        mm_c = T.tile(m_c, (1, gx.shape[1], 1, 1))
        color_all = T.mean(T.sqr(gx - x_c) * mm_c, axis=(1, 2, 3)) / (
            T.mean(m_c, axis=(1, 2, 3)) + sharedX(1e-5))
        gx_edge = HOGNet.get_hog(gx, use_bin)
        x_edge = HOGNet.get_hog(x_e, use_bin)
        mm_e = T.tile(m_e, (1, gx_edge.shape[1], 1, 1))
        sum_e = T.sum(T.abs_(mm_e))
        sum_x_edge = T.sum(T.abs_(x_edge))
        edge_all = T.mean(T.sqr(x_edge - gx_edge) * mm_e, axis=(1, 2, 3)) / (
            T.mean(m_e, axis=(1, 2, 3)) + sharedX(1e-5))
        rec_all = color_all + edge_all * sharedX(0.2)
        z_const = sharedX(10.0)
        init_all = T.mean(T.sqr(z0 - z)) * z_const

        if beta > 0:
            print('using D')
            p_gen = model.model_D(gx)
            real_all = T.nnet.binary_crossentropy(p_gen, T.ones(
                p_gen.shape)).T  # costs.bce(p_gen, T.ones(p_gen.shape))
            cost_all = rec_all + beta_r * real_all[0] + init_all
        else:
            print('without D')
            cost_all = rec_all + init_all
            real_all = T.zeros(cost_all.shape)

        cost = T.sum(cost_all)
        d_updater = updates.Adam(
            lr=sharedX(lr),
            b1=sharedX(b1))  # ,regularizer=updates.Regularizer(l2=l2))
        output = [
            gx, cost, cost_all, rec_all, real_all, init_all, sum_e, sum_x_edge
        ]

        print('COMPILING...')
        t = time()

        z_updates = d_updater([z], cost)
        _invert = theano.function(inputs=[x_c, m_c, x_e, m_e, z0],
                                  outputs=output,
                                  updates=z_updates)
        print('%.2f seconds to compile _invert function' % (time() - t))
        return [_invert, z_updates, z, beta_r, z_const]
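
# Usage sketch (assumption, not from the source): the Adam updates are attached
# to the compiled function, so every call to _invert advances the shared latent
# z by one gradient step. The constraint arrays are hypothetical placeholders
# with the shapes the tensor4 inputs above expect.
def invert_loop(_invert, z, im_c, mask_c, im_e, mask_e, n_iters=200):
    import numpy as np
    z0_np = z.get_value()                   # start from the current latent
    for step in range(n_iters):
        outs = _invert(im_c, mask_c, im_e, mask_e, z0_np)
        gx, cost = outs[0], outs[1]         # generated image, scalar cost
        if step % 50 == 0:
            print('step %d: cost = %.4f' % (step, float(np.asarray(cost))))
    return gx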
def def_bfgs(model_G, layer='conv4', npx=64, alpha=0.002):
    print('COMPILING...')
    t = time()

    # symbolic definitions
    x_f = T.tensor4()
    x = T.tensor4()
    z = T.matrix()  # latent code
    tanh = activations.Tanh()
    gx = model_G(tanh(z))  # generated image

    if layer == 'hog':
        gx_f = HOGNet.get_hog(gx, use_bin=True, BS=4)
    else:
        # convert the image into the network's input format
        gx_t = AlexNet.transform_im(gx)
        gx_net = AlexNet.build_model(gx_t,
                                     layer=layer,
                                     shape=(None, 3, npx, npx))
        AlexNet.load_model(gx_net, layer=layer)
        # AlexNet truncated at the output of `layer`
        gx_f = lasagne.layers.get_output(gx_net[layer], deterministic=True)

    f_rec = T.mean(T.sqr(x_f - gx_f), axis=(1, 2, 3)) * sharedX(alpha)
    x_rec = T.mean(T.sqr(x - gx), axis=(1, 2, 3))
    cost = T.sum(f_rec) + T.sum(x_rec)
    grad = T.grad(cost, z)
    output = [cost, grad, gx]
    _invert = theano.function(inputs=[z, x, x_f], outputs=output)

    print('%.2f seconds to compile _bfgs function' % (time() - t))
    return _invert, z
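
# Usage sketch (assumption, not from the source): _invert returns the cost and
# its gradient w.r.t. z, which is exactly the (f, g) pair that scipy's L-BFGS
# driver consumes. x_np / x_f_np are hypothetical target arrays; nz=100 is an
# assumed latent size.
def bfgs_invert(_invert, x_np, x_f_np, nz=100, maxfun=100):
    import numpy as np
    from scipy.optimize import fmin_l_bfgs_b

    def f(z_flat):
        z_in = np.float32(z_flat.reshape(1, nz))
        cost, grad, gx = _invert(z_in, x_np, x_f_np)
        return np.float64(cost), np.float64(grad.flatten())

    z0 = np.random.uniform(-1., 1., size=(nz,))
    z_opt, cost_opt, info = fmin_l_bfgs_b(f, z0, maxfun=maxfun)
    return z_opt, cost_opt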
    def preprocess_constraints(self, constraints):
        [im_c_o, mask_c_o, im_e_o,
         mask_e_o] = self.combine_constraints(constraints)
        if self.verbose:
            print('preprocess constraints')
        # utils.CVShow(self.prev_im_c, 'input color image')
        # utils.CVShow(self.prev_mask_c, 'input color mask')
        # utils.CVShow(self.prev_im_e, 'input sketch image')
        # utils.CVShow(self.prev_mask_e, 'input sketch mask')

        im_c = self.transform(im_c_o[np.newaxis, :])
        mask_c = self.transform_mask(mask_c_o[np.newaxis, :])
        im_e = self.transform(im_e_o[np.newaxis, :])
        # utils.debug_trace()

        mask_t = self.transform_mask(mask_e_o[np.newaxis, :])

        mask_e = HOGNet.comp_mask(mask_t)
        # if self.verbose:
        #     print('mask t shape', mask_t.shape)
        #     print('hog mask shape', mask_e.shape)
        shp = [self.batch_size, 1, 1, 1]

        im_c_t = np.tile(im_c, shp)
        mask_c_t = np.tile(mask_c, shp)
        im_e_t = np.tile(im_e, shp)
        mask_e_t = np.tile(mask_e, shp)
        return [im_c_t, mask_c_t, im_e_t, mask_e_t]
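
# Shape check (illustrative sizes): np.tile with shp = [batch_size, 1, 1, 1]
# replicates a single (1, C, H, W) constraint into a (batch_size, C, H, W)
# batch, which is what the Theano graph above consumes.
import numpy as np
im_c_demo = np.zeros((1, 3, 64, 64), dtype=np.float32)
im_c_tiled = np.tile(im_c_demo, [16, 1, 1, 1])
assert im_c_tiled.shape == (16, 3, 64, 64)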
    def __init__(self, model, batch_size=32, d_weight=0.0):
        self.model = model
        self.npx = model.npx
        self.nc = model.nc
        self.nz = model.nz
        self.model_name = model.model_name
        self.transform = model.transform
        self.transform_mask = model.transform_mask
        self.inverse_transform = model.inverse_transform
        BS = 4 if self.nc == 1 else 8  # [hack]
        self.hog = HOGNet.HOGNet(use_bin=True, NO=8, BS=BS, nc=self.nc)
        self.opt_model = self.def_invert(model, batch_size=batch_size, d_weight=d_weight, nc=self.nc)
        self.batch_size = batch_size
def def_bfgs(net, layer='conv4', npx=64, alpha=0.002):
    print('COMPILING...')
    t = time()

    x_f = T.tensor4()
    x = T.tensor4()
    z = T.matrix()

    z = theano.printing.Print('this is z')(z)
    tanh = activations.Tanh()
    tz = tanh(z)
    # tz = printing_op(tz)

    # tz = z_scale * tz
    net.labels_var = T.TensorType('float32', [False] * 512)('labels_var')
    gx = net.G.eval(z, net.labels_var, ignore_unused_inputs=True)
    # gx = printing_op(gx)
    # gx = misc.adjust_dynamic_range(gx, [-1,1], [0,1])
    scale_factor = 16
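    # Downscale the generator output by average pooling; assumption: net.G
    # produces images scale_factor times larger than the npx-sized targets
    # (e.g. 1024 -> 64 when scale_factor is 16).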
    gx = theano.tensor.signal.pool.pool_2d(gx, ds=(scale_factor, scale_factor), mode='average_exc_pad', ignore_border=True)
    # gx = printing_op(gx)

    if layer == 'hog':
        gx_f = HOGNet.get_hog(gx, use_bin=True, BS=4)
    else:
        gx_t = AlexNet.transform_im(gx)
        gx_net = AlexNet.build_model(gx_t, layer=layer, shape=(None, 3, npx, npx))
        AlexNet.load_model(gx_net, layer=layer)
        gx_f = lasagne.layers.get_output(gx_net[layer], deterministic=True)

    f_rec = T.mean(T.sqr(x_f - gx_f), axis=(1, 2, 3)) * sharedX(alpha)
    x_rec = T.mean(T.sqr(x - gx), axis=(1, 2, 3))
    cost = T.sum(f_rec) + T.sum(x_rec)
    grad = T.grad(cost, z)
    output = [cost, grad, gx]
    _invert = theano.function(inputs=[z, x, x_f], outputs=output)

    print('%.2f seconds to compile _bfgs function' % (time() - t))
    return _invert, z