Example #1
0

    # Disabled per-channel total-variation smoothness term on the reconstruction.
    # The transpose(0,3,1,2) suggests x_reconstructed is NHWC and the penalty
    # expects NCHW, one colour channel at a time — TODO confirm layout.
    #smoothness_penalty = 0.001 * (total_denoising_variation_penalty(x_reconstructed.transpose(0,3,1,2)[:,0:1,:,:]) + total_denoising_variation_penalty(x_reconstructed.transpose(0,3,1,2)[:,1:2,:,:]) + total_denoising_variation_penalty(x_reconstructed.transpose(0,3,1,2)[:,2:3,:,:]))

    # Pixel-space reconstruction error: summed squared difference between the
    # normalized input and the normalized reconstruction, scaled by config.
    square_loss = config['square_loss_weight'] * 1.0 * T.sum(T.sqr(normalize(x) - normalize(x_reconstructed)))

    # Accumulate the total objective term by term (l2_loss is defined outside
    # this view — presumably a weight-decay term; verify against the caller).
    loss = 0.0

    loss += l2_loss

    loss += square_loss

    # Perceptual-distance helper comparing x and x_reconstructed through a
    # feature network; exact semantics defined elsewhere in the project.
    netDist = NetDist(x, x_reconstructed, config)

    if config['style_weight'] > 0.0:
        style_loss, style_out_1, style_out_2 = netDist.get_dist_style()
        style_loss *= config['style_weight']
    else:
        # Zero-weight case: substitute float32 shared zeros so the downstream
        # graph (and any function outputs) stay well-typed.
        style_loss = style_out_1 = style_out_2 = theano.shared(np.asarray(0.0).astype('float32'))

    if config['content_weight'] > 0.0:
        content_loss = config['content_weight'] * netDist.get_dist_content()
    else:
        content_loss = theano.shared(np.asarray(0.0).astype('float32'))

    #128 x 3 x 96 x 96
    # Symbolic gradient of the perceptual terms w.r.t. the reconstruction,
    # sliced to a single 2-D map — presumably first batch element, first
    # trailing-axis channel, for visualisation; TODO confirm intended slice.
    imgGrad = T.grad(style_loss + content_loss, x_reconstructed)[0,:,:,0]

    loss += style_loss + content_loss

    # variational_loss (KL term) is defined outside this fragment.
    loss += 1.0 * variational_loss
Example #2
0
    # Debug dump of every trainable parameter's shape (Python 2 print syntax;
    # this file predates Python 3).
    for param in params:
        print param.get_value().shape

    # KL divergence of a diagonal Gaussian q(z) = N(z_mean, z_var) from the
    # standard normal prior, summed over all latent units: note z_var here is
    # a variance (not log-variance) — TODO confirm against the encoder.
    variational_loss = 0.5 * T.sum(z_mean**2 + z_var - T.log(z_var) - 1.0)

    # Mean squared reconstruction error in raw pixel space, scaled by config.
    square_loss = config['square_loss_weight'] * 1.0 * T.mean(T.sqr(x - x_reconstructed))

    # Accumulate the total objective term by term.
    loss = 0.0

    loss += square_loss

    # Perceptual-distance helper comparing x and x_reconstructed through a
    # feature network; defined elsewhere in the project.
    netDist = NetDist(x, x_reconstructed, config)

    if config['style_weight'] > 0.0:
        style_loss = config['style_weight'] * netDist.get_dist_style()
    else:
        # Zero-weight case: substitute a float32 shared zero so the graph
        # stays well-typed.
        style_loss = theano.shared(np.asarray(0.0).astype('float32'))

    if config['content_weight'] > 0.0:
        content_loss = config['content_weight'] * netDist.get_dist_content()
    else:
        content_loss = theano.shared(np.asarray(0.0).astype('float32'))

    loss += style_loss + content_loss

    loss += 1.0 * variational_loss

    # Symbolic gradients of the total loss w.r.t. every parameter, then
    # rescaled so the global gradient norm does not exceed 5.0 (clipping).
    all_grads = T.grad(loss, params)

    scaled_grads = lasagne.updates.total_norm_constraint(all_grads, 5.0)